summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--src/3rdparty/v8/.gitignore8
-rw-r--r--src/3rdparty/v8/AUTHORS7
-rw-r--r--src/3rdparty/v8/ChangeLog576
-rw-r--r--src/3rdparty/v8/LICENSE6
-rw-r--r--src/3rdparty/v8/Makefile72
-rw-r--r--src/3rdparty/v8/SConstruct72
-rw-r--r--src/3rdparty/v8/benchmarks/README.txt7
-rw-r--r--src/3rdparty/v8/benchmarks/base.js4
-rw-r--r--src/3rdparty/v8/benchmarks/navier-stokes.js387
-rw-r--r--src/3rdparty/v8/benchmarks/revisions.html4
-rw-r--r--src/3rdparty/v8/benchmarks/run.html20
-rw-r--r--src/3rdparty/v8/benchmarks/run.js1
-rw-r--r--src/3rdparty/v8/benchmarks/spinning-balls/v.js93
-rw-r--r--src/3rdparty/v8/build/android.gypi225
-rw-r--r--src/3rdparty/v8/build/common.gypi142
-rwxr-xr-xsrc/3rdparty/v8/build/gyp_v814
-rw-r--r--src/3rdparty/v8/build/mipsu.gypi33
-rw-r--r--src/3rdparty/v8/build/standalone.gypi26
-rwxr-xr-x[-rw-r--r--]src/3rdparty/v8/include/v8-debug.h0
-rw-r--r--src/3rdparty/v8/include/v8-profiler.h39
-rw-r--r--src/3rdparty/v8/include/v8.h234
-rw-r--r--src/3rdparty/v8/include/v8stdint.h3
-rw-r--r--src/3rdparty/v8/preparser/preparser-process.cc14
-rw-r--r--src/3rdparty/v8/samples/shell.cc25
-rwxr-xr-xsrc/3rdparty/v8/src/SConscript7
-rw-r--r--src/3rdparty/v8/src/accessors.cc40
-rw-r--r--src/3rdparty/v8/src/accessors.h3
-rw-r--r--src/3rdparty/v8/src/allocation.cc9
-rw-r--r--src/3rdparty/v8/src/allocation.h7
-rw-r--r--src/3rdparty/v8/src/api.cc664
-rw-r--r--src/3rdparty/v8/src/api.h26
-rw-r--r--src/3rdparty/v8/src/apinatives.js4
-rw-r--r--src/3rdparty/v8/src/arm/assembler-arm-inl.h30
-rw-r--r--src/3rdparty/v8/src/arm/assembler-arm.cc27
-rw-r--r--src/3rdparty/v8/src/arm/assembler-arm.h90
-rw-r--r--src/3rdparty/v8/src/arm/builtins-arm.cc209
-rw-r--r--src/3rdparty/v8/src/arm/code-stubs-arm.cc1660
-rw-r--r--src/3rdparty/v8/src/arm/code-stubs-arm.h75
-rw-r--r--src/3rdparty/v8/src/arm/codegen-arm.cc179
-rw-r--r--src/3rdparty/v8/src/arm/codegen-arm.h15
-rw-r--r--src/3rdparty/v8/src/arm/constants-arm.h57
-rw-r--r--src/3rdparty/v8/src/arm/cpu-arm.cc17
-rw-r--r--src/3rdparty/v8/src/arm/debug-arm.cc46
-rw-r--r--src/3rdparty/v8/src/arm/deoptimizer-arm.cc348
-rw-r--r--src/3rdparty/v8/src/arm/disasm-arm.cc36
-rw-r--r--src/3rdparty/v8/src/arm/frames-arm.h42
-rw-r--r--src/3rdparty/v8/src/arm/full-codegen-arm.cc648
-rw-r--r--src/3rdparty/v8/src/arm/ic-arm.cc140
-rw-r--r--src/3rdparty/v8/src/arm/lithium-arm.cc607
-rw-r--r--src/3rdparty/v8/src/arm/lithium-arm.h235
-rw-r--r--src/3rdparty/v8/src/arm/lithium-codegen-arm.cc1327
-rw-r--r--src/3rdparty/v8/src/arm/lithium-codegen-arm.h60
-rw-r--r--src/3rdparty/v8/src/arm/lithium-gap-resolver-arm.cc38
-rw-r--r--src/3rdparty/v8/src/arm/macro-assembler-arm.cc502
-rw-r--r--src/3rdparty/v8/src/arm/macro-assembler-arm.h110
-rw-r--r--src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.cc64
-rw-r--r--src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.h8
-rw-r--r--src/3rdparty/v8/src/arm/simulator-arm.cc35
-rw-r--r--src/3rdparty/v8/src/arm/simulator-arm.h7
-rw-r--r--src/3rdparty/v8/src/arm/stub-cache-arm.cc506
-rw-r--r--src/3rdparty/v8/src/array.js99
-rw-r--r--src/3rdparty/v8/src/assembler.cc151
-rw-r--r--src/3rdparty/v8/src/assembler.h76
-rw-r--r--src/3rdparty/v8/src/ast-inl.h136
-rw-r--r--src/3rdparty/v8/src/ast.cc594
-rw-r--r--src/3rdparty/v8/src/ast.h1627
-rw-r--r--src/3rdparty/v8/src/atomicops.h11
-rw-r--r--src/3rdparty/v8/src/atomicops_internals_arm_qnx.h117
-rw-r--r--src/3rdparty/v8/src/atomicops_internals_x86_macosx.h44
-rw-r--r--src/3rdparty/v8/src/bignum-dtoa.h2
-rw-r--r--src/3rdparty/v8/src/bootstrapper.cc458
-rw-r--r--src/3rdparty/v8/src/bootstrapper.h2
-rw-r--r--src/3rdparty/v8/src/builtins.cc387
-rw-r--r--src/3rdparty/v8/src/builtins.h55
-rw-r--r--src/3rdparty/v8/src/bytecodes-irregexp.h43
-rw-r--r--src/3rdparty/v8/src/char-predicates-inl.h4
-rw-r--r--src/3rdparty/v8/src/char-predicates.h2
-rw-r--r--src/3rdparty/v8/src/checks.h136
-rw-r--r--src/3rdparty/v8/src/code-stubs.cc137
-rw-r--r--src/3rdparty/v8/src/code-stubs.h219
-rw-r--r--src/3rdparty/v8/src/codegen.cc26
-rw-r--r--src/3rdparty/v8/src/codegen.h9
-rw-r--r--src/3rdparty/v8/src/collection.js83
-rw-r--r--src/3rdparty/v8/src/compilation-cache.cc45
-rw-r--r--src/3rdparty/v8/src/compilation-cache.h36
-rw-r--r--src/3rdparty/v8/src/compiler.cc199
-rw-r--r--src/3rdparty/v8/src/compiler.h62
-rw-r--r--src/3rdparty/v8/src/contexts.cc114
-rw-r--r--src/3rdparty/v8/src/contexts.h45
-rw-r--r--src/3rdparty/v8/src/conversions-inl.h46
-rw-r--r--src/3rdparty/v8/src/conversions.h18
-rw-r--r--src/3rdparty/v8/src/cpu-profiler.cc13
-rw-r--r--src/3rdparty/v8/src/cpu-profiler.h9
-rw-r--r--src/3rdparty/v8/src/cpu.h2
-rw-r--r--src/3rdparty/v8/src/d8-debug.cc8
-rw-r--r--src/3rdparty/v8/src/d8-posix.cc3
-rw-r--r--src/3rdparty/v8/src/d8-readline.cc37
-rw-r--r--src/3rdparty/v8/src/d8.cc347
-rw-r--r--src/3rdparty/v8/src/d8.gyp9
-rw-r--r--src/3rdparty/v8/src/d8.h44
-rw-r--r--src/3rdparty/v8/src/d8.js140
-rw-r--r--src/3rdparty/v8/src/data-flow.h10
-rw-r--r--src/3rdparty/v8/src/date.cc384
-rw-r--r--src/3rdparty/v8/src/date.h260
-rw-r--r--src/3rdparty/v8/src/date.js690
-rw-r--r--src/3rdparty/v8/src/debug-agent.cc31
-rw-r--r--src/3rdparty/v8/src/debug-agent.h1
-rw-r--r--src/3rdparty/v8/src/debug-debugger.js164
-rw-r--r--src/3rdparty/v8/src/debug.cc385
-rw-r--r--src/3rdparty/v8/src/debug.h13
-rw-r--r--src/3rdparty/v8/src/deoptimizer.cc406
-rw-r--r--src/3rdparty/v8/src/deoptimizer.h101
-rw-r--r--src/3rdparty/v8/src/double.h6
-rw-r--r--src/3rdparty/v8/src/dtoa.h6
-rw-r--r--src/3rdparty/v8/src/elements.cc1112
-rw-r--r--src/3rdparty/v8/src/elements.h116
-rw-r--r--src/3rdparty/v8/src/execution.cc71
-rw-r--r--src/3rdparty/v8/src/execution.h19
-rw-r--r--src/3rdparty/v8/src/extensions/experimental/break-iterator.cc252
-rw-r--r--src/3rdparty/v8/src/extensions/experimental/break-iterator.h89
-rw-r--r--src/3rdparty/v8/src/extensions/experimental/collator.cc222
-rw-r--r--src/3rdparty/v8/src/extensions/experimental/datetime-format.cc384
-rw-r--r--src/3rdparty/v8/src/extensions/experimental/datetime-format.h83
-rw-r--r--src/3rdparty/v8/src/extensions/experimental/experimental.gyp105
-rw-r--r--src/3rdparty/v8/src/extensions/experimental/i18n-extension.cc74
-rw-r--r--src/3rdparty/v8/src/extensions/experimental/i18n-js2c.py126
-rw-r--r--src/3rdparty/v8/src/extensions/experimental/i18n-locale.cc111
-rw-r--r--src/3rdparty/v8/src/extensions/experimental/i18n-utils.cc87
-rw-r--r--src/3rdparty/v8/src/extensions/experimental/i18n.js380
-rw-r--r--src/3rdparty/v8/src/extensions/experimental/language-matcher.cc252
-rw-r--r--src/3rdparty/v8/src/extensions/experimental/language-matcher.h95
-rw-r--r--src/3rdparty/v8/src/extensions/experimental/number-format.cc374
-rw-r--r--src/3rdparty/v8/src/extensions/externalize-string-extension.cc7
-rw-r--r--src/3rdparty/v8/src/extensions/gc-extension.cc4
-rw-r--r--src/3rdparty/v8/src/factory.cc197
-rw-r--r--src/3rdparty/v8/src/factory.h54
-rw-r--r--src/3rdparty/v8/src/fast-dtoa.h4
-rw-r--r--src/3rdparty/v8/src/flag-definitions.h203
-rw-r--r--src/3rdparty/v8/src/flags.cc18
-rw-r--r--src/3rdparty/v8/src/flags.h3
-rw-r--r--src/3rdparty/v8/src/frames-inl.h28
-rw-r--r--src/3rdparty/v8/src/frames.cc149
-rw-r--r--src/3rdparty/v8/src/frames.h34
-rw-r--r--src/3rdparty/v8/src/full-codegen.cc294
-rw-r--r--src/3rdparty/v8/src/full-codegen.h111
-rw-r--r--src/3rdparty/v8/src/gdb-jit.cc59
-rw-r--r--src/3rdparty/v8/src/gdb-jit.h3
-rw-r--r--src/3rdparty/v8/src/global-handles.cc5
-rw-r--r--src/3rdparty/v8/src/global-handles.h8
-rw-r--r--src/3rdparty/v8/src/globals.h30
-rw-r--r--src/3rdparty/v8/src/handles.cc378
-rw-r--r--src/3rdparty/v8/src/handles.h73
-rw-r--r--src/3rdparty/v8/src/hashmap.cc224
-rw-r--r--src/3rdparty/v8/src/hashmap.h262
-rw-r--r--src/3rdparty/v8/src/heap-inl.h222
-rw-r--r--src/3rdparty/v8/src/heap-profiler.cc2
-rw-r--r--src/3rdparty/v8/src/heap-profiler.h2
-rw-r--r--src/3rdparty/v8/src/heap.cc1313
-rw-r--r--src/3rdparty/v8/src/heap.h545
-rw-r--r--src/3rdparty/v8/src/hydrogen-instructions.cc563
-rw-r--r--src/3rdparty/v8/src/hydrogen-instructions.h941
-rw-r--r--src/3rdparty/v8/src/hydrogen.cc2136
-rw-r--r--src/3rdparty/v8/src/hydrogen.h177
-rw-r--r--src/3rdparty/v8/src/ia32/assembler-ia32-inl.h14
-rw-r--r--src/3rdparty/v8/src/ia32/assembler-ia32.cc169
-rw-r--r--src/3rdparty/v8/src/ia32/assembler-ia32.h48
-rw-r--r--src/3rdparty/v8/src/ia32/builtins-ia32.cc202
-rw-r--r--src/3rdparty/v8/src/ia32/code-stubs-ia32.cc1697
-rw-r--r--src/3rdparty/v8/src/ia32/code-stubs-ia32.h14
-rw-r--r--src/3rdparty/v8/src/ia32/codegen-ia32.cc239
-rw-r--r--src/3rdparty/v8/src/ia32/codegen-ia32.h16
-rw-r--r--src/3rdparty/v8/src/ia32/cpu-ia32.cc2
-rw-r--r--src/3rdparty/v8/src/ia32/debug-ia32.cc56
-rw-r--r--src/3rdparty/v8/src/ia32/deoptimizer-ia32.cc435
-rw-r--r--src/3rdparty/v8/src/ia32/disasm-ia32.cc47
-rw-r--r--src/3rdparty/v8/src/ia32/frames-ia32.h28
-rw-r--r--src/3rdparty/v8/src/ia32/full-codegen-ia32.cc650
-rw-r--r--src/3rdparty/v8/src/ia32/ic-ia32.cc133
-rw-r--r--src/3rdparty/v8/src/ia32/lithium-codegen-ia32.cc1405
-rw-r--r--src/3rdparty/v8/src/ia32/lithium-codegen-ia32.h85
-rw-r--r--src/3rdparty/v8/src/ia32/lithium-gap-resolver-ia32.cc18
-rw-r--r--src/3rdparty/v8/src/ia32/lithium-ia32.cc304
-rw-r--r--src/3rdparty/v8/src/ia32/lithium-ia32.h285
-rw-r--r--src/3rdparty/v8/src/ia32/macro-assembler-ia32.cc495
-rw-r--r--src/3rdparty/v8/src/ia32/macro-assembler-ia32.h92
-rw-r--r--src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.cc69
-rw-r--r--src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.h8
-rw-r--r--src/3rdparty/v8/src/ia32/stub-cache-ia32.cc479
-rw-r--r--src/3rdparty/v8/src/ic-inl.h12
-rw-r--r--src/3rdparty/v8/src/ic.cc418
-rw-r--r--src/3rdparty/v8/src/ic.h116
-rw-r--r--src/3rdparty/v8/src/incremental-marking-inl.h3
-rw-r--r--src/3rdparty/v8/src/incremental-marking.cc162
-rw-r--r--src/3rdparty/v8/src/incremental-marking.h26
-rw-r--r--src/3rdparty/v8/src/inspector.cc4
-rw-r--r--src/3rdparty/v8/src/inspector.h8
-rw-r--r--src/3rdparty/v8/src/interface.cc226
-rw-r--r--src/3rdparty/v8/src/interface.h156
-rw-r--r--src/3rdparty/v8/src/interpreter-irregexp.cc87
-rw-r--r--src/3rdparty/v8/src/isolate-inl.h10
-rw-r--r--src/3rdparty/v8/src/isolate.cc236
-rw-r--r--src/3rdparty/v8/src/isolate.h123
-rw-r--r--src/3rdparty/v8/src/json-parser.h9
-rw-r--r--src/3rdparty/v8/src/json.js2
-rw-r--r--src/3rdparty/v8/src/jsregexp.cc1027
-rw-r--r--src/3rdparty/v8/src/jsregexp.h154
-rw-r--r--src/3rdparty/v8/src/lazy-instance.h263
-rw-r--r--src/3rdparty/v8/src/list-inl.h4
-rw-r--r--src/3rdparty/v8/src/list.h2
-rw-r--r--src/3rdparty/v8/src/lithium-allocator.cc267
-rw-r--r--src/3rdparty/v8/src/lithium-allocator.h72
-rw-r--r--src/3rdparty/v8/src/lithium.cc35
-rw-r--r--src/3rdparty/v8/src/lithium.h98
-rw-r--r--src/3rdparty/v8/src/liveedit-debugger.js49
-rw-r--r--src/3rdparty/v8/src/liveedit.cc45
-rw-r--r--src/3rdparty/v8/src/liveobjectlist-inl.h4
-rw-r--r--src/3rdparty/v8/src/liveobjectlist.cc50
-rw-r--r--src/3rdparty/v8/src/liveobjectlist.h10
-rw-r--r--src/3rdparty/v8/src/log.cc50
-rw-r--r--src/3rdparty/v8/src/log.h18
-rw-r--r--src/3rdparty/v8/src/macro-assembler.h23
-rw-r--r--src/3rdparty/v8/src/macros.py45
-rw-r--r--src/3rdparty/v8/src/mark-compact-inl.h25
-rw-r--r--src/3rdparty/v8/src/mark-compact.cc1062
-rw-r--r--src/3rdparty/v8/src/mark-compact.h49
-rw-r--r--src/3rdparty/v8/src/math.js46
-rw-r--r--src/3rdparty/v8/src/messages.js185
-rw-r--r--src/3rdparty/v8/src/mips/assembler-mips-inl.h43
-rw-r--r--src/3rdparty/v8/src/mips/assembler-mips.cc57
-rw-r--r--src/3rdparty/v8/src/mips/assembler-mips.h119
-rw-r--r--src/3rdparty/v8/src/mips/builtins-mips.cc254
-rw-r--r--src/3rdparty/v8/src/mips/code-stubs-mips.cc2033
-rw-r--r--src/3rdparty/v8/src/mips/code-stubs-mips.h75
-rw-r--r--src/3rdparty/v8/src/mips/codegen-mips.cc184
-rw-r--r--src/3rdparty/v8/src/mips/codegen-mips.h15
-rw-r--r--src/3rdparty/v8/src/mips/constants-mips.cc4
-rw-r--r--src/3rdparty/v8/src/mips/constants-mips.h191
-rw-r--r--src/3rdparty/v8/src/mips/cpu-mips.cc14
-rw-r--r--src/3rdparty/v8/src/mips/debug-mips.cc50
-rw-r--r--src/3rdparty/v8/src/mips/deoptimizer-mips.cc357
-rw-r--r--src/3rdparty/v8/src/mips/disasm-mips.cc18
-rw-r--r--src/3rdparty/v8/src/mips/frames-mips.h43
-rw-r--r--src/3rdparty/v8/src/mips/full-codegen-mips.cc662
-rw-r--r--src/3rdparty/v8/src/mips/ic-mips.cc201
-rw-r--r--src/3rdparty/v8/src/mips/lithium-codegen-mips.cc1442
-rw-r--r--src/3rdparty/v8/src/mips/lithium-codegen-mips.h73
-rw-r--r--src/3rdparty/v8/src/mips/lithium-gap-resolver-mips.cc67
-rw-r--r--src/3rdparty/v8/src/mips/lithium-mips.cc606
-rw-r--r--src/3rdparty/v8/src/mips/lithium-mips.h205
-rw-r--r--src/3rdparty/v8/src/mips/macro-assembler-mips.cc976
-rw-r--r--src/3rdparty/v8/src/mips/macro-assembler-mips.h235
-rw-r--r--src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.cc54
-rw-r--r--src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.h8
-rw-r--r--src/3rdparty/v8/src/mips/simulator-mips.cc40
-rw-r--r--src/3rdparty/v8/src/mips/simulator-mips.h15
-rw-r--r--src/3rdparty/v8/src/mips/stub-cache-mips.cc567
-rw-r--r--src/3rdparty/v8/src/mirror-debugger.js298
-rw-r--r--src/3rdparty/v8/src/mksnapshot.cc2
-rw-r--r--src/3rdparty/v8/src/objects-debug.cc82
-rw-r--r--src/3rdparty/v8/src/objects-inl.h854
-rw-r--r--src/3rdparty/v8/src/objects-printer.cc71
-rw-r--r--src/3rdparty/v8/src/objects-visiting-inl.h12
-rw-r--r--src/3rdparty/v8/src/objects-visiting.cc3
-rw-r--r--src/3rdparty/v8/src/objects-visiting.h2
-rw-r--r--src/3rdparty/v8/src/objects.cc3334
-rw-r--r--src/3rdparty/v8/src/objects.h1528
-rw-r--r--src/3rdparty/v8/src/once.cc (renamed from src/3rdparty/v8/src/extensions/experimental/i18n-utils.h)76
-rw-r--r--src/3rdparty/v8/src/once.h123
-rw-r--r--src/3rdparty/v8/src/parser.cc1831
-rw-r--r--src/3rdparty/v8/src/parser.h247
-rw-r--r--src/3rdparty/v8/src/platform-cygwin.cc69
-rw-r--r--src/3rdparty/v8/src/platform-freebsd.cc59
-rw-r--r--src/3rdparty/v8/src/platform-linux.cc169
-rw-r--r--src/3rdparty/v8/src/platform-macos.cc55
-rw-r--r--src/3rdparty/v8/src/platform-nullos.cc37
-rw-r--r--src/3rdparty/v8/src/platform-openbsd.cc472
-rw-r--r--src/3rdparty/v8/src/platform-posix.cc60
-rw-r--r--src/3rdparty/v8/src/platform-posix.h (renamed from src/3rdparty/v8/src/extensions/experimental/i18n-natives.h)16
-rw-r--r--src/3rdparty/v8/src/platform-qnx.cc1072
-rw-r--r--src/3rdparty/v8/src/platform-solaris.cc176
-rw-r--r--src/3rdparty/v8/src/platform-win32.cc229
-rw-r--r--src/3rdparty/v8/src/platform.h87
-rw-r--r--src/3rdparty/v8/src/preparse-data.h12
-rw-r--r--src/3rdparty/v8/src/preparser-api.cc30
-rw-r--r--src/3rdparty/v8/src/preparser.cc163
-rw-r--r--src/3rdparty/v8/src/preparser.h97
-rw-r--r--src/3rdparty/v8/src/prettyprinter.cc525
-rw-r--r--src/3rdparty/v8/src/prettyprinter.h103
-rw-r--r--src/3rdparty/v8/src/profile-generator-inl.h23
-rw-r--r--src/3rdparty/v8/src/profile-generator.cc1248
-rw-r--r--src/3rdparty/v8/src/profile-generator.h196
-rw-r--r--src/3rdparty/v8/src/property-details.h132
-rw-r--r--src/3rdparty/v8/src/property.cc29
-rw-r--r--src/3rdparty/v8/src/property.h56
-rw-r--r--src/3rdparty/v8/src/proxy.js2
-rw-r--r--src/3rdparty/v8/src/regexp-macro-assembler-irregexp-inl.h10
-rw-r--r--src/3rdparty/v8/src/regexp-macro-assembler-irregexp.cc36
-rw-r--r--src/3rdparty/v8/src/regexp-macro-assembler-irregexp.h8
-rw-r--r--src/3rdparty/v8/src/regexp-macro-assembler-tracer.cc109
-rw-r--r--src/3rdparty/v8/src/regexp-macro-assembler-tracer.h7
-rw-r--r--src/3rdparty/v8/src/regexp-macro-assembler.cc6
-rw-r--r--src/3rdparty/v8/src/regexp-macro-assembler.h18
-rw-r--r--src/3rdparty/v8/src/regexp.js113
-rw-r--r--src/3rdparty/v8/src/rewriter.cc55
-rw-r--r--src/3rdparty/v8/src/runtime-profiler.cc218
-rw-r--r--src/3rdparty/v8/src/runtime-profiler.h22
-rw-r--r--src/3rdparty/v8/src/runtime.cc2726
-rw-r--r--src/3rdparty/v8/src/runtime.h54
-rw-r--r--src/3rdparty/v8/src/runtime.js23
-rw-r--r--src/3rdparty/v8/src/safepoint-table.cc60
-rw-r--r--src/3rdparty/v8/src/safepoint-table.h78
-rw-r--r--src/3rdparty/v8/src/scanner-character-streams.cc86
-rw-r--r--src/3rdparty/v8/src/scanner-character-streams.h34
-rwxr-xr-xsrc/3rdparty/v8/src/scanner.cc17
-rw-r--r--src/3rdparty/v8/src/scanner.h111
-rw-r--r--src/3rdparty/v8/src/scopeinfo.cc734
-rw-r--r--src/3rdparty/v8/src/scopeinfo.h87
-rw-r--r--src/3rdparty/v8/src/scopes.cc565
-rw-r--r--src/3rdparty/v8/src/scopes.h170
-rw-r--r--src/3rdparty/v8/src/serialize.cc163
-rw-r--r--src/3rdparty/v8/src/serialize.h27
-rw-r--r--src/3rdparty/v8/src/small-pointer-list.h10
-rw-r--r--src/3rdparty/v8/src/spaces-inl.h38
-rw-r--r--src/3rdparty/v8/src/spaces.cc580
-rw-r--r--src/3rdparty/v8/src/spaces.h281
-rw-r--r--src/3rdparty/v8/src/store-buffer.cc161
-rw-r--r--src/3rdparty/v8/src/store-buffer.h25
-rw-r--r--src/3rdparty/v8/src/string-search.h20
-rw-r--r--src/3rdparty/v8/src/string-stream.cc37
-rw-r--r--src/3rdparty/v8/src/string.js82
-rw-r--r--src/3rdparty/v8/src/stub-cache.cc128
-rw-r--r--src/3rdparty/v8/src/stub-cache.h81
-rw-r--r--src/3rdparty/v8/src/token.h6
-rw-r--r--src/3rdparty/v8/src/type-info.cc193
-rw-r--r--src/3rdparty/v8/src/type-info.h36
-rw-r--r--src/3rdparty/v8/src/unicode-inl.h15
-rw-r--r--src/3rdparty/v8/src/unicode.cc1323
-rw-r--r--src/3rdparty/v8/src/unicode.h53
-rw-r--r--src/3rdparty/v8/src/uri.js10
-rw-r--r--src/3rdparty/v8/src/utils.h62
-rw-r--r--src/3rdparty/v8/src/v8-counters.h3
-rw-r--r--src/3rdparty/v8/src/v8.cc125
-rw-r--r--src/3rdparty/v8/src/v8.h14
-rw-r--r--src/3rdparty/v8/src/v8conversions.h4
-rw-r--r--src/3rdparty/v8/src/v8globals.h95
-rw-r--r--src/3rdparty/v8/src/v8memory.h6
-rw-r--r--src/3rdparty/v8/src/v8natives.js219
-rw-r--r--src/3rdparty/v8/src/v8threads.cc2
-rw-r--r--src/3rdparty/v8/src/v8threads.h2
-rw-r--r--src/3rdparty/v8/src/v8utils.cc6
-rw-r--r--src/3rdparty/v8/src/v8utils.h4
-rw-r--r--src/3rdparty/v8/src/variables.cc22
-rw-r--r--src/3rdparty/v8/src/variables.h36
-rw-r--r--src/3rdparty/v8/src/version.cc8
-rw-r--r--src/3rdparty/v8/src/win32-headers.h2
-rw-r--r--src/3rdparty/v8/src/x64/assembler-x64-inl.h18
-rw-r--r--src/3rdparty/v8/src/x64/assembler-x64.cc203
-rw-r--r--src/3rdparty/v8/src/x64/assembler-x64.h80
-rw-r--r--src/3rdparty/v8/src/x64/builtins-x64.cc275
-rw-r--r--src/3rdparty/v8/src/x64/code-stubs-x64.cc1628
-rw-r--r--src/3rdparty/v8/src/x64/code-stubs-x64.h14
-rw-r--r--src/3rdparty/v8/src/x64/codegen-x64.cc252
-rw-r--r--src/3rdparty/v8/src/x64/codegen-x64.h15
-rw-r--r--src/3rdparty/v8/src/x64/cpu-x64.cc4
-rw-r--r--src/3rdparty/v8/src/x64/debug-x64.cc55
-rw-r--r--src/3rdparty/v8/src/x64/deoptimizer-x64.cc425
-rw-r--r--src/3rdparty/v8/src/x64/disasm-x64.cc30
-rw-r--r--src/3rdparty/v8/src/x64/frames-x64.h27
-rw-r--r--src/3rdparty/v8/src/x64/full-codegen-x64.cc660
-rw-r--r--src/3rdparty/v8/src/x64/ic-x64.cc129
-rw-r--r--src/3rdparty/v8/src/x64/lithium-codegen-x64.cc1315
-rw-r--r--src/3rdparty/v8/src/x64/lithium-codegen-x64.h59
-rw-r--r--src/3rdparty/v8/src/x64/lithium-gap-resolver-x64.cc10
-rw-r--r--src/3rdparty/v8/src/x64/lithium-x64.cc599
-rw-r--r--src/3rdparty/v8/src/x64/lithium-x64.h222
-rw-r--r--src/3rdparty/v8/src/x64/macro-assembler-x64.cc520
-rw-r--r--src/3rdparty/v8/src/x64/macro-assembler-x64.h114
-rw-r--r--src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.cc62
-rw-r--r--src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.h8
-rw-r--r--src/3rdparty/v8/src/x64/stub-cache-x64.cc414
-rw-r--r--src/3rdparty/v8/src/zone-inl.h27
-rw-r--r--src/3rdparty/v8/src/zone.cc6
-rw-r--r--src/3rdparty/v8/src/zone.h23
-rw-r--r--src/3rdparty/v8/test/cctest/SConscript8
-rw-r--r--src/3rdparty/v8/test/cctest/cctest.gyp5
-rw-r--r--src/3rdparty/v8/test/cctest/cctest.h2
-rw-r--r--src/3rdparty/v8/test/cctest/cctest.status10
-rw-r--r--src/3rdparty/v8/test/cctest/test-alloc.cc14
-rw-r--r--src/3rdparty/v8/test/cctest/test-api.cc1127
-rw-r--r--src/3rdparty/v8/test/cctest/test-assembler-ia32.cc68
-rw-r--r--src/3rdparty/v8/test/cctest/test-assembler-mips.cc27
-rw-r--r--src/3rdparty/v8/test/cctest/test-assembler-x64.cc95
-rw-r--r--src/3rdparty/v8/test/cctest/test-ast.cc5
-rw-r--r--src/3rdparty/v8/test/cctest/test-compiler.cc5
-rw-r--r--src/3rdparty/v8/test/cctest/test-cpu-profiler.cc4
-rw-r--r--src/3rdparty/v8/test/cctest/test-dataflow.cc31
-rw-r--r--src/3rdparty/v8/test/cctest/test-date.cc168
-rw-r--r--src/3rdparty/v8/test/cctest/test-debug.cc214
-rw-r--r--src/3rdparty/v8/test/cctest/test-deoptimization.cc135
-rw-r--r--src/3rdparty/v8/test/cctest/test-disasm-arm.cc14
-rw-r--r--src/3rdparty/v8/test/cctest/test-disasm-ia32.cc5
-rw-r--r--src/3rdparty/v8/test/cctest/test-disasm-mips.cc110
-rw-r--r--src/3rdparty/v8/test/cctest/test-disasm-x64.cc429
-rw-r--r--src/3rdparty/v8/test/cctest/test-hashing.cc260
-rw-r--r--src/3rdparty/v8/test/cctest/test-heap-profiler.cc453
-rw-r--r--src/3rdparty/v8/test/cctest/test-heap.cc499
-rw-r--r--src/3rdparty/v8/test/cctest/test-lockers.cc89
-rw-r--r--src/3rdparty/v8/test/cctest/test-log-stack-tracer.cc1
-rw-r--r--src/3rdparty/v8/test/cctest/test-mark-compact.cc114
-rwxr-xr-xsrc/3rdparty/v8/test/cctest/test-parsing.cc511
-rw-r--r--src/3rdparty/v8/test/cctest/test-platform-linux.cc2
-rw-r--r--src/3rdparty/v8/test/cctest/test-platform-win32.cc2
-rw-r--r--src/3rdparty/v8/test/cctest/test-random.cc109
-rw-r--r--src/3rdparty/v8/test/cctest/test-regexp.cc6
-rw-r--r--src/3rdparty/v8/test/cctest/test-serialize.cc13
-rw-r--r--src/3rdparty/v8/test/cctest/test-sockets.cc2
-rw-r--r--src/3rdparty/v8/test/cctest/test-spaces.cc33
-rw-r--r--src/3rdparty/v8/test/cctest/test-strings.cc35
-rw-r--r--src/3rdparty/v8/test/cctest/test-thread-termination.cc4
-rw-r--r--src/3rdparty/v8/test/cctest/test-utils.cc2
-rw-r--r--src/3rdparty/v8/test/es5conform/es5conform.status5
-rw-r--r--src/3rdparty/v8/test/message/message.status7
-rw-r--r--src/3rdparty/v8/test/mjsunit/array-construct-transition.js39
-rw-r--r--src/3rdparty/v8/test/mjsunit/array-join.js10
-rw-r--r--src/3rdparty/v8/test/mjsunit/array-literal-transitions.js87
-rw-r--r--src/3rdparty/v8/test/mjsunit/array-store-and-grow.js183
-rw-r--r--src/3rdparty/v8/test/mjsunit/bugs/bug-618.js2
-rw-r--r--src/3rdparty/v8/test/mjsunit/builtins.js5
-rw-r--r--src/3rdparty/v8/test/mjsunit/comparison-ops-and-undefined.js128
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/alloc-object-huge.js308
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/alloc-object.js90
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/inline-arguments.js90
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/inline-arity-mismatch.js62
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/inline-construct.js152
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/inline-literals.js50
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/lazy-const-lookup.js41
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/literals-optimized.js121
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/math-floor-global.js161
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/math-floor-local.js161
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/optimized-for-in.js300
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/regress-106351.js38
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/regress-deopt-call-as-function.js6
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/regress-funarguments.js20
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/regress-funcaller.js11
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/regress-lazy-deopt.js48
-rw-r--r--src/3rdparty/v8/test/mjsunit/compiler/regress-toint32.js45
-rw-r--r--src/3rdparty/v8/test/mjsunit/count-based-osr.js38
-rw-r--r--src/3rdparty/v8/test/mjsunit/d8-os.js130
-rw-r--r--src/3rdparty/v8/test/mjsunit/date.js6
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-break-inline.js100
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js187
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized.js173
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-scopes.js3
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-set-script-source.js64
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-setbreakpoint.js12
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-step-3.js3
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-stepin-accessor.js4
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-stepin-function-call.js17
-rw-r--r--src/3rdparty/v8/test/mjsunit/debug-stepout-scope.js423
-rw-r--r--src/3rdparty/v8/test/mjsunit/elements-kind.js61
-rw-r--r--src/3rdparty/v8/test/mjsunit/elements-transition-hoisting.js211
-rw-r--r--src/3rdparty/v8/test/mjsunit/elements-transition.js12
-rw-r--r--src/3rdparty/v8/test/mjsunit/external-array.js80
-rw-r--r--src/3rdparty/v8/test/mjsunit/function-call.js6
-rw-r--r--src/3rdparty/v8/test/mjsunit/function-named-self-reference.js (renamed from src/3rdparty/v8/src/extensions/experimental/i18n-extension.h)33
-rw-r--r--src/3rdparty/v8/test/mjsunit/fuzz-natives.js8
-rw-r--r--src/3rdparty/v8/test/mjsunit/get-own-property-descriptor.js2
-rw-r--r--src/3rdparty/v8/test/mjsunit/getter-in-value-prototype.js2
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/block-conflicts.js5
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/block-const-assign.js131
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/block-early-errors.js55
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/block-for.js16
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/block-leave.js82
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/block-let-crankshaft.js201
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/block-let-declaration.js6
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/block-let-semantics.js5
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/block-scoping.js3
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/collections.js51
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/debug-blockscopes.js115
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/debug-evaluate-blockscopes.js8
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/module-parsing.js159
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/module-resolution.js139
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/proxies-example-membrane.js512
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/proxies-function.js256
-rw-r--r--src/3rdparty/v8/test/mjsunit/harmony/proxies.js86
-rw-r--r--src/3rdparty/v8/test/mjsunit/math-min-max.js72
-rw-r--r--src/3rdparty/v8/test/mjsunit/math-pow.js257
-rw-r--r--src/3rdparty/v8/test/mjsunit/mjsunit.js2
-rw-r--r--src/3rdparty/v8/test/mjsunit/mjsunit.status51
-rw-r--r--src/3rdparty/v8/test/mjsunit/number-is.js58
-rw-r--r--src/3rdparty/v8/test/mjsunit/object-define-property.js37
-rw-r--r--src/3rdparty/v8/test/mjsunit/object-is.js47
-rw-r--r--src/3rdparty/v8/test/mjsunit/object-prevent-extensions.js12
-rw-r--r--src/3rdparty/v8/test/mjsunit/pixel-array-rounding.js44
-rw-r--r--src/3rdparty/v8/test/mjsunit/regexp.js17
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-102153.js57
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-103259.js36
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-108296.js (renamed from src/3rdparty/v8/src/extensions/experimental/i18n-locale.h)58
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-109195.js65
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-110509.js41
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1110.js5
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-113924.js31
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-115452.js48
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-117794.js57
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-119429.js37
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-119925.js34
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-120099.js40
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-121407.js40
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1229.js109
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1530.js69
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1624-strict.js140
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1624.js139
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1790.js (renamed from src/3rdparty/v8/src/extensions/experimental/number-format.h)73
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1849.js39
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1853.js116
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1878.js44
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1898.js37
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1924.js42
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1945.js34
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1969.js5045
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1973.js52
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-1980.js40
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2027.js48
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2030.js53
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2032.js64
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2034.js46
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2045.js49
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2054.js34
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2055.js48
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2056.js66
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-2058.js37
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-397.js17
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-91517.js112
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-95113.js2
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-97116.js (renamed from src/3rdparty/v8/src/extensions/experimental/collator.h)52
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-100859.js39
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-107996.js64
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-crbug-119926.js33
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-debug-code-recompilation.js47
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-inlining-function-literal-context.js53
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-smi-only-concat.js37
-rw-r--r--src/3rdparty/v8/test/mjsunit/regress/regress-sqrt.js47
-rw-r--r--src/3rdparty/v8/test/mjsunit/string-external-cached.js121
-rw-r--r--src/3rdparty/v8/test/mjsunit/string-externalize.js2
-rw-r--r--src/3rdparty/v8/test/mjsunit/string-replace-one-char.js92
-rw-r--r--src/3rdparty/v8/test/mjsunit/string-slices-regexp.js5
-rwxr-xr-xsrc/3rdparty/v8/test/mjsunit/string-slices.js55
-rw-r--r--src/3rdparty/v8/test/mjsunit/switch.js173
-rw-r--r--src/3rdparty/v8/test/mjsunit/tools/tickprocessor.js7
-rw-r--r--src/3rdparty/v8/test/mjsunit/undeletable-functions.js38
-rw-r--r--src/3rdparty/v8/test/mjsunit/unicodelctest-no-optimization.js4914
-rw-r--r--src/3rdparty/v8/test/mjsunit/unicodelctest.js4912
-rw-r--r--src/3rdparty/v8/test/mozilla/mozilla.status77
-rw-r--r--src/3rdparty/v8/test/preparser/preparser.status6
-rw-r--r--src/3rdparty/v8/test/sputnik/sputnik.status68
-rw-r--r--src/3rdparty/v8/test/test262/README4
-rw-r--r--src/3rdparty/v8/test/test262/test262.status396
-rw-r--r--src/3rdparty/v8/test/test262/testcfg.py29
-rw-r--r--src/3rdparty/v8/tools/bash-completion.sh55
-rw-r--r--src/3rdparty/v8/tools/check-static-initializers.sh55
-rw-r--r--src/3rdparty/v8/tools/common-includes.sh204
-rw-r--r--src/3rdparty/v8/tools/disasm.py3
-rwxr-xr-xsrc/3rdparty/v8/tools/gc-nvp-trace-processor.py8
-rw-r--r--src/3rdparty/v8/tools/gcmole/gcmole.cc29
-rw-r--r--src/3rdparty/v8/tools/gcmole/gcmole.lua9
-rw-r--r--src/3rdparty/v8/tools/gen-postmortem-metadata.py481
-rwxr-xr-xsrc/3rdparty/v8/tools/grokdump.py441
-rw-r--r--src/3rdparty/v8/tools/gyp/v8.gyp150
-rw-r--r--src/3rdparty/v8/tools/js2c.py9
-rw-r--r--src/3rdparty/v8/tools/jsmin.py6
-rwxr-xr-xsrc/3rdparty/v8/tools/ll_prof.py9
-rw-r--r--src/3rdparty/v8/tools/merge-to-branch.sh280
-rwxr-xr-xsrc/3rdparty/v8/tools/presubmit.py63
-rwxr-xr-xsrc/3rdparty/v8/tools/push-to-trunk.sh385
-rwxr-xr-xsrc/3rdparty/v8/tools/test-wrapper-gypbuild.py10
-rwxr-xr-xsrc/3rdparty/v8/tools/test.py47
-rw-r--r--src/3rdparty/v8/tools/tickprocessor-driver.js1
-rw-r--r--src/3rdparty/v8/tools/tickprocessor.js14
-rw-r--r--src/3rdparty/v8/tools/utils.py2
579 files changed, 79389 insertions, 32752 deletions
diff --git a/src/3rdparty/v8/.gitignore b/src/3rdparty/v8/.gitignore
index b61faef..088daea 100644
--- a/src/3rdparty/v8/.gitignore
+++ b/src/3rdparty/v8/.gitignore
@@ -23,10 +23,10 @@ shell_g
/build/gyp
/obj/
/out/
-/test/es5conform/data/
-/test/mozilla/data/
-/test/sputnik/sputniktests/
-/test/test262/data/
+/test/es5conform/data
+/test/mozilla/data
+/test/sputnik/sputniktests
+/test/test262/data
/tools/oom_dump/oom_dump
/tools/oom_dump/oom_dump.o
/tools/visual_studio/Debug
diff --git a/src/3rdparty/v8/AUTHORS b/src/3rdparty/v8/AUTHORS
index 148be41..6e46b3d 100644
--- a/src/3rdparty/v8/AUTHORS
+++ b/src/3rdparty/v8/AUTHORS
@@ -8,6 +8,7 @@ Sigma Designs Inc.
ARM Ltd.
Hewlett-Packard Development Company, LP
Igalia, S.L.
+Joyent, Inc.
Akinori MUSHA <knu@FreeBSD.org>
Alexander Botero-Lowry <alexbl@FreeBSD.org>
@@ -22,14 +23,19 @@ Daniel James <dnljms@gmail.com>
Dineel D Sule <dsule@codeaurora.org>
Erich Ocean <erich.ocean@me.com>
Fedor Indutny <fedor@indutny.com>
+Filipe David Manana <fdmanana@gmail.com>
+Ioseb Dzmanashvili <ioseb.dzmanashvili@gmail.com>
Jan de Mooij <jandemooij@gmail.com>
Jay Freeman <saurik@saurik.com>
Joel Stanley <joel.stan@gmail.com>
John Jozwiak <jjozwiak@codeaurora.org>
+Jonathan Liu <net147@gmail.com>
Kun Zhang <zhangk@codeaurora.org>
Martyn Capewell <martyn.capewell@arm.com>
+Mathias Bynens <mathias@qiwi.be>
Matt Hanselman <mjhanselman@gmail.com>
Maxim Mossienko <maxim.mossienko@gmail.com>
+Michael Lutz <michi@icosahedron.de>
Michael Smith <mike@w3.org>
Mike Gilbert <floppymaster@gmail.com>
Paolo Giarrusso <p.giarrusso@gmail.com>
@@ -46,3 +52,4 @@ Tobias Burnus <burnus@net-b.de>
Vlad Burlik <vladbph@gmail.com>
Yuqiang Xian <yuqiang.xian@intel.com>
Zaheer Ahmad <zahmad@codeaurora.org>
+Zhongping Wang <kewpie.w.zp@gmail.com>
diff --git a/src/3rdparty/v8/ChangeLog b/src/3rdparty/v8/ChangeLog
index d53b448..25eaf56 100644
--- a/src/3rdparty/v8/ChangeLog
+++ b/src/3rdparty/v8/ChangeLog
@@ -1,3 +1,577 @@
+2012-04-10: Version 3.10.1
+
+ Fixed bug with arguments object in inlined functions (issue 2045).
+
+ Fixed performance bug with lazy initialization (Chromium issue
+ 118686).
+
+ Added suppport for Mac OS X 64bit builds with GYP.
+ (Patch contributed by Filipe David Manana <fdmanana@gmail.com>)
+
+ Fixed bug with hidden properties (issue 2034).
+
+ Fixed a performance bug when reloading pages (Chromium issue 117767,
+ V8 issue 1902).
+
+ Fixed bug when optimizing throw in top-level code (issue 2054).
+
+ Fixed two bugs with array literals (issue 2055, Chromium issue 121407).
+
+ Fixed bug with Math.min/Math.max with NaN inputs (issue 2056).
+
+ Fixed a bug with the new runtime profiler (Chromium issue 121147).
+
+ Fixed compilation of V8 using uClibc.
+
+ Optimized boot-up memory use.
+
+ Optimized regular expressions.
+
+
+2012-03-30: Version 3.10.0
+
+ Fixed store IC writability check in strict mode
+ (Chromium issue 120099).
+
+ Resynchronize timers if the Windows system time was changed.
+ (Chromium issue 119815)
+
+ Removed "-mfloat-abi=hard" from host compiler cflags when building for
+ hardfp ARM
+ (https://code.google.com/p/chrome-os-partner/issues/detail?id=8539)
+
+ Fixed edge case for case independent regexp character classes
+ (issue 2032).
+
+ Reset function info counters after context disposal.
+ (Chromium issue 117767, V8 issue 1902)
+
+ Fixed missing write barrier in CopyObjectToObjectElements.
+ (Chromium issue 119926)
+
+ Fixed missing bounds check in HasElementImpl.
+ (Chromium issue 119925)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-03-23: Version 3.9.24
+
+ Activated count-based profiler for ARM.
+
+ Fixed use of proxies as f.prototype properties. (issue 2021)
+
+ Enabled snapshots on MIPS.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-03-21: Version 3.9.23
+
+ Use correct arguments adaptation environment when inlining function
+ containing arguments. (Issue 2014)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-03-20: Version 3.9.22
+
+ Enabled count-based profiler by default.
+
+ Implemented a hash based look-up to speed up address checks
+ in large object space (issue 853).
+
+ Performance and stability improvements on all platforms.
+
+
+2012-03-19: Version 3.9.21
+
+ Fixed push-to-trunk script (and re-push).
+
+ Added API call that identifies strings that are guaranteed only to
+ contain ASCII characters.
+
+
+2012-03-19: Version 3.9.20
+
+ Fixed declarations escaping global strict eval. (Issue 1624)
+
+ Fixed wrapping of receiver for non-strict callbacks. (Issue 1973)
+
+ Fixed function declarations overwriting read-only global properties.
+ (Chromium issue 115452)
+
+ Fixed --use-strict flag in combination with --harmony[-scoping].
+
+ Debugger: naive implementation of "step into Function.prototype.bind".
+
+ Debugger: added ability to set script source from within OnBeforeCompile
+
+ Added flag to always call DebugBreak on abort.
+
+ Re-enabled constructor inlining and inline === comparison with boolean
+ constants. (Issue 2009)
+
+ Don't use an explicit s0 in ClampDoubleToUint8. (Issue 2004)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-03-14: Version 3.9.19
+
+ Ensure there is a smi check of the receiver for global load and call
+ ICs (Chromium issue 117794).
+
+ Performance and stability improvements on all platforms.
+
+
+2012-03-13: Version 3.9.18
+
+ Ensure consistency of Math.sqrt on Intel platforms.
+
+ Remove static initializers in v8. (issue 1859)
+
+ Add explicit dependency on v8_base in the GYP-based build.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-03-12: Version 3.9.17
+
+ Fixed VFP detection through compiler defines. (issue 1996)
+
+ Add Code-related fields to postmortem metadata.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-03-09: Version 3.9.16
+
+ Added basic interface inference for modules (behind the --harmony flag).
+
+ Added Object.is, Number.isFinite, Number.isNaN.
+
+ Updated the Unicode tables to Unicode version 6.1.0.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-03-06: Version 3.9.15
+
+ Fix the heap profiler crash caused by memory layout changes between
+ passes.
+
+ Fix Error.prototype.toString to throw TypeError. (issue 1980)
+
+ Fix double-rounding in strtod for MinGW. (issue 1062)
+
+ Fix corrupted snapshot serializaton on ia32. (Chromium issue v8/1985)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-03-01: Version 3.9.14
+
+ Performance and stability improvements on all platforms.
+
+
+2012-02-29: Version 3.9.13
+
+ Added code kind check before preparing for OSR. (issue 1900, 115073)
+
+ Fixed issue 1802: Pass zone explicitly to zone-allocation on x64 and
+ ARM.
+
+ Ported string construct stub to x64. (issue 849)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-02-28: Version 3.9.12
+
+ Fixed the negative lookup stub to handle deleted entries in a
+ dictionary. (issue 1964)
+
+ Added a new API where the host can supply a callback function. The
+ callback function can resolve the location of a return address on stack
+ to the location where a return-address rewriting profiler stashed the
+ original return address.
+
+ Fixed Chromium issue http://crbug.com/115646: When compiling for-in
+ pass correct context value to the increment instruction.
+
+ Fixed issue 1853: Update breakpoints set with partial file name after
+ compile.
+
+
+2012-02-27: Version 3.9.11
+
+ Made 'module' a context-sensitive keyword (V8 issue 1957).
+
+
+2012-02-24: Version 3.9.10
+
+ Fixed V8 issues 1322, 1772 and 1969.
+
+ Conformance improvements.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-02-23: Version 3.9.9
+
+ Supported fast case for-in in Crankshaft.
+
+ Sped up heap snapshot serialization and dominators construction.
+
+ Randomized allocation addresses on windows. (Chromium issue 115151)
+
+ Fixed compilation with MinGW-w64. (issue 1943)
+
+ Fixed incorrect value of assignments to non-extensible properties.
+
+ Fixed a crash bug in generated code on ia32.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-02-21: Version 3.9.8
+
+ Fixed memory leak and missing #include in StartupDataDecompressor
+ (issue 1960).
+
+ Renamed static methods to avoid shadowing virtual methods and fix Clang
+ C++11 compile error.
+
+ Fixed sequence of element access in array builtins (issue 1790).
+
+ Performance and stability improvements on all platforms.
+
+
+2012-02-16: Version 3.9.7
+
+ Fixed V8 issues 1322, 1878, 1942, 1945 and Chromium issue 113924.
+
+ Fixed GCC-4.7 warnings.
+
+ Added Navier-Stokes benchmark.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-02-14: Version 3.9.6
+
+ Fixed template-related linker error. (issue 1936)
+
+ Allowed inlining of functions containing object literals. (issue 1322)
+
+ Added --call-graph-size option to tickprocessor. (issue 1937)
+
+ Heap Snapshot maximum size limit is too low for really big apps. At the
+ moment the limit is 256MB. (Chromium issue 113015)
+
+ Performance and stability improvements on all platforms.
+
+
+2012-02-09: Version 3.9.5
+
+ Removed unused command line flags.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-02-08: Version 3.9.4
+
+ Properly initialize element-transitioning array literals on ARM.
+ (issue 1930)
+
+ Bug fixes on all platforms.
+
+
+2012-02-07: Version 3.9.3
+
+ When rethrowing an exception, print the stack trace of its original
+ site instead of rethrow site (Chromium issue 60240).
+
+ Increased size of small stacks from 32k to 64k to avoid hitting limits
+ in Chromium (Chromium issue 112843).
+
+
+2012-02-06: Version 3.9.2
+
+ Added timestamp to --trace-gc output. (issue 1932)
+
+ Heap profiler reports implicit references.
+
+ Optionally export metadata with libv8 to enable debuggers to inspect V8
+ state.
+
+
+2012-02-02: Version 3.9.1
+
+ Fixed memory leak in NativeObjectsExplorer::FindOrAddGroupInfo
+ (Chromium issue 112315).
+
+ Fixed a crash in dev tools (Chromium issue 107996).
+
+ Added 'dependencies_traverse': 1 to v8 GYP target.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-02-01: Version 3.9.0
+
+ Reduced memory use immediately after starting V8.
+
+ Stability fixes and performance improvements on all platforms.
+
+
+2012-01-26: Version 3.8.9
+
+ Flush number string cache on GC (issue 1605).
+
+ Provide access to function inferred name with
+ v8::Function::GetInferredName in V8 public API.
+
+ Fix building with Clang (issue 1912).
+
+ Reduce the space used by the stack for the profiling thread.
+
+ Fix misleading documentation of v8::Locker (issue 542).
+
+ Introduce readbinary function in d8 to read binary files.
+
+ Performance and stability improvements on all platforms.
+
+
+2012-01-23: Version 3.8.8
+
+ Limited number of loop iterations in Heap::ReserveSpace
+ (Chromium issue 99027).
+
+ Fixed solaris build (VirtualMemory) (issue 1761).
+
+ Fixed strict vs. non-strict handling of function proxies in
+ higher-order array and string methods.
+
+ Enabled asynchronous remote debugging with d8 (issue 1691).
+
+ Stability and performance improvements on all platforms.
+
+
+2012-01-19: Version 3.8.7
+
+ Ensure that LRandom restores rsi after call to the C function on x64.
+ (Chromium issue http://crbug.com/110509)
+
+ Fixing include issues on *bsd when building with scons.
+ (issue 1897)
+
+ Provide a switch to specify -fno-strict-aliasing
+ (issue 1887)
+
+ Move WIN32 define from standalone.gypi to common.gypi
+ (issue 1760)
+
+ Fix corner-case in heap size estimation.
+ (issue 1893)
+
+ Fix and enable NEW_NON_STRICT_FAST ArgumentsAccess stub on x64.
+ (issue 1903)
+
+ Performance improvements and bug fixes.
+
+
+2012-01-16: Version 3.8.6
+
+ Add primitive WebGL array support to d8.
+
+ Improve heap size estimation (issue 1893).
+
+ Hash collision DOS workaround extended from string keys
+ to numeric keys.
+
+ Provide an API for iterating through all external strings referenced
+ from the JS heap.
+
+ Adjust position recorded for call expressions. http://crbug.com/109195
+
+ Fix GC crash related to instanceof. http://crbug.com/109448
+
+ Performance improvements and bug fixes.
+
+
+2012-01-05: Version 3.8.5
+
+ Fix broken test that assumes that no GC can clear the regexp cache (GC
+ can happen at any time due to Crankshaft).
+
+ Fix handling of bogus receivers for Harmony collections. (issue 1884)
+
+ Add netbsd support to gyp build.
+
+ Determine page size at runtime on posix platforms.
+
+ Ensure that store buffer filtering hash sets are cleared after
+ StoreBuffer::Filter.
+
+ Randomize the seed used for string hashing. This helps guard against
+ CPU-eating DOS attacks against node.js servers. Based on code from
+ Bert Belder. This version only solves the issue for those that compile
+ V8 themselves or those that do not use snapshots. A snapshot-based
+ precompiled V8 will still have predictable string hash codes.
+
+ Implement callback when script finishes running in V8 API.
+
+ Improve performance of Math.min and Math.max for the case of two
+ arguments. (issue 1325)
+
+
+2012-01-02: Version 3.8.4
+
+ Performance improvements for large Smi-only arrays.
+
+ Fixed InternalArrays construction. (issue 1878)
+
+
+2011-12-27: Version 3.8.3
+
+ Avoid embedding new space objects into code objects in the lithium gap
+ resolver. (chromium:108296)
+
+ Bug fixes and performance optimizations on all platforms.
+
+
+2011-12-21: Version 3.8.2
+
+ Add max optimization flag to v8 gyp build to ensure V8 is always built
+ fully optimized in Chrome.
+
+ MIPS: Bring MIPS to parity with other platforms.
+
+ Optimizations and stability improvements on all platforms.
+
+
+2011-12-19: Version 3.8.1
+
+ Fixed GCC 4.7 warnings. Patch from Tobias Burnus.
+
+ Stability improvements on all platforms.
+
+
+2011-12-13: Version 3.8.0
+
+ Fixed handling of arrays in DefineOwnProperty. (issue 1756)
+
+ Sync parser and preparser on do-while and return statements.
+ (issue 1856)
+
+ Fixed another corner case for DefineOwnProperty on arrays (issue 1756).
+
+ Stability and performance improvements on all platforms.
+
+
+2011-12-01: Version 3.7.12
+
+ Increase tick interval for the android platform.
+
+ Fix a bug in the register allocator. (chromium:105112)
+
+ Fix handling of recompiling code. (chromium:105375, v8:1782)
+
+ Start incremental marking on idle notification. (v8:1458)
+
+ Build fixes for various platforms.
+
+ Various performance improvements.
+
+
+2011-11-29: Version 3.7.11
+
+ Fixed bug when generating padding to ensure space for lazy
+ deoptimization.
+ (issue 1846)
+
+ Further reduced pause times due to GC.
+
+ Stability and performance improvements on all platforms.
+
+
+2011-11-23: Version 3.7.10
+
+ Set maximum length of FixedArray in terms of elements instead an
+ absolute number of bytes.
+ (Chromium issue 103103)
+
+ Stability and performance improvements on all platforms.
+
+
+2011-11-21: Version 3.7.9
+
+ Removed exit-time destructors.
+
+ Stability and performance improvements on all platforms.
+
+
+2011-11-17: Version 3.7.8
+
+ Removed hidden prototype from builtins, i.e., deleting an overridden
+ function on builtins will not make the original function reappear.
+
+ Added NetBSD support for scons build.
+
+ Performance improvements on all platforms.
+
+
+2011-11-14: Version 3.7.7
+
+ Fix missing fast property accessors in heap snapshots.
+ (issue 1818)
+
+
+2011-11-11: Version 3.7.6
+
+ Fixed filtering of store buffer for large object pages.
+ (issue 1817)
+
+ Fixed generated hash function on all platforms.
+ (issue 1808)
+
+ Fixed Heap::Shrink to ensure that it does not free pages that are
+ still in use.
+ (Chromium issue 100414)
+
+ Stability and performance improvements on all platforms.
+
+
+2011-11-10: Version 3.7.5
+
+ Added initial gyp infrastructure for MIPS.
+
+ Implemented performance improvements to the incremental garbage
+ collector.
+
+ Added optimizations and stability improvements on all platforms.
+
+
+2011-11-07: Version 3.7.4
+
+ Proper "libv8.so.3.7.4" SONAME for Linux shared library (issue 1786).
+
+ Fix Harmony sets and maps to allow null and undefined as keys
+ (still hidden behind --harmony flag) (issue 1622).
+
+ Implement VirtualMemory on FreeBSD to fix build (issue 1807).
+
+ Enable VFP instructions for Android.
+
+ Fix error handling in Date.prototype.toISOString (issue 1792).
+
+ Bug fixes and performance improvements for all platforms.
+
+ Not officially supported but noteworthy: Crankshaft for MIPS :-)
+
+
2011-10-28: Version 3.7.3
Slight deoptimization as a workaround for issue with jslint: Issue
@@ -321,7 +895,7 @@
Fix the debugger for strict-mode functions. (Chromium issue 89236)
- Add GetPropertyAttribute method for Object in the API. (Patch by
+ Add GetPropertyAttribute method for Object in the API. (Patch by
Peter Varga)
Fix -Wunused-but-set-variable for gcc-4.6 on x64. (Issue 1291)
diff --git a/src/3rdparty/v8/LICENSE b/src/3rdparty/v8/LICENSE
index e435050..2e516ba 100644
--- a/src/3rdparty/v8/LICENSE
+++ b/src/3rdparty/v8/LICENSE
@@ -14,7 +14,9 @@ are:
- Strongtalk assembler, the basis of the files assembler-arm-inl.h,
assembler-arm.cc, assembler-arm.h, assembler-ia32-inl.h,
- assembler-ia32.cc, assembler-ia32.h, assembler.cc and assembler.h.
+ assembler-ia32.cc, assembler-ia32.h, assembler-x64-inl.h,
+ assembler-x64.cc, assembler-x64.h, assembler-mips-inl.h,
+ assembler-mips.cc, assembler-mips.h, assembler.cc and assembler.h.
This code is copyrighted by Sun Microsystems Inc. and released
under a 3-clause BSD license.
@@ -24,7 +26,7 @@ are:
These libraries have their own licenses; we recommend you read them,
as their terms may differ from the terms below.
-Copyright 2006-2011, the V8 project authors. All rights reserved.
+Copyright 2006-2012, the V8 project authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
diff --git a/src/3rdparty/v8/Makefile b/src/3rdparty/v8/Makefile
index 76f45d7..da1d688 100644
--- a/src/3rdparty/v8/Makefile
+++ b/src/3rdparty/v8/Makefile
@@ -1,4 +1,4 @@
-# Copyright 2011 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -27,12 +27,14 @@
# Variable default definitions. Override them by exporting them in your shell.
-CXX ?= "g++" # For distcc: export CXX="distcc g++"
-LINK ?= "g++"
+CXX ?= g++
+LINK ?= g++
OUTDIR ?= out
TESTJOBS ?= -j16
GYPFLAGS ?=
TESTFLAGS ?=
+ANDROID_NDK_ROOT ?=
+ANDROID_TOOL_PREFIX = $(ANDROID_NDK_ROOT)/toolchain/bin/arm-linux-androideabi
# Special build flags. Use them like this: "make library=shared"
@@ -73,6 +75,10 @@ ifeq ($(vfp3), off)
else
GYPFLAGS += -Dv8_can_use_vfp_instructions=true
endif
+# debuggersupport=off
+ifeq ($(debuggersupport), off)
+ GYPFLAGS += -Dv8_enable_debugger_support=0
+endif
# soname_version=1.2.3
ifdef soname_version
GYPFLAGS += -Dsoname_version=$(soname_version)
@@ -85,6 +91,10 @@ endif
ifeq ($(presubmit), no)
TESTFLAGS += --no-presubmit
endif
+# strictaliasing=off (workaround for GCC-4.5)
+ifeq ($(strictaliasing), off)
+ GYPFLAGS += -Dv8_no_strict_aliasing=1
+endif
# ----------------- available targets: --------------------
# - "dependencies": pulls in external dependencies (currently: GYP)
@@ -93,7 +103,8 @@ endif
# - every combination <arch>.<mode>, e.g. "ia32.release"
# - "native": current host's architecture, release mode
# - any of the above with .check appended, e.g. "ia32.release.check"
-# - default (no target specified): build all ARCHES and MODES
+# - "android": cross-compile for Android/ARM (release mode)
+# - default (no target specified): build all DEFAULT_ARCHES and MODES
# - "check": build all targets and run all tests
# - "<arch>.clean" for any <arch> in ARCHES
# - "clean": clean all ARCHES
@@ -102,7 +113,8 @@ endif
# Architectures and modes to be compiled. Consider these to be internal
# variables, don't override them (use the targets instead).
-ARCHES = ia32 x64 arm
+ARCHES = ia32 x64 arm mips
+DEFAULT_ARCHES = ia32 x64 arm
MODES = release debug
# List of files that trigger Makefile regeneration:
@@ -119,14 +131,15 @@ ENVFILE = $(OUTDIR)/environment
.PHONY: all check clean dependencies $(ENVFILE).new native \
$(ARCHES) $(MODES) $(BUILDS) $(CHECKS) $(addsuffix .clean,$(ARCHES)) \
- $(addsuffix .check,$(MODES)) $(addsuffix .check,$(ARCHES))
+ $(addsuffix .check,$(MODES)) $(addsuffix .check,$(ARCHES)) \
+ must-set-ANDROID_NDK_ROOT
# Target definitions. "all" is the default.
all: $(MODES)
# Compile targets. MODES and ARCHES are convenience targets.
.SECONDEXPANSION:
-$(MODES): $(addsuffix .$$@,$(ARCHES))
+$(MODES): $(addsuffix .$$@,$(DEFAULT_ARCHES))
$(ARCHES): $(addprefix $$@.,$(MODES))
@@ -143,9 +156,22 @@ native: $(OUTDIR)/Makefile-native
CXX="$(CXX)" LINK="$(LINK)" BUILDTYPE=Release \
builddir="$(shell pwd)/$(OUTDIR)/$@"
+# TODO(jkummerow): add "android.debug" when we need it.
+android android.release: $(OUTDIR)/Makefile-android
+ @$(MAKE) -C "$(OUTDIR)" -f Makefile-android \
+ CXX="$(ANDROID_TOOL_PREFIX)-g++" \
+ AR="$(ANDROID_TOOL_PREFIX)-ar" \
+ RANLIB="$(ANDROID_TOOL_PREFIX)-ranlib" \
+ CC="$(ANDROID_TOOL_PREFIX)-gcc" \
+ LD="$(ANDROID_TOOL_PREFIX)-ld" \
+ LINK="$(ANDROID_TOOL_PREFIX)-g++" \
+ BUILDTYPE=Release \
+ builddir="$(shell pwd)/$(OUTDIR)/android.release"
+
# Test targets.
check: all
@tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) \
+ --arch=$(shell echo $(DEFAULT_ARCHES) | sed -e 's/ /,/g') \
$(TESTFLAGS)
$(addsuffix .check,$(MODES)): $$(basename $$@)
@@ -176,28 +202,56 @@ native.clean:
rm -rf $(OUTDIR)/native
find $(OUTDIR) -regex '.*\(host\|target\)-native\.mk' -delete
+android.clean:
+ rm -f $(OUTDIR)/Makefile-android
+ rm -rf $(OUTDIR)/android.release
+ find $(OUTDIR) -regex '.*\(host\|target\)-android\.mk' -delete
+
clean: $(addsuffix .clean,$(ARCHES)) native.clean
# GYP file generation targets.
$(OUTDIR)/Makefile-ia32: $(GYPFILES) $(ENVFILE)
+ GYP_GENERATORS=make \
build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
-Ibuild/standalone.gypi --depth=. -Dtarget_arch=ia32 \
-S-ia32 $(GYPFLAGS)
$(OUTDIR)/Makefile-x64: $(GYPFILES) $(ENVFILE)
+ GYP_GENERATORS=make \
build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
-Ibuild/standalone.gypi --depth=. -Dtarget_arch=x64 \
-S-x64 $(GYPFLAGS)
-$(OUTDIR)/Makefile-arm: $(GYPFILES) $(ENVFILE)
+$(OUTDIR)/Makefile-arm: $(GYPFILES) $(ENVFILE) build/armu.gypi
+ GYP_GENERATORS=make \
build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
-Ibuild/standalone.gypi --depth=. -Ibuild/armu.gypi \
-S-arm $(GYPFLAGS)
+$(OUTDIR)/Makefile-mips: $(GYPFILES) $(ENVFILE) build/mipsu.gypi
+ GYP_GENERATORS=make \
+ build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
+ -Ibuild/standalone.gypi --depth=. -Ibuild/mipsu.gypi \
+ -S-mips $(GYPFLAGS)
+
$(OUTDIR)/Makefile-native: $(GYPFILES) $(ENVFILE)
+ GYP_GENERATORS=make \
build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
-Ibuild/standalone.gypi --depth=. -S-native $(GYPFLAGS)
+$(OUTDIR)/Makefile-android: $(GYPFILES) $(ENVFILE) build/android.gypi \
+ must-set-ANDROID_NDK_ROOT
+ GYP_GENERATORS=make \
+ CC="${ANDROID_TOOL_PREFIX}-gcc" \
+ build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
+ -Ibuild/standalone.gypi --depth=. -Ibuild/android.gypi \
+ -S-android $(GYPFLAGS)
+
+must-set-ANDROID_NDK_ROOT:
+ifndef ANDROID_NDK_ROOT
+ $(error ANDROID_NDK_ROOT is not set)
+endif
+
# Replaces the old with the new environment file if they're different, which
# will trigger GYP to regenerate Makefiles.
$(ENVFILE): $(ENVFILE).new
@@ -212,4 +266,4 @@ $(ENVFILE).new:
# Dependencies.
dependencies:
svn checkout --force http://gyp.googlecode.com/svn/trunk build/gyp \
- --revision 1026
+ --revision 1282
diff --git a/src/3rdparty/v8/SConstruct b/src/3rdparty/v8/SConstruct
index 4069da7..34d0efc 100644
--- a/src/3rdparty/v8/SConstruct
+++ b/src/3rdparty/v8/SConstruct
@@ -1,4 +1,4 @@
-# Copyright 2011 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -33,6 +33,7 @@ import os
from os.path import join, dirname, abspath
from types import DictType, StringTypes
root_dir = dirname(File('SConstruct').rfile().abspath)
+src_dir = join(root_dir, 'src')
sys.path.insert(0, join(root_dir, 'tools'))
import js2c, utils
@@ -53,7 +54,7 @@ GCC_DTOA_EXTRA_CCFLAGS = []
LIBRARY_FLAGS = {
'all': {
- 'CPPPATH': [join(root_dir, 'src')],
+ 'CPPPATH': [src_dir],
'regexp:interpreted': {
'CPPDEFINES': ['V8_INTERPRETED_REGEXP']
},
@@ -111,13 +112,13 @@ LIBRARY_FLAGS = {
}
},
'os:freebsd': {
- 'CPPPATH' : ['/usr/local/include'],
+ 'CPPPATH' : [src_dir, '/usr/local/include'],
'LIBPATH' : ['/usr/local/lib'],
'CCFLAGS': ['-ansi'],
'LIBS': ['execinfo']
},
'os:openbsd': {
- 'CPPPATH' : ['/usr/local/include'],
+ 'CPPPATH' : [src_dir, '/usr/local/include'],
'LIBPATH' : ['/usr/local/lib'],
'CCFLAGS': ['-ansi'],
},
@@ -125,10 +126,14 @@ LIBRARY_FLAGS = {
# On Solaris, to get isinf, INFINITY, fpclassify and other macros one
# needs to define __C99FEATURES__.
'CPPDEFINES': ['__C99FEATURES__'],
- 'CPPPATH' : ['/usr/local/include'],
+ 'CPPPATH' : [src_dir, '/usr/local/include'],
'LIBPATH' : ['/usr/local/lib'],
'CCFLAGS': ['-ansi'],
},
+ 'os:netbsd': {
+ 'CPPPATH' : [src_dir, '/usr/pkg/include'],
+ 'LIBPATH' : ['/usr/pkg/lib'],
+ },
'os:win32': {
'CCFLAGS': ['-DWIN32'],
'CXXFLAGS': ['-DWIN32'],
@@ -180,6 +185,9 @@ LIBRARY_FLAGS = {
'mips_arch_variant:mips32r2': {
'CPPDEFINES': ['_MIPS_ARCH_MIPS32R2']
},
+ 'mips_arch_variant:loongson': {
+ 'CPPDEFINES': ['_MIPS_ARCH_LOONGSON']
+ },
'simulator:none': {
'CCFLAGS': ['-EL'],
'LINKFLAGS': ['-EL'],
@@ -189,6 +197,9 @@ LIBRARY_FLAGS = {
'mips_arch_variant:mips32r1': {
'CCFLAGS': ['-mips32', '-Wa,-mips32']
},
+ 'mips_arch_variant:loongson': {
+ 'CCFLAGS': ['-march=mips3', '-Wa,-march=mips3']
+ },
'library:static': {
'LINKFLAGS': ['-static', '-static-libgcc']
},
@@ -207,9 +218,12 @@ LIBRARY_FLAGS = {
'LINKFLAGS': ['-m32'],
'mipsabi:softfloat': {
'CPPDEFINES': ['__mips_soft_float=1'],
+ 'fpu:on': {
+ 'CPPDEFINES' : ['CAN_USE_FPU_INSTRUCTIONS']
+ }
},
'mipsabi:hardfloat': {
- 'CPPDEFINES': ['__mips_hard_float=1'],
+ 'CPPDEFINES': ['__mips_hard_float=1', 'CAN_USE_FPU_INSTRUCTIONS'],
}
},
'arch:x64': {
@@ -291,6 +305,7 @@ V8_EXTRA_FLAGS = {
'-Werror',
'-W',
'-Wno-unused-parameter',
+ '-Woverloaded-virtual',
'-Wnon-virtual-dtor']
},
'os:win32': {
@@ -369,6 +384,9 @@ MKSNAPSHOT_EXTRA_FLAGS = {
'os:win32': {
'LIBS': ['winmm', 'ws2_32'],
},
+ 'os:netbsd': {
+ 'LIBS': ['execinfo', 'pthread']
+ },
'compress_startup_data:bz2': {
'os:linux': {
'LIBS': ['bz2']
@@ -401,7 +419,7 @@ DTOA_EXTRA_FLAGS = {
CCTEST_EXTRA_FLAGS = {
'all': {
- 'CPPPATH': [join(root_dir, 'src')],
+ 'CPPPATH': [src_dir],
'library:shared': {
'CPPDEFINES': ['USING_V8_SHARED']
},
@@ -433,6 +451,9 @@ CCTEST_EXTRA_FLAGS = {
'os:win32': {
'LIBS': ['winmm', 'ws2_32']
},
+ 'os:netbsd': {
+ 'LIBS': ['execinfo', 'pthread']
+ },
'arch:arm': {
'LINKFLAGS': ARM_LINK_FLAGS
},
@@ -455,7 +476,7 @@ CCTEST_EXTRA_FLAGS = {
SAMPLE_FLAGS = {
'all': {
- 'CPPPATH': [join(abspath('.'), 'include')],
+ 'CPPPATH': [join(root_dir, 'include')],
'library:shared': {
'CPPDEFINES': ['USING_V8_SHARED']
},
@@ -492,6 +513,10 @@ SAMPLE_FLAGS = {
'os:win32': {
'LIBS': ['winmm', 'ws2_32']
},
+ 'os:netbsd': {
+ 'LIBPATH' : ['/usr/pkg/lib'],
+ 'LIBS': ['execinfo', 'pthread']
+ },
'arch:arm': {
'LINKFLAGS': ARM_LINK_FLAGS,
'armeabi:soft' : {
@@ -529,6 +554,9 @@ SAMPLE_FLAGS = {
'mips_arch_variant:mips32r2': {
'CPPDEFINES': ['_MIPS_ARCH_MIPS32R2']
},
+ 'mips_arch_variant:loongson': {
+ 'CPPDEFINES': ['_MIPS_ARCH_LOONGSON']
+ },
'simulator:none': {
'CCFLAGS': ['-EL'],
'LINKFLAGS': ['-EL'],
@@ -538,6 +566,9 @@ SAMPLE_FLAGS = {
'mips_arch_variant:mips32r1': {
'CCFLAGS': ['-mips32', '-Wa,-mips32']
},
+ 'mips_arch_variant:loongson': {
+ 'CCFLAGS': ['-march=mips3', '-Wa,-march=mips3']
+ },
'library:static': {
'LINKFLAGS': ['-static', '-static-libgcc']
},
@@ -547,7 +578,10 @@ SAMPLE_FLAGS = {
},
'mipsabi:hardfloat': {
'CCFLAGS': ['-mhard-float'],
- 'LINKFLAGS': ['-mhard-float']
+ 'LINKFLAGS': ['-mhard-float'],
+ 'fpu:on': {
+ 'CPPDEFINES' : ['CAN_USE_FPU_INSTRUCTIONS']
+ }
}
}
},
@@ -634,7 +668,7 @@ SAMPLE_FLAGS = {
PREPARSER_FLAGS = {
'all': {
- 'CPPPATH': [join(abspath('.'), 'include'), join(abspath('.'), 'src')],
+ 'CPPPATH': [join(root_dir, 'include'), src_dir],
'library:shared': {
'CPPDEFINES': ['USING_V8_SHARED']
},
@@ -681,6 +715,9 @@ PREPARSER_FLAGS = {
'mips_arch_variant:mips32r2': {
'CPPDEFINES': ['_MIPS_ARCH_MIPS32R2']
},
+ 'mips_arch_variant:loongson': {
+ 'CPPDEFINES': ['_MIPS_ARCH_LOONGSON']
+ },
'simulator:none': {
'CCFLAGS': ['-EL'],
'LINKFLAGS': ['-EL'],
@@ -690,6 +727,9 @@ PREPARSER_FLAGS = {
'mips_arch_variant:mips32r1': {
'CCFLAGS': ['-mips32', '-Wa,-mips32']
},
+ 'mips_arch_variant:loongson': {
+ 'CCFLAGS': ['-march=mips3', '-Wa,-march=mips3']
+ },
'library:static': {
'LINKFLAGS': ['-static', '-static-libgcc']
},
@@ -823,6 +863,9 @@ D8_FLAGS = {
'os:win32': {
'LIBS': ['winmm', 'ws2_32'],
},
+ 'os:netbsd': {
+ 'LIBS': ['pthread'],
+ },
'arch:arm': {
'LINKFLAGS': ARM_LINK_FLAGS
},
@@ -956,7 +999,7 @@ PLATFORM_OPTIONS = {
'help': 'the architecture to build for'
},
'os': {
- 'values': ['freebsd', 'linux', 'macos', 'win32', 'openbsd', 'solaris', 'cygwin'],
+ 'values': ['freebsd', 'linux', 'macos', 'win32', 'openbsd', 'solaris', 'cygwin', 'netbsd'],
'guess': GuessOS,
'help': 'the os to build for'
},
@@ -1095,7 +1138,7 @@ SIMPLE_OPTIONS = {
'help': 'generate calling conventiont according to selected mips ABI'
},
'mips_arch_variant': {
- 'values': ['mips32r2', 'mips32r1'],
+ 'values': ['mips32r2', 'mips32r1', 'loongson'],
'default': 'mips32r2',
'help': 'mips variant'
},
@@ -1109,6 +1152,11 @@ SIMPLE_OPTIONS = {
'default': 'on',
'help': 'use vfp3 instructions when building the snapshot [Arm only]'
},
+ 'fpu': {
+ 'values': ['on', 'off'],
+ 'default': 'on',
+ 'help': 'use fpu instructions when building the snapshot [MIPS only]'
+ },
}
diff --git a/src/3rdparty/v8/benchmarks/README.txt b/src/3rdparty/v8/benchmarks/README.txt
index 6676f37..59f76ff 100644
--- a/src/3rdparty/v8/benchmarks/README.txt
+++ b/src/3rdparty/v8/benchmarks/README.txt
@@ -77,3 +77,10 @@ input strings.
Furthermore, the benchmark runner was changed to run the benchmarks
for at least a few times to stabilize the reported numbers on slower
machines.
+
+
+Changes from Version 6 to Version 7
+===================================
+
+Added the Navier-Stokes benchmark, a 2D differential equation solver
+that stresses arithmetic computations on double arrays.
diff --git a/src/3rdparty/v8/benchmarks/base.js b/src/3rdparty/v8/benchmarks/base.js
index ffabf24..62c37e1 100644
--- a/src/3rdparty/v8/benchmarks/base.js
+++ b/src/3rdparty/v8/benchmarks/base.js
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -78,7 +78,7 @@ BenchmarkSuite.suites = [];
// Scores are not comparable across versions. Bump the version if
// you're making changes that will affect that scores, e.g. if you add
// a new benchmark or change an existing one.
-BenchmarkSuite.version = '6';
+BenchmarkSuite.version = '7';
// To make the benchmark results predictable, we replace Math.random
diff --git a/src/3rdparty/v8/benchmarks/navier-stokes.js b/src/3rdparty/v8/benchmarks/navier-stokes.js
new file mode 100644
index 0000000..b0dc3c8
--- /dev/null
+++ b/src/3rdparty/v8/benchmarks/navier-stokes.js
@@ -0,0 +1,387 @@
+/**
+ * Copyright 2012 the V8 project authors. All rights reserved.
+ * Copyright 2009 Oliver Hunt <http://nerget.com>
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+var NavierStokes = new BenchmarkSuite('NavierStokes', 1484000,
+ [new Benchmark('NavierStokes',
+ runNavierStokes,
+ setupNavierStokes,
+ tearDownNavierStokes)]);
+
+var solver = null;
+
+function runNavierStokes()
+{
+ solver.update();
+}
+
+function setupNavierStokes()
+{
+ solver = new FluidField(null);
+ solver.setResolution(128, 128);
+ solver.setIterations(20);
+ solver.setDisplayFunction(function(){});
+ solver.setUICallback(prepareFrame);
+ solver.reset();
+}
+
+function tearDownNavierStokes()
+{
+ solver = null;
+}
+
+function addPoints(field) {
+ var n = 64;
+ for (var i = 1; i <= n; i++) {
+ field.setVelocity(i, i, n, n);
+ field.setDensity(i, i, 5);
+ field.setVelocity(i, n - i, -n, -n);
+ field.setDensity(i, n - i, 20);
+ field.setVelocity(128 - i, n + i, -n, -n);
+ field.setDensity(128 - i, n + i, 30);
+ }
+}
+
+var framesTillAddingPoints = 0;
+var framesBetweenAddingPoints = 5;
+
+function prepareFrame(field)
+{
+ if (framesTillAddingPoints == 0) {
+ addPoints(field);
+ framesTillAddingPoints = framesBetweenAddingPoints;
+ framesBetweenAddingPoints++;
+ } else {
+ framesTillAddingPoints--;
+ }
+}
+
+// Code from Oliver Hunt (http://nerget.com/fluidSim/pressure.js) starts here.
+function FluidField(canvas) {
+ function addFields(x, s, dt)
+ {
+ for (var i=0; i<size ; i++ ) x[i] += dt*s[i];
+ }
+
+ function set_bnd(b, x)
+ {
+ if (b===1) {
+ for (var i = 1; i <= width; i++) {
+ x[i] = x[i + rowSize];
+ x[i + (height+1) *rowSize] = x[i + height * rowSize];
+ }
+
+ for (var j = 1; i <= height; i++) {
+ x[j * rowSize] = -x[1 + j * rowSize];
+ x[(width + 1) + j * rowSize] = -x[width + j * rowSize];
+ }
+ } else if (b === 2) {
+ for (var i = 1; i <= width; i++) {
+ x[i] = -x[i + rowSize];
+ x[i + (height + 1) * rowSize] = -x[i + height * rowSize];
+ }
+
+ for (var j = 1; j <= height; j++) {
+ x[j * rowSize] = x[1 + j * rowSize];
+ x[(width + 1) + j * rowSize] = x[width + j * rowSize];
+ }
+ } else {
+ for (var i = 1; i <= width; i++) {
+ x[i] = x[i + rowSize];
+ x[i + (height + 1) * rowSize] = x[i + height * rowSize];
+ }
+
+ for (var j = 1; j <= height; j++) {
+ x[j * rowSize] = x[1 + j * rowSize];
+ x[(width + 1) + j * rowSize] = x[width + j * rowSize];
+ }
+ }
+ var maxEdge = (height + 1) * rowSize;
+ x[0] = 0.5 * (x[1] + x[rowSize]);
+ x[maxEdge] = 0.5 * (x[1 + maxEdge] + x[height * rowSize]);
+ x[(width+1)] = 0.5 * (x[width] + x[(width + 1) + rowSize]);
+ x[(width+1)+maxEdge] = 0.5 * (x[width + maxEdge] + x[(width + 1) + height * rowSize]);
+ }
+
+ function lin_solve(b, x, x0, a, c)
+ {
+ if (a === 0 && c === 1) {
+ for (var j=1 ; j<=height; j++) {
+ var currentRow = j * rowSize;
+ ++currentRow;
+ for (var i = 0; i < width; i++) {
+ x[currentRow] = x0[currentRow];
+ ++currentRow;
+ }
+ }
+ set_bnd(b, x);
+ } else {
+ var invC = 1 / c;
+ for (var k=0 ; k<iterations; k++) {
+ for (var j=1 ; j<=height; j++) {
+ var lastRow = (j - 1) * rowSize;
+ var currentRow = j * rowSize;
+ var nextRow = (j + 1) * rowSize;
+ var lastX = x[currentRow];
+ ++currentRow;
+ for (var i=1; i<=width; i++)
+ lastX = x[currentRow] = (x0[currentRow] + a*(lastX+x[++currentRow]+x[++lastRow]+x[++nextRow])) * invC;
+ }
+ set_bnd(b, x);
+ }
+ }
+ }
+
+ function diffuse(b, x, x0, dt)
+ {
+ var a = 0;
+ lin_solve(b, x, x0, a, 1 + 4*a);
+ }
+
+ function lin_solve2(x, x0, y, y0, a, c)
+ {
+ if (a === 0 && c === 1) {
+ for (var j=1 ; j <= height; j++) {
+ var currentRow = j * rowSize;
+ ++currentRow;
+ for (var i = 0; i < width; i++) {
+ x[currentRow] = x0[currentRow];
+ y[currentRow] = y0[currentRow];
+ ++currentRow;
+ }
+ }
+ set_bnd(1, x);
+ set_bnd(2, y);
+ } else {
+ var invC = 1/c;
+ for (var k=0 ; k<iterations; k++) {
+ for (var j=1 ; j <= height; j++) {
+ var lastRow = (j - 1) * rowSize;
+ var currentRow = j * rowSize;
+ var nextRow = (j + 1) * rowSize;
+ var lastX = x[currentRow];
+ var lastY = y[currentRow];
+ ++currentRow;
+ for (var i = 1; i <= width; i++) {
+ lastX = x[currentRow] = (x0[currentRow] + a * (lastX + x[currentRow] + x[lastRow] + x[nextRow])) * invC;
+ lastY = y[currentRow] = (y0[currentRow] + a * (lastY + y[++currentRow] + y[++lastRow] + y[++nextRow])) * invC;
+ }
+ }
+ set_bnd(1, x);
+ set_bnd(2, y);
+ }
+ }
+ }
+
+ function diffuse2(x, x0, y, y0, dt)
+ {
+ var a = 0;
+ lin_solve2(x, x0, y, y0, a, 1 + 4 * a);
+ }
+
+ function advect(b, d, d0, u, v, dt)
+ {
+ var Wdt0 = dt * width;
+ var Hdt0 = dt * height;
+ var Wp5 = width + 0.5;
+ var Hp5 = height + 0.5;
+ for (var j = 1; j<= height; j++) {
+ var pos = j * rowSize;
+ for (var i = 1; i <= width; i++) {
+ var x = i - Wdt0 * u[++pos];
+ var y = j - Hdt0 * v[pos];
+ if (x < 0.5)
+ x = 0.5;
+ else if (x > Wp5)
+ x = Wp5;
+ var i0 = x | 0;
+ var i1 = i0 + 1;
+ if (y < 0.5)
+ y = 0.5;
+ else if (y > Hp5)
+ y = Hp5;
+ var j0 = y | 0;
+ var j1 = j0 + 1;
+ var s1 = x - i0;
+ var s0 = 1 - s1;
+ var t1 = y - j0;
+ var t0 = 1 - t1;
+ var row1 = j0 * rowSize;
+ var row2 = j1 * rowSize;
+ d[pos] = s0 * (t0 * d0[i0 + row1] + t1 * d0[i0 + row2]) + s1 * (t0 * d0[i1 + row1] + t1 * d0[i1 + row2]);
+ }
+ }
+ set_bnd(b, d);
+ }
+
+ function project(u, v, p, div)
+ {
+ var h = -0.5 / Math.sqrt(width * height);
+ for (var j = 1 ; j <= height; j++ ) {
+ var row = j * rowSize;
+ var previousRow = (j - 1) * rowSize;
+ var prevValue = row - 1;
+ var currentRow = row;
+ var nextValue = row + 1;
+ var nextRow = (j + 1) * rowSize;
+ for (var i = 1; i <= width; i++ ) {
+ div[++currentRow] = h * (u[++nextValue] - u[++prevValue] + v[++nextRow] - v[++previousRow]);
+ p[currentRow] = 0;
+ }
+ }
+ set_bnd(0, div);
+ set_bnd(0, p);
+
+ lin_solve(0, p, div, 1, 4 );
+ var wScale = 0.5 * width;
+ var hScale = 0.5 * height;
+ for (var j = 1; j<= height; j++ ) {
+ var prevPos = j * rowSize - 1;
+ var currentPos = j * rowSize;
+ var nextPos = j * rowSize + 1;
+ var prevRow = (j - 1) * rowSize;
+ var currentRow = j * rowSize;
+ var nextRow = (j + 1) * rowSize;
+
+ for (var i = 1; i<= width; i++) {
+ u[++currentPos] -= wScale * (p[++nextPos] - p[++prevPos]);
+ v[currentPos] -= hScale * (p[++nextRow] - p[++prevRow]);
+ }
+ }
+ set_bnd(1, u);
+ set_bnd(2, v);
+ }
+
+ function dens_step(x, x0, u, v, dt)
+ {
+ addFields(x, x0, dt);
+ diffuse(0, x0, x, dt );
+ advect(0, x, x0, u, v, dt );
+ }
+
+ function vel_step(u, v, u0, v0, dt)
+ {
+ addFields(u, u0, dt );
+ addFields(v, v0, dt );
+ var temp = u0; u0 = u; u = temp;
+ var temp = v0; v0 = v; v = temp;
+ diffuse2(u,u0,v,v0, dt);
+ project(u, v, u0, v0);
+ var temp = u0; u0 = u; u = temp;
+ var temp = v0; v0 = v; v = temp;
+ advect(1, u, u0, u0, v0, dt);
+ advect(2, v, v0, u0, v0, dt);
+ project(u, v, u0, v0 );
+ }
+ var uiCallback = function(d,u,v) {};
+
+ function Field(dens, u, v) {
+ // Just exposing the fields here rather than using accessors is a measurable win during display (maybe 5%)
+ // but makes the code ugly.
+ this.setDensity = function(x, y, d) {
+ dens[(x + 1) + (y + 1) * rowSize] = d;
+ }
+ this.getDensity = function(x, y) {
+ return dens[(x + 1) + (y + 1) * rowSize];
+ }
+ this.setVelocity = function(x, y, xv, yv) {
+ u[(x + 1) + (y + 1) * rowSize] = xv;
+ v[(x + 1) + (y + 1) * rowSize] = yv;
+ }
+ this.getXVelocity = function(x, y) {
+ return u[(x + 1) + (y + 1) * rowSize];
+ }
+ this.getYVelocity = function(x, y) {
+ return v[(x + 1) + (y + 1) * rowSize];
+ }
+ this.width = function() { return width; }
+ this.height = function() { return height; }
+ }
+ function queryUI(d, u, v)
+ {
+ for (var i = 0; i < size; i++)
+ u[i] = v[i] = d[i] = 0.0;
+ uiCallback(new Field(d, u, v));
+ }
+
+ this.update = function () {
+ queryUI(dens_prev, u_prev, v_prev);
+ vel_step(u, v, u_prev, v_prev, dt);
+ dens_step(dens, dens_prev, u, v, dt);
+ displayFunc(new Field(dens, u, v));
+ }
+ this.setDisplayFunction = function(func) {
+ displayFunc = func;
+ }
+
+ this.iterations = function() { return iterations; }
+ this.setIterations = function(iters) {
+ if (iters > 0 && iters <= 100)
+ iterations = iters;
+ }
+ this.setUICallback = function(callback) {
+ uiCallback = callback;
+ }
+ var iterations = 10;
+ var visc = 0.5;
+ var dt = 0.1;
+ var dens;
+ var dens_prev;
+ var u;
+ var u_prev;
+ var v;
+ var v_prev;
+ var width;
+ var height;
+ var rowSize;
+ var size;
+ var displayFunc;
+ function reset()
+ {
+ rowSize = width + 2;
+ size = (width+2)*(height+2);
+ dens = new Array(size);
+ dens_prev = new Array(size);
+ u = new Array(size);
+ u_prev = new Array(size);
+ v = new Array(size);
+ v_prev = new Array(size);
+ for (var i = 0; i < size; i++)
+ dens_prev[i] = u_prev[i] = v_prev[i] = dens[i] = u[i] = v[i] = 0;
+ }
+ this.reset = reset;
+ this.setResolution = function (hRes, wRes)
+ {
+ var res = wRes * hRes;
+ if (res > 0 && res < 1000000 && (wRes != width || hRes != height)) {
+ width = wRes;
+ height = hRes;
+ reset();
+ return true;
+ }
+ return false;
+ }
+ this.setResolution(64, 64);
+}
diff --git a/src/3rdparty/v8/benchmarks/revisions.html b/src/3rdparty/v8/benchmarks/revisions.html
index 6ff75be..3ce9889 100644
--- a/src/3rdparty/v8/benchmarks/revisions.html
+++ b/src/3rdparty/v8/benchmarks/revisions.html
@@ -19,6 +19,10 @@ not comparable unless both results are run with the same revision of
the benchmark suite.
</p>
+<div class="subtitle"><h3>Version 7 (<a href="http://v8.googlecode.com/svn/data/benchmarks/v7/run.html">link</a>)</h3></div>
+
+<p>This version includes the new Navier-Stokes benchmark, a 2D differential
+ equation solver that stresses arithmetic computations on double arrays.</p>
<div class="subtitle"><h3>Version 6 (<a href="http://v8.googlecode.com/svn/data/benchmarks/v6/run.html">link</a>)</h3></div>
diff --git a/src/3rdparty/v8/benchmarks/run.html b/src/3rdparty/v8/benchmarks/run.html
index 36d2ad5..f1d14c1 100644
--- a/src/3rdparty/v8/benchmarks/run.html
+++ b/src/3rdparty/v8/benchmarks/run.html
@@ -14,6 +14,7 @@
<script type="text/javascript" src="earley-boyer.js"></script>
<script type="text/javascript" src="regexp.js"></script>
<script type="text/javascript" src="splay.js"></script>
+<script type="text/javascript" src="navier-stokes.js"></script>
<link type="text/css" rel="stylesheet" href="style.css" />
<script type="text/javascript">
var completed = 0;
@@ -52,16 +53,16 @@ function Run() {
BenchmarkSuite.RunSuites({ NotifyStep: ShowProgress,
NotifyError: AddError,
NotifyResult: AddResult,
- NotifyScore: AddScore });
+ NotifyScore: AddScore });
}
function ShowWarningIfObsolete() {
- // If anything goes wrong we will just catch the exception and no
+ // If anything goes wrong we will just catch the exception and no
// warning is shown, i.e., no harm is done.
try {
var xmlhttp;
- var next_version = parseInt(BenchmarkSuite.version) + 1;
- var next_version_url = "../v" + next_version + "/run.html";
+ var next_version = parseInt(BenchmarkSuite.version) + 1;
+ var next_version_url = "../v" + next_version + "/run.html";
if (window.XMLHttpRequest) {
xmlhttp = new window.XMLHttpRequest();
} else if (window.ActiveXObject) {
@@ -75,7 +76,7 @@ function ShowWarningIfObsolete() {
};
xmlhttp.send(null);
} catch(e) {
- // Ignore exception if check for next version fails.
+ // Ignore exception if check for next version fails.
// Hence no warning is displayed.
}
}
@@ -83,7 +84,7 @@ function ShowWarningIfObsolete() {
function Load() {
var version = BenchmarkSuite.version;
document.getElementById("version").innerHTML = version;
- ShowWarningIfObsolete();
+ ShowWarningIfObsolete();
setTimeout(Run, 200);
}
</script>
@@ -91,11 +92,11 @@ function Load() {
<body onload="Load()">
<div>
<div class="title"><h1>V8 Benchmark Suite - version <span id="version">?</span></h1></div>
- <div class="warning" id="obsolete">
+ <div class="warning" id="obsolete">
Warning! This is not the latest version of the V8 benchmark
-suite. Consider running the
+suite. Consider running the
<a href="http://v8.googlecode.com/svn/data/benchmarks/current/run.html">
-latest version</a>.
+latest version</a>.
</div>
<table>
<tr>
@@ -117,6 +118,7 @@ higher scores means better performance: <em>Bigger is better!</em>
(<i>1761 lines</i>).
</li>
<li><b>Splay</b><br>Data manipulation benchmark that deals with splay trees and exercises the automatic memory management subsystem (<i>394 lines</i>).</li>
+<li><b>NavierStokes</b><br>Solves NavierStokes equations in 2D, heavily manipulating double precision arrays. Based on Oliver Hunt's code (<i>387 lines</i>).</li>
</ul>
<p>
diff --git a/src/3rdparty/v8/benchmarks/run.js b/src/3rdparty/v8/benchmarks/run.js
index da95fb4..58f6265 100644
--- a/src/3rdparty/v8/benchmarks/run.js
+++ b/src/3rdparty/v8/benchmarks/run.js
@@ -34,6 +34,7 @@ load('raytrace.js');
load('earley-boyer.js');
load('regexp.js');
load('splay.js');
+load('navier-stokes.js');
var success = true;
diff --git a/src/3rdparty/v8/benchmarks/spinning-balls/v.js b/src/3rdparty/v8/benchmarks/spinning-balls/v.js
index d8a5e2a..5ae1194 100644
--- a/src/3rdparty/v8/benchmarks/spinning-balls/v.js
+++ b/src/3rdparty/v8/benchmarks/spinning-balls/v.js
@@ -61,6 +61,7 @@ var numberOfFrames = 0;
var sumOfSquaredPauses = 0;
var benchmarkStartTime = void 0;
var benchmarkTimeLimit = void 0;
+var autoScale = void 0;
var pauseDistribution = [];
@@ -193,7 +194,7 @@ function ModifyPointsSet() {
}
-function PausePlot(width, height, size) {
+function PausePlot(width, height, size, scale) {
var canvas = document.createElement("canvas");
canvas.width = this.width = width;
canvas.height = this.height = height;
@@ -201,7 +202,14 @@ function PausePlot(width, height, size) {
this.ctx = canvas.getContext('2d');
- this.maxPause = 0;
+ if (typeof scale !== "number") {
+ this.autoScale = true;
+ this.maxPause = 0;
+ } else {
+ this.autoScale = false;
+ this.maxPause = scale;
+ }
+
this.size = size;
// Initialize cyclic buffer for pauses.
@@ -248,18 +256,21 @@ PausePlot.prototype.iteratePauses = function (f) {
PausePlot.prototype.draw = function () {
var first = null;
- this.iteratePauses(function (i, v) {
- if (first === null) {
- first = v;
- }
- this.maxPause = Math.max(v, this.maxPause);
- });
+
+ if (this.autoScale) {
+ this.iteratePauses(function (i, v) {
+ if (first === null) {
+ first = v;
+ }
+ this.maxPause = Math.max(v, this.maxPause);
+ });
+ }
var dx = this.width / this.size;
var dy = this.height / this.maxPause;
this.ctx.save();
- this.ctx.clearRect(0, 0, 480, 240);
+ this.ctx.clearRect(0, 0, this.width, this.height);
this.ctx.beginPath();
this.ctx.moveTo(1, dy * this.pauses[this.start]);
var p = first;
@@ -414,23 +425,54 @@ function render() {
}
-function renderForm() {
- form = document.createElement("form");
- form.setAttribute("action", "javascript:start()");
- var label = document.createTextNode("Time limit in seconds ");
- var input = document.createElement("input");
- input.setAttribute("id", "timelimit");
- input.setAttribute("value", "60");
- var button = document.createElement("input");
+function Form() {
+ function create(tag) { return document.createElement(tag); }
+ function text(value) { return document.createTextNode(value); }
+
+ this.form = create("form");
+ this.form.setAttribute("action", "javascript:start()");
+
+ var table = create("table");
+ table.setAttribute("style", "margin-left: auto; margin-right: auto;");
+
+ function col(a) {
+ var td = create("td");
+ td.appendChild(a);
+ return td;
+ }
+
+ function row(a, b) {
+ var tr = create("tr");
+ tr.appendChild(col(a));
+ tr.appendChild(col(b));
+ return tr;
+ }
+
+ this.timelimit = create("input");
+ this.timelimit.setAttribute("value", "60");
+
+ table.appendChild(row(text("Time limit in seconds"), this.timelimit));
+
+ this.autoscale = create("input");
+ this.autoscale.setAttribute("type", "checkbox");
+ this.autoscale.setAttribute("checked", "true");
+ table.appendChild(row(text("Autoscale pauses plot"), this.autoscale));
+
+ var button = create("input");
button.setAttribute("type", "submit");
button.setAttribute("value", "Start");
- form.appendChild(label);
- form.appendChild(input);
- form.appendChild(button);
- document.body.appendChild(form);
+ this.form.appendChild(table);
+ this.form.appendChild(button);
+
+ document.body.appendChild(this.form);
}
+Form.prototype.remove = function () {
+ document.body.removeChild(this.form);
+};
+
+
function init() {
livePoints = new PointsList;
dyingPoints = new PointsList;
@@ -442,14 +484,15 @@ function init() {
div = document.createElement("div");
document.body.appendChild(div);
- pausePlot = new PausePlot(480, 240, 160);
+ pausePlot = new PausePlot(480, autoScale ? 240 : 500, 160, autoScale ? void 0 : 500);
}
function start() {
- benchmarkTimeLimit = document.getElementById("timelimit").value * 1000;
- document.body.removeChild(form);
+ benchmarkTimeLimit = form.timelimit.value * 1000;
+ autoScale = form.autoscale.checked;
+ form.remove();
init();
render();
}
-renderForm();
+var form = new Form();
diff --git a/src/3rdparty/v8/build/android.gypi b/src/3rdparty/v8/build/android.gypi
new file mode 100644
index 0000000..ffd0648
--- /dev/null
+++ b/src/3rdparty/v8/build/android.gypi
@@ -0,0 +1,225 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Definitions for building standalone V8 binaries to run on Android.
+# This is mostly excerpted from:
+# http://src.chromium.org/viewvc/chrome/trunk/src/build/common.gypi
+
+{
+ 'variables': {
+ # Location of Android NDK.
+ 'variables': {
+ 'variables': {
+ 'android_ndk_root%': '<!(/bin/echo -n $ANDROID_NDK_ROOT)',
+ 'android_target_arch%': 'arm', # target_arch in android terms.
+
+ # Switch between different build types, currently only '0' is
+ # supported.
+ 'android_build_type%': 0,
+ },
+ 'android_ndk_root%': '<(android_ndk_root)',
+ 'android_ndk_sysroot': '<(android_ndk_root)/platforms/android-9/arch-<(android_target_arch)',
+ 'android_build_type%': '<(android_build_type)',
+ },
+ 'android_ndk_root%': '<(android_ndk_root)',
+ 'android_ndk_sysroot': '<(android_ndk_sysroot)',
+ 'android_ndk_include': '<(android_ndk_sysroot)/usr/include',
+ 'android_ndk_lib': '<(android_ndk_sysroot)/usr/lib',
+ # Enable to use the system stlport, otherwise statically
+ # link the NDK one?
+ 'use_system_stlport%': '<(android_build_type)',
+ 'android_stlport_library': 'stlport_static',
+ # Copy it out one scope.
+ 'android_build_type%': '<(android_build_type)',
+
+ 'OS': 'android',
+ 'target_arch': 'arm',
+ 'v8_target_arch': 'arm',
+ 'armv7': 1,
+ 'arm_neon': 0,
+ 'arm_fpu': 'vfpv3',
+ }, # variables
+ 'target_defaults': {
+ 'defines': [
+ 'ANDROID',
+ 'V8_ANDROID_LOG_STDOUT',
+ ],
+ 'configurations': {
+ 'Release': {
+ 'cflags!': [
+ '-O2',
+ '-Os',
+ ],
+ 'cflags': [
+ '-fdata-sections',
+ '-ffunction-sections',
+ '-fomit-frame-pointer',
+ '-O3',
+ ],
+ }, # Release
+ }, # configurations
+ 'cflags': [ '-Wno-abi', '-Wall', '-W', '-Wno-unused-parameter',
+ '-Wnon-virtual-dtor', '-fno-rtti', '-fno-exceptions', ],
+ 'target_conditions': [
+ ['_toolset=="target"', {
+ 'cflags!': [
+ '-pthread', # Not supported by Android toolchain.
+ ],
+ 'cflags': [
+ '-U__linux__', # Don't allow toolchain to claim -D__linux__
+ '-ffunction-sections',
+ '-funwind-tables',
+ '-fstack-protector',
+ '-fno-short-enums',
+ '-finline-limit=64',
+ '-Wa,--noexecstack',
+ '-Wno-error=non-virtual-dtor', # TODO(michaelbai): Fix warnings.
+ # Note: This include is in cflags to ensure that it comes after
+ # all of the includes.
+ '-I<(android_ndk_include)',
+ '-march=armv7-a',
+ '-mtune=cortex-a8',
+ '-mfpu=vfp3',
+ ],
+ 'defines': [
+ 'ANDROID',
+ #'__GNU_SOURCE=1', # Necessary for clone()
+ 'USE_STLPORT=1',
+ '_STLP_USE_PTR_SPECIALIZATIONS=1',
+ 'HAVE_OFF64_T',
+ 'HAVE_SYS_UIO_H',
+ 'ANDROID_BINSIZE_HACK', # Enable temporary hacks to reduce binsize.
+ ],
+ 'ldflags!': [
+ '-pthread', # Not supported by Android toolchain.
+ ],
+ 'ldflags': [
+ '-nostdlib',
+ '-Wl,--no-undefined',
+ '-Wl,--icf=safe', # Enable identical code folding to reduce size
+ # Don't export symbols from statically linked libraries.
+ '-Wl,--exclude-libs=ALL',
+ ],
+ 'libraries!': [
+ '-lrt', # librt is built into Bionic.
+ # Not supported by Android toolchain.
+ # Where do these come from? Can't find references in
+ # any Chromium gyp or gypi file. Maybe they come from
+ # gyp itself?
+ '-lpthread', '-lnss3', '-lnssutil3', '-lsmime3', '-lplds4', '-lplc4', '-lnspr4',
+ ],
+ 'libraries': [
+ '-l<(android_stlport_library)',
+ # Manually link the libgcc.a that the cross compiler uses.
+ '<!($CC -print-libgcc-file-name)',
+ '-lc',
+ '-ldl',
+ '-lstdc++',
+ '-lm',
+ ],
+ 'conditions': [
+ ['android_build_type==0', {
+ 'ldflags': [
+ '-Wl,-rpath-link=<(android_ndk_lib)',
+ '-L<(android_ndk_lib)',
+ ],
+ }],
+ # NOTE: The stlport header include paths below are specified in
+ # cflags rather than include_dirs because they need to come
+ # after include_dirs. Think of them like system headers, but
+ # don't use '-isystem' because the arm-linux-androideabi-4.4.3
+ # toolchain (circa Gingerbread) will exhibit strange errors.
+ # The include ordering here is important; change with caution.
+ ['use_system_stlport==0', {
+ 'cflags': [
+ '-I<(android_ndk_root)/sources/cxx-stl/stlport/stlport',
+ ],
+ 'conditions': [
+ ['target_arch=="arm" and armv7==1', {
+ 'ldflags': [
+ '-L<(android_ndk_root)/sources/cxx-stl/stlport/libs/armeabi-v7a',
+ ],
+ }],
+ ['target_arch=="arm" and armv7==0', {
+ 'ldflags': [
+ '-L<(android_ndk_root)/sources/cxx-stl/stlport/libs/armeabi',
+ ],
+ }],
+ ['target_arch=="ia32"', {
+ 'ldflags': [
+ '-L<(android_ndk_root)/sources/cxx-stl/stlport/libs/x86',
+ ],
+ }],
+ ],
+ }],
+ ['target_arch=="ia32"', {
+ # The x86 toolchain currently has problems with stack-protector.
+ 'cflags!': [
+ '-fstack-protector',
+ ],
+ 'cflags': [
+ '-fno-stack-protector',
+ ],
+ }],
+ ],
+ 'target_conditions': [
+ ['_type=="executable"', {
+ 'ldflags': [
+ '-Bdynamic',
+ '-Wl,-dynamic-linker,/system/bin/linker',
+ '-Wl,--gc-sections',
+ '-Wl,-z,nocopyreloc',
+ # crtbegin_dynamic.o should be the last item in ldflags.
+ '<(android_ndk_lib)/crtbegin_dynamic.o',
+ ],
+ 'libraries': [
+ # crtend_android.o needs to be the last item in libraries.
+ # Do not add any libraries after this!
+ '<(android_ndk_lib)/crtend_android.o',
+ ],
+ }],
+ ['_type=="shared_library"', {
+ 'ldflags': [
+ '-Wl,-shared,-Bsymbolic',
+ ],
+ }],
+ ],
+ }], # _toolset=="target"
+ # Settings for building host targets using the system toolchain.
+ ['_toolset=="host"', {
+ 'cflags': [ '-m32', '-pthread' ],
+ 'ldflags': [ '-m32', '-pthread' ],
+ 'ldflags!': [
+ '-Wl,-z,noexecstack',
+ '-Wl,--gc-sections',
+ '-Wl,-O1',
+ '-Wl,--as-needed',
+ ],
+ }],
+ ], # target_conditions
+ }, # target_defaults
+}
\ No newline at end of file
diff --git a/src/3rdparty/v8/build/common.gypi b/src/3rdparty/v8/build/common.gypi
index 230b1fd..3016d0c 100644
--- a/src/3rdparty/v8/build/common.gypi
+++ b/src/3rdparty/v8/build/common.gypi
@@ -1,4 +1,4 @@
-# Copyright 2011 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -50,12 +50,21 @@
# probing when running on the target.
'v8_can_use_vfp_instructions%': 'false',
+ # Similar to vfp but on MIPS.
+ 'v8_can_use_fpu_instructions%': 'true',
+
# Setting v8_use_arm_eabi_hardfloat to true will turn on V8 support for ARM
# EABI calling convention where double arguments are passed in VFP
# registers. Note that the GCC flag '-mfloat-abi=hard' should be used as
# well when compiling for the ARM target.
'v8_use_arm_eabi_hardfloat%': 'false',
+ # Similar to the ARM hard float ABI but on MIPS.
+ 'v8_use_mips_abi_hardfloat%': 'true',
+
+ # Default arch variant for MIPS.
+ 'mips_arch_variant%': 'mips32r2',
+
'v8_enable_debugger_support%': 1,
'v8_enable_disassembler%': 0,
@@ -67,6 +76,9 @@
# Enable profiling support. Only required on Windows.
'v8_enable_prof%': 0,
+ # Some versions of GCC 4.5 seem to need -fno-strict-aliasing.
+ 'v8_no_strict_aliasing%': 0,
+
# Chrome needs this definition unconditionally. For standalone V8 builds,
# it's handled in build/standalone.gypi.
'want_separate_host_toolset%': 1,
@@ -76,6 +88,11 @@
'v8_use_liveobjectlist%': 'false',
'werror%': '-Werror',
+ # With post mortem support enabled, metadata is embedded into libv8 that
+ # describes various parameters of the VM for use by debuggers. See
+ # tools/gen-postmortem-metadata.py for details.
+ 'v8_postmortem_support%': 'false',
+
# For a shared library build, results in "libv8-<(soname_version).so".
'soname_version%': '',
},
@@ -125,8 +142,10 @@
'USE_EABI_HARDFLOAT=1',
'CAN_USE_VFP_INSTRUCTIONS',
],
- 'cflags': [
- '-mfloat-abi=hard',
+ 'target_conditions': [
+ ['_toolset=="target"', {
+ 'cflags': ['-mfloat-abi=hard',],
+ }],
],
}, {
'defines': [
@@ -135,7 +154,7 @@
}],
# The ARM assembler assumes the host is 32 bits,
# so force building 32-bit host tools.
- ['host_arch=="x64"', {
+ ['host_arch=="x64" or OS=="android"', {
'target_conditions': [
['_toolset=="host"', {
'cflags': ['-m32'],
@@ -154,6 +173,64 @@
'defines': [
'V8_TARGET_ARCH_MIPS',
],
+ 'conditions': [
+ [ 'target_arch=="mips"', {
+ 'target_conditions': [
+ ['_toolset=="target"', {
+ 'cflags': ['-EL'],
+ 'ldflags': ['-EL'],
+ 'conditions': [
+ [ 'v8_use_mips_abi_hardfloat=="true"', {
+ 'cflags': ['-mhard-float'],
+ 'ldflags': ['-mhard-float'],
+ }, {
+ 'cflags': ['-msoft-float'],
+ 'ldflags': ['-msoft-float'],
+ }],
+ ['mips_arch_variant=="mips32r2"', {
+ 'cflags': ['-mips32r2', '-Wa,-mips32r2'],
+ }],
+ ['mips_arch_variant=="loongson"', {
+ 'cflags': ['-mips3', '-Wa,-mips3'],
+ }, {
+ 'cflags': ['-mips32', '-Wa,-mips32'],
+ }],
+ ],
+ }],
+ ],
+ }],
+ [ 'v8_can_use_fpu_instructions=="true"', {
+ 'defines': [
+ 'CAN_USE_FPU_INSTRUCTIONS',
+ ],
+ }],
+ [ 'v8_use_mips_abi_hardfloat=="true"', {
+ 'defines': [
+ '__mips_hard_float=1',
+ 'CAN_USE_FPU_INSTRUCTIONS',
+ ],
+ }, {
+ 'defines': [
+ '__mips_soft_float=1'
+ ],
+ }],
+ ['mips_arch_variant=="mips32r2"', {
+ 'defines': ['_MIPS_ARCH_MIPS32R2',],
+ }],
+ ['mips_arch_variant=="loongson"', {
+ 'defines': ['_MIPS_ARCH_LOONGSON',],
+ }],
+ # The MIPS assembler assumes the host is 32 bits,
+ # so force building 32-bit host tools.
+ ['host_arch=="x64"', {
+ 'target_conditions': [
+ ['_toolset=="host"', {
+ 'cflags': ['-m32'],
+ 'ldflags': ['-m32'],
+ }],
+ ],
+ }],
+ ],
}],
['v8_target_arch=="x64"', {
'defines': [
@@ -161,6 +238,19 @@
],
}],
],
+ }, { # Section for OS=="mac".
+ 'conditions': [
+ ['target_arch=="ia32"', {
+ 'xcode_settings': {
+ 'ARCHS': ['i386'],
+ }
+ }],
+ ['target_arch=="x64"', {
+ 'xcode_settings': {
+ 'ARCHS': ['x86_64'],
+ }
+ }],
+ ],
}],
['v8_use_liveobjectlist=="true"', {
'defines': [
@@ -175,6 +265,11 @@
'COMPRESS_STARTUP_DATA_BZ2',
],
}],
+ ['OS=="win"', {
+ 'defines': [
+ 'WIN32',
+ ],
+ }],
['OS=="win" and v8_enable_prof==1', {
'msvs_settings': {
'VCLinkerTool': {
@@ -182,18 +277,22 @@
},
},
}],
- ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
+ ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris" \
+ or OS=="netbsd"', {
'conditions': [
[ 'target_arch=="ia32"', {
'cflags': [ '-m32' ],
'ldflags': [ '-m32' ],
}],
- ],
+ [ 'v8_no_strict_aliasing==1', {
+ 'cflags': [ '-fno-strict-aliasing' ],
+ }],
+ ], # conditions
}],
['OS=="solaris"', {
'defines': [ '__C99FEATURES__=1' ], # isinf() etc.
}],
- ],
+ ], # conditions
'configurations': {
'Debug': {
'defines': [
@@ -226,15 +325,19 @@
['OS=="freebsd" or OS=="openbsd"', {
'cflags': [ '-I/usr/local/include' ],
}],
- ['OS=="linux" or OS=="freebsd" or OS=="openbsd"', {
+ ['OS=="netbsd"', {
+ 'cflags': [ '-I/usr/pkg/include' ],
+ }],
+ ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="netbsd"', {
'cflags': [ '-Wall', '<(werror)', '-W', '-Wno-unused-parameter',
- '-Wnon-virtual-dtor' ],
+ '-Wnon-virtual-dtor', '-Woverloaded-virtual' ],
}],
],
- },
+ }, # Debug
'Release': {
'conditions': [
- ['OS=="linux" or OS=="freebsd" or OS=="openbsd"', {
+ ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="netbsd" \
+ or OS=="android"', {
'cflags!': [
'-O2',
'-Os',
@@ -246,7 +349,7 @@
'-O3',
],
'conditions': [
- [ 'gcc_version==44', {
+ [ 'gcc_version==44 and clang==0', {
'cflags': [
# Avoid crashes with gcc 4.4 in the v8 test suite.
'-fno-tree-vrp',
@@ -257,6 +360,9 @@
['OS=="freebsd" or OS=="openbsd"', {
'cflags': [ '-I/usr/local/include' ],
}],
+ ['OS=="netbsd"', {
+ 'cflags': [ '-I/usr/pkg/include' ],
+ }],
['OS=="mac"', {
'xcode_settings': {
'GCC_OPTIMIZATION_LEVEL': '3', # -O3
@@ -267,7 +373,7 @@
# is specified explicitly.
'GCC_STRICT_ALIASING': 'YES',
},
- }],
+ }], # OS=="mac"
['OS=="win"', {
'msvs_configuration_attributes': {
'OutputDirectory': '<(DEPTH)\\build\\$(ConfigurationName)',
@@ -302,9 +408,9 @@
# 'StackReserveSize': '297152',
},
},
- }],
- ],
- },
- },
- },
+ }], # OS=="win"
+ ], # conditions
+ }, # Release
+ }, # configurations
+ }, # target_defaults
}
diff --git a/src/3rdparty/v8/build/gyp_v8 b/src/3rdparty/v8/build/gyp_v8
index dfdbe3f..0fe3403 100755
--- a/src/3rdparty/v8/build/gyp_v8
+++ b/src/3rdparty/v8/build/gyp_v8
@@ -1,6 +1,6 @@
#!/usr/bin/python
#
-# Copyright 2010 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -156,7 +156,12 @@ if __name__ == '__main__':
# Generate for the architectures supported on the given platform.
gyp_args = list(args)
- gyp_args.append('-Dtarget_arch=ia32')
+ target_arch = None
+ for p in gyp_args:
+ if p.find('-Dtarget_arch=') == 0:
+ target_arch = p
+ if target_arch is None:
+ gyp_args.append('-Dtarget_arch=ia32')
if utils.GuessOS() == 'linux':
gyp_args.append('-S-ia32')
run_gyp(gyp_args)
@@ -171,3 +176,8 @@ if __name__ == '__main__':
gyp_args.append('-I' + v8_root + '/build/armu.gypi')
gyp_args.append('-S-armu')
run_gyp(gyp_args)
+
+ gyp_args = list(args)
+ gyp_args.append('-I' + v8_root + '/build/mipsu.gypi')
+ gyp_args.append('-S-mipsu')
+ run_gyp(gyp_args)
diff --git a/src/3rdparty/v8/build/mipsu.gypi b/src/3rdparty/v8/build/mipsu.gypi
new file mode 100644
index 0000000..637ff84
--- /dev/null
+++ b/src/3rdparty/v8/build/mipsu.gypi
@@ -0,0 +1,33 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+{
+ 'variables': {
+ 'target_arch': 'ia32',
+ 'v8_target_arch': 'mips',
+ },
+}
diff --git a/src/3rdparty/v8/build/standalone.gypi b/src/3rdparty/v8/build/standalone.gypi
index f24d9f8..e9b0565 100644
--- a/src/3rdparty/v8/build/standalone.gypi
+++ b/src/3rdparty/v8/build/standalone.gypi
@@ -1,4 +1,4 @@
-# Copyright 2011 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -37,13 +37,16 @@
'variables': {
'variables': {
'conditions': [
- [ 'OS=="linux" or OS=="freebsd" or OS=="openbsd"', {
- # This handles the Linux platforms we generally deal with. Anything
- # else gets passed through, which probably won't work very well; such
- # hosts should pass an explicit target_arch to gyp.
+ ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="netbsd"', {
+ # This handles the Linux platforms we generally deal with.
+ # Anything else gets passed through, which probably won't work
+ # very well; such hosts should pass an explicit target_arch
+ # to gyp.
'host_arch%':
- '<!(uname -m | sed -e "s/i.86/ia32/;s/x86_64/x64/;s/amd64/x64/;s/arm.*/arm/")',
- }, { # OS!="linux" and OS!="freebsd" and OS!="openbsd"
+ '<!(uname -m | sed -e "s/i.86/ia32/;\
+ s/x86_64/x64/;s/amd64/x64/;s/arm.*/arm/;s/mips.*/mips/")',
+ }, {
+ # OS!="linux" and OS!="freebsd" and OS!="openbsd" and OS!="netbsd"
'host_arch%': 'ia32',
}],
],
@@ -61,6 +64,7 @@
'werror%': '-Werror',
'conditions': [
['(v8_target_arch=="arm" and host_arch!="arm") or \
+ (v8_target_arch=="mips" and host_arch!="mips") or \
(v8_target_arch=="x64" and host_arch!="x64")', {
'want_separate_host_toolset': 1,
}, {
@@ -77,7 +81,8 @@
},
},
'conditions': [
- [ 'OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
+ ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris" \
+ or OS=="netbsd"', {
'target_defaults': {
'cflags': [ '-Wall', '<(werror)', '-W', '-Wno-unused-parameter',
'-Wnon-virtual-dtor', '-pthread', '-fno-rtti',
@@ -95,11 +100,12 @@
}],
],
},
- }], # 'OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"'
+ }],
+ # 'OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"
+ # or OS=="netbsd"'
['OS=="win"', {
'target_defaults': {
'defines': [
- 'WIN32',
'_CRT_SECURE_NO_DEPRECATE',
'_CRT_NONSTDC_NO_DEPRECATE',
],
diff --git a/src/3rdparty/v8/include/v8-debug.h b/src/3rdparty/v8/include/v8-debug.h
index 9e85dc4..9e85dc4 100644..100755
--- a/src/3rdparty/v8/include/v8-debug.h
+++ b/src/3rdparty/v8/include/v8-debug.h
diff --git a/src/3rdparty/v8/include/v8-profiler.h b/src/3rdparty/v8/include/v8-profiler.h
index f67646f..e36659f 100644
--- a/src/3rdparty/v8/include/v8-profiler.h
+++ b/src/3rdparty/v8/include/v8-profiler.h
@@ -64,6 +64,7 @@
*/
namespace v8 {
+typedef uint32_t SnapshotObjectId;
/**
* CpuProfileNode represents a node in a call graph.
@@ -219,8 +220,9 @@ class V8EXPORT HeapGraphEdge {
// (e.g. parts of a ConsString).
kHidden = 4, // A link that is needed for proper sizes
// calculation, but may be hidden from user.
- kShortcut = 5 // A link that must not be followed during
+ kShortcut = 5, // A link that must not be followed during
// sizes calculation.
+ kWeak = 6 // A weak reference (ignored by the GC).
};
/** Returns edge type (see HeapGraphEdge::Type). */
@@ -254,7 +256,9 @@ class V8EXPORT HeapGraphNode {
kClosure = 5, // Function closure.
kRegExp = 6, // RegExp.
kHeapNumber = 7, // Number stored in the heap.
- kNative = 8 // Native object (not from V8 heap).
+ kNative = 8, // Native object (not from V8 heap).
+ kSynthetic = 9 // Synthetic object, usualy used for grouping
+ // snapshot items together.
};
/** Returns node type (see HeapGraphNode::Type). */
@@ -271,7 +275,7 @@ class V8EXPORT HeapGraphNode {
* Returns node id. For the same heap object, the id remains the same
* across all snapshots.
*/
- uint64_t GetId() const;
+ SnapshotObjectId GetId() const;
/** Returns node's own size, in bytes. */
int GetSelfSize() const;
@@ -281,14 +285,8 @@ class V8EXPORT HeapGraphNode {
* the objects that are reachable only from this object. In other
* words, the size of memory that will be reclaimed having this node
* collected.
- *
- * Exact retained size calculation has O(N) (number of nodes)
- * computational complexity, while approximate has O(1). It is
- * assumed that initially heap profiling tools provide approximate
- * sizes for all nodes, and then exact sizes are calculated for the
- * most 'interesting' nodes.
*/
- int GetRetainedSize(bool exact) const;
+ int GetRetainedSize() const;
/** Returns child nodes count of the node. */
int GetChildrenCount() const;
@@ -341,7 +339,7 @@ class V8EXPORT HeapSnapshot {
const HeapGraphNode* GetRoot() const;
/** Returns a node by its id. */
- const HeapGraphNode* GetNodeById(uint64_t id) const;
+ const HeapGraphNode* GetNodeById(SnapshotObjectId id) const;
/** Returns total nodes count in the snapshot. */
int GetNodesCount() const;
@@ -349,6 +347,9 @@ class V8EXPORT HeapSnapshot {
/** Returns a node by index. */
const HeapGraphNode* GetNode(int index) const;
+ /** Returns a max seen JS object Id. */
+ SnapshotObjectId GetMaxSnapshotJSObjectId() const;
+
/**
* Deletes the snapshot and removes it from HeapProfiler's list.
* All pointers to nodes, edges and paths previously returned become
@@ -433,6 +434,9 @@ class V8EXPORT HeapProfiler {
* handle.
*/
static const uint16_t kPersistentHandleNoClassId = 0;
+
+ /** Returns the number of currently existing persistent handles. */
+ static int GetPersistentHandleCount();
};
@@ -475,12 +479,23 @@ class V8EXPORT RetainedObjectInfo { // NOLINT
virtual intptr_t GetHash() = 0;
/**
- * Returns human-readable label. It must be a NUL-terminated UTF-8
+ * Returns human-readable label. It must be a null-terminated UTF-8
* encoded string. V8 copies its contents during a call to GetLabel.
*/
virtual const char* GetLabel() = 0;
/**
+ * Returns human-readable group label. It must be a null-terminated UTF-8
+ * encoded string. V8 copies its contents during a call to GetGroupLabel.
+ * Heap snapshot generator will collect all the group names, create
+ * top level entries with these names and attach the objects to the
+ * corresponding top level group objects. There is a default
+ * implementation which is required because embedders don't have their
+ * own implementation yet.
+ */
+ virtual const char* GetGroupLabel() { return GetLabel(); }
+
+ /**
* Returns element count in case if a global handle retains
* a subgraph by holding one of its nodes.
*/
diff --git a/src/3rdparty/v8/include/v8.h b/src/3rdparty/v8/include/v8.h
index a7b5c8a..33179f5 100644
--- a/src/3rdparty/v8/include/v8.h
+++ b/src/3rdparty/v8/include/v8.h
@@ -587,12 +587,6 @@ class ScriptOrigin {
*/
class V8EXPORT Script {
public:
- enum CompileFlags {
- Default = 0x00,
- QmlMode = 0x01,
- NativeMode = 0x02
- };
-
/**
* Compiles the specified script (context-independent).
*
@@ -611,8 +605,7 @@ class V8EXPORT Script {
static Local<Script> New(Handle<String> source,
ScriptOrigin* origin = NULL,
ScriptData* pre_data = NULL,
- Handle<String> script_data = Handle<String>(),
- CompileFlags = Default);
+ Handle<String> script_data = Handle<String>());
/**
* Compiles the specified script using the specified file name
@@ -625,8 +618,7 @@ class V8EXPORT Script {
* will use the currently entered context).
*/
static Local<Script> New(Handle<String> source,
- Handle<Value> file_name,
- CompileFlags = Default);
+ Handle<Value> file_name);
/**
* Compiles the specified script (bound to current context).
@@ -647,8 +639,7 @@ class V8EXPORT Script {
static Local<Script> Compile(Handle<String> source,
ScriptOrigin* origin = NULL,
ScriptData* pre_data = NULL,
- Handle<String> script_data = Handle<String>(),
- CompileFlags = Default);
+ Handle<String> script_data = Handle<String>());
/**
* Compiles the specified script using the specified file name
@@ -665,8 +656,7 @@ class V8EXPORT Script {
*/
static Local<Script> Compile(Handle<String> source,
Handle<Value> file_name,
- Handle<String> script_data = Handle<String>(),
- CompileFlags = Default);
+ Handle<String> script_data = Handle<String>());
/**
* Runs the script returning the resulting value. If the script is
@@ -676,7 +666,6 @@ class V8EXPORT Script {
* compiled.
*/
Local<Value> Run();
- Local<Value> Run(Handle<Object> qml);
/**
* Returns the script id value.
@@ -967,11 +956,6 @@ class Value : public Data {
*/
V8EXPORT bool IsRegExp() const;
- /**
- * Returns true if this value is an Error.
- */
- V8EXPORT bool IsError() const;
-
V8EXPORT Local<Boolean> ToBoolean() const;
V8EXPORT Local<Number> ToNumber() const;
V8EXPORT Local<String> ToString() const;
@@ -1037,47 +1021,12 @@ class String : public Primitive {
V8EXPORT int Utf8Length() const;
/**
- * Returns the hash of this string.
+ * A fast conservative check for non-ASCII characters. May
+ * return true even for ASCII strings, but if it returns
+ * false you can be sure that all characters are in the range
+ * 0-127.
*/
- V8EXPORT uint32_t Hash() const;
-
- struct CompleteHashData {
- CompleteHashData() : length(0), hash(0), symbol_id(0) {}
- int length;
- uint32_t hash;
- uint32_t symbol_id;
- };
-
- /**
- * Returns the "complete" hash of the string. This is
- * all the information about the string needed to implement
- * a very efficient hash keyed on the string.
- *
- * The members of CompleteHashData are:
- * length: The length of the string. Equivalent to Length()
- * hash: The hash of the string. Equivalent to Hash()
- * symbol_id: If the string is a sequential symbol, the symbol
- * id, otherwise 0. If the symbol ids of two strings are
- * the same (and non-zero) the two strings are identical.
- * If the symbol ids are different the strings may still be
- * identical, but an Equals() check must be performed.
- */
- V8EXPORT CompleteHashData CompleteHash() const;
-
- /**
- * Compute a hash value for the passed UTF16 string
- * data.
- */
- V8EXPORT static uint32_t ComputeHash(uint16_t *string, int length);
- V8EXPORT static uint32_t ComputeHash(char *string, int length);
-
- /**
- * Returns true if this string is equal to the external
- * string data provided.
- */
- V8EXPORT bool Equals(uint16_t *string, int length);
- V8EXPORT bool Equals(char *string, int length);
- inline bool Equals(Handle<Value> that) const { return v8::Value::Equals(that); }
+ V8EXPORT bool MayContainNonAscii() const;
/**
* Write the contents of the string to an external buffer.
@@ -1110,8 +1059,6 @@ class String : public Primitive {
NO_NULL_TERMINATION = 2
};
- V8EXPORT uint16_t GetCharacter(int index);
-
// 16-bit character codes.
V8EXPORT int Write(uint16_t* buffer,
int start = 0,
@@ -1259,7 +1206,7 @@ class String : public Primitive {
* passed in as parameters.
*/
V8EXPORT static Local<String> Concat(Handle<String> left,
- Handle<String>right);
+ Handle<String> right);
/**
* Creates a new external string using the data defined in the given
@@ -1613,25 +1560,6 @@ class Object : public Value {
/** Sets a native pointer in an internal field. */
V8EXPORT void SetPointerInInternalField(int index, void* value);
- class V8EXPORT ExternalResource { // NOLINT
- public:
- ExternalResource() {}
- virtual ~ExternalResource() {}
-
- protected:
- virtual void Dispose() { delete this; }
-
- private:
- // Disallow copying and assigning.
- ExternalResource(const ExternalResource&);
- void operator=(const ExternalResource&);
-
- friend class v8::internal::Heap;
- };
-
- V8EXPORT void SetExternalResource(ExternalResource *);
- V8EXPORT ExternalResource *GetExternalResource();
-
// Testers for local properties.
V8EXPORT bool HasOwnProperty(Handle<String> key);
V8EXPORT bool HasRealNamedProperty(Handle<String> key);
@@ -1812,13 +1740,28 @@ class Function : public Object {
V8EXPORT Handle<Value> GetName() const;
/**
+ * Name inferred from variable or property assignment of this function.
+ * Used to facilitate debugging and profiling of JavaScript code written
+ * in an OO style, where many functions are anonymous but are assigned
+ * to object properties.
+ */
+ V8EXPORT Handle<Value> GetInferredName() const;
+
+ /**
* Returns zero based line number of function body and
* kLineOffsetNotFound if no information available.
*/
V8EXPORT int GetScriptLineNumber() const;
+ /**
+ * Returns zero based column number of function body and
+ * kLineOffsetNotFound if no information available.
+ */
+ V8EXPORT int GetScriptColumnNumber() const;
+ V8EXPORT Handle<Value> GetScriptId() const;
V8EXPORT ScriptOrigin GetScriptOrigin() const;
static inline Function* Cast(Value* obj);
V8EXPORT static const int kLineOffsetNotFound;
+
private:
V8EXPORT Function();
V8EXPORT static void CheckCast(Value* obj);
@@ -2340,7 +2283,6 @@ class V8EXPORT FunctionTemplate : public Template {
NamedPropertyQuery query,
NamedPropertyDeleter remover,
NamedPropertyEnumerator enumerator,
- bool is_fallback,
Handle<Value> data);
void SetIndexedInstancePropertyHandler(IndexedPropertyGetter getter,
IndexedPropertySetter setter,
@@ -2424,12 +2366,6 @@ class V8EXPORT ObjectTemplate : public Template {
NamedPropertyDeleter deleter = 0,
NamedPropertyEnumerator enumerator = 0,
Handle<Value> data = Handle<Value>());
- void SetFallbackPropertyHandler(NamedPropertyGetter getter,
- NamedPropertySetter setter = 0,
- NamedPropertyQuery query = 0,
- NamedPropertyDeleter deleter = 0,
- NamedPropertyEnumerator enumerator = 0,
- Handle<Value> data = Handle<Value>());
/**
* Sets an indexed property handler on the object template.
@@ -2501,18 +2437,6 @@ class V8EXPORT ObjectTemplate : public Template {
*/
void SetInternalFieldCount(int value);
- /**
- * Sets whether the object can store an "external resource" object.
- */
- bool HasExternalResource();
- void SetHasExternalResource(bool value);
-
- /**
- * Mark object instances of the template as using the user object
- * comparison callback.
- */
- void MarkAsUseUserObjectComparison();
-
private:
ObjectTemplate();
static Local<ObjectTemplate> New(Handle<FunctionTemplate> constructor);
@@ -2579,7 +2503,7 @@ class V8EXPORT Extension { // NOLINT
int source_length = -1);
virtual ~Extension() { }
virtual v8::Handle<v8::FunctionTemplate>
- GetNativeFunction(v8::Handle<v8::String>) {
+ GetNativeFunction(v8::Handle<v8::String> name) {
return v8::Handle<v8::FunctionTemplate>();
}
@@ -2727,15 +2651,14 @@ typedef void (*MemoryAllocationCallback)(ObjectSpace space,
AllocationAction action,
int size);
+// --- Leave Script Callback ---
+typedef void (*CallCompletedCallback)();
+
// --- Failed Access Check Callback ---
typedef void (*FailedAccessCheckCallback)(Local<Object> target,
AccessType type,
Local<Value> data);
-// --- User Object Comparisoa nCallback ---
-typedef bool (*UserObjectComparisonCallback)(Local<Object> lhs,
- Local<Object> rhs);
-
// --- AllowCodeGenerationFromStrings callbacks ---
/**
@@ -2810,7 +2733,7 @@ class RetainedObjectInfo;
* default isolate is implicitly created and entered. The embedder
* can create additional isolates and use them in parallel in multiple
* threads. An isolate can be entered by at most one thread at any
- * given time. The Locker/Unlocker API can be used to synchronize.
+ * given time. The Locker/Unlocker API must be used to synchronize.
*/
class V8EXPORT Isolate {
public:
@@ -2941,6 +2864,31 @@ class V8EXPORT StartupDataDecompressor { // NOLINT
*/
typedef bool (*EntropySource)(unsigned char* buffer, size_t length);
+
+/**
+ * ReturnAddressLocationResolver is used as a callback function when v8 is
+ * resolving the location of a return address on the stack. Profilers that
+ * change the return address on the stack can use this to resolve the stack
+ * location to whereever the profiler stashed the original return address.
+ * When invoked, return_addr_location will point to a location on stack where
+ * a machine return address resides, this function should return either the
+ * same pointer, or a pointer to the profiler's copy of the original return
+ * address.
+ */
+typedef uintptr_t (*ReturnAddressLocationResolver)(
+ uintptr_t return_addr_location);
+
+
+/**
+ * Interface for iterating though all external resources in the heap.
+ */
+class V8EXPORT ExternalResourceVisitor { // NOLINT
+ public:
+ virtual ~ExternalResourceVisitor() {}
+ virtual void VisitExternalString(Handle<String> string) {}
+};
+
+
/**
* Container class for static utility functions.
*/
@@ -3062,9 +3010,6 @@ class V8EXPORT V8 {
/** Callback function for reporting failed access checks.*/
static void SetFailedAccessCheckCallbackFunction(FailedAccessCheckCallback);
- /** Callback for user object comparisons */
- static void SetUserObjectComparisonCallbackFunction(UserObjectComparisonCallback);
-
/**
* Enables the host application to receive a notification before a
* garbage collection. Allocations are not allowed in the
@@ -3132,12 +3077,25 @@ class V8EXPORT V8 {
AllocationAction action);
/**
- * This function removes callback which was installed by
- * AddMemoryAllocationCallback function.
+ * Removes callback that was installed by AddMemoryAllocationCallback.
*/
static void RemoveMemoryAllocationCallback(MemoryAllocationCallback callback);
/**
+ * Adds a callback to notify the host application when a script finished
+ * running. If a script re-enters the runtime during executing, the
+ * CallCompletedCallback is only invoked when the outer-most script
+ * execution ends. Executing scripts inside the callback do not trigger
+ * further callbacks.
+ */
+ static void AddCallCompletedCallback(CallCompletedCallback callback);
+
+ /**
+ * Removes callback that was installed by AddCallCompletedCallback.
+ */
+ static void RemoveCallCompletedCallback(CallCompletedCallback callback);
+
+ /**
* Allows the host application to group objects together. If one
* object in the group is alive, all objects in the group are alive.
* After each garbage collection, object groups are removed. It is
@@ -3175,6 +3133,13 @@ class V8EXPORT V8 {
static void SetEntropySource(EntropySource source);
/**
+ * Allows the host application to provide a callback that allows v8 to
+ * cooperate with a profiler that rewrites return addresses on stack.
+ */
+ static void SetReturnAddressLocationResolver(
+ ReturnAddressLocationResolver return_address_resolver);
+
+ /**
* Adjusts the amount of registered external memory. Used to give
* V8 an indication of the amount of externally allocated memory
* that is kept alive by JavaScript objects. V8 uses this to decide
@@ -3287,14 +3252,25 @@ class V8EXPORT V8 {
static void GetHeapStatistics(HeapStatistics* heap_statistics);
/**
+ * Iterates through all external resources referenced from current isolate
+ * heap. This method is not expected to be used except for debugging purposes
+ * and may be quite slow.
+ */
+ static void VisitExternalResources(ExternalResourceVisitor* visitor);
+
+ /**
* Optional notification that the embedder is idle.
* V8 uses the notification to reduce memory footprint.
* This call can be used repeatedly if the embedder remains idle.
* Returns true if the embedder should stop calling IdleNotification
* until real work has been done. This indicates that V8 has done
* as much cleanup as it will be able to do.
+ *
+ * The hint argument specifies the amount of work to be done in the function
+ * on scale from 1 to 1000. There is no guarantee that the actual work will
+ * match the hint.
*/
- static bool IdleNotification();
+ static bool IdleNotification(int hint = 1000);
/**
* Optional notification that the system is running low on memory.
@@ -3535,8 +3511,6 @@ class V8EXPORT Context {
* JavaScript frames an empty handle is returned.
*/
static Local<Context> GetCalling();
- static Local<Object> GetCallingQmlGlobal();
- static Local<Value> GetCallingScriptData();
/**
* Sets the security token for the context. To access an object in
@@ -3594,6 +3568,12 @@ class V8EXPORT Context {
void AllowCodeGenerationFromStrings(bool allow);
/**
+ * Returns true if code generation from strings is allowed for the context.
+ * For more details see AllowCodeGenerationFromStrings(bool) documentation.
+ */
+ bool IsCodeGenerationFromStringsAllowed();
+
+ /**
* Stack-allocated class which sets the execution context for all
* operations executed within a local scope.
*/
@@ -3622,7 +3602,9 @@ class V8EXPORT Context {
* accessing handles or holding onto object pointers obtained
* from V8 handles while in the particular V8 isolate. It is up
* to the user of V8 to ensure (perhaps with locking) that this
- * constraint is not violated.
+ * constraint is not violated. In addition to any other synchronization
+ * mechanism that may be used, the v8::Locker and v8::Unlocker classes
+ * must be used to signal thead switches to V8.
*
* v8::Locker is a scoped lock object. While it's
* active (i.e. between its construction and destruction) the current thread is
@@ -3805,8 +3787,8 @@ class V8EXPORT ActivityControl { // NOLINT
namespace internal {
-static const int kApiPointerSize = sizeof(void*); // NOLINT
-static const int kApiIntSize = sizeof(int); // NOLINT
+const int kApiPointerSize = sizeof(void*); // NOLINT
+const int kApiIntSize = sizeof(int); // NOLINT
// Tag information for HeapObject.
const int kHeapObjectTag = 1;
@@ -3897,7 +3879,7 @@ class Internals {
static const int kFullStringRepresentationMask = 0x07;
static const int kExternalTwoByteRepresentationTag = 0x02;
- static const int kJSObjectType = 0xa6;
+ static const int kJSObjectType = 0xaa;
static const int kFirstNonstringType = 0x80;
static const int kForeignType = 0x85;
@@ -3946,13 +3928,13 @@ class Internals {
return *reinterpret_cast<T*>(addr);
}
- static inline bool CanCastToHeapObject(void*) { return false; }
- static inline bool CanCastToHeapObject(Context*) { return true; }
- static inline bool CanCastToHeapObject(String*) { return true; }
- static inline bool CanCastToHeapObject(Object*) { return true; }
- static inline bool CanCastToHeapObject(Message*) { return true; }
- static inline bool CanCastToHeapObject(StackTrace*) { return true; }
- static inline bool CanCastToHeapObject(StackFrame*) { return true; }
+ static inline bool CanCastToHeapObject(void* o) { return false; }
+ static inline bool CanCastToHeapObject(Context* o) { return true; }
+ static inline bool CanCastToHeapObject(String* o) { return true; }
+ static inline bool CanCastToHeapObject(Object* o) { return true; }
+ static inline bool CanCastToHeapObject(Message* o) { return true; }
+ static inline bool CanCastToHeapObject(StackTrace* o) { return true; }
+ static inline bool CanCastToHeapObject(StackFrame* o) { return true; }
};
} // namespace internal
diff --git a/src/3rdparty/v8/include/v8stdint.h b/src/3rdparty/v8/include/v8stdint.h
index 50b4f29..7c12e1f 100644
--- a/src/3rdparty/v8/include/v8stdint.h
+++ b/src/3rdparty/v8/include/v8stdint.h
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -30,6 +30,7 @@
#ifndef V8STDINT_H_
#define V8STDINT_H_
+#include <stddef.h>
#include <stdio.h>
#if defined(_WIN32) && !defined(__MINGW32__)
diff --git a/src/3rdparty/v8/preparser/preparser-process.cc b/src/3rdparty/v8/preparser/preparser-process.cc
index b0aeb81..368f63f 100644
--- a/src/3rdparty/v8/preparser/preparser-process.cc
+++ b/src/3rdparty/v8/preparser/preparser-process.cc
@@ -200,12 +200,14 @@ void fail(v8::PreParserData* data, const char* message, ...) {
vfprintf(stderr, message, args);
va_end(args);
fflush(stderr);
- // Print preparser data to stdout.
- uint32_t size = data->size();
- fprintf(stderr, "LOG: data size: %u\n", size);
- if (!WriteBuffer(stdout, data->data(), size)) {
- perror("ERROR: Writing data");
- fflush(stderr);
+ if (data != NULL) {
+ // Print preparser data to stdout.
+ uint32_t size = data->size();
+ fprintf(stderr, "LOG: data size: %u\n", size);
+ if (!WriteBuffer(stdout, data->data(), size)) {
+ perror("ERROR: Writing data");
+ fflush(stderr);
+ }
}
exit(EXIT_FAILURE);
}
diff --git a/src/3rdparty/v8/samples/shell.cc b/src/3rdparty/v8/samples/shell.cc
index b40eca2..db0cc1a 100644
--- a/src/3rdparty/v8/samples/shell.cc
+++ b/src/3rdparty/v8/samples/shell.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -67,17 +67,20 @@ static bool run_shell;
int main(int argc, char* argv[]) {
v8::V8::SetFlagsFromCommandLine(&argc, argv, true);
run_shell = (argc == 1);
- v8::HandleScope handle_scope;
- v8::Persistent<v8::Context> context = CreateShellContext();
- if (context.IsEmpty()) {
- printf("Error creating context\n");
- return 1;
+ int result;
+ {
+ v8::HandleScope handle_scope;
+ v8::Persistent<v8::Context> context = CreateShellContext();
+ if (context.IsEmpty()) {
+ printf("Error creating context\n");
+ return 1;
+ }
+ context->Enter();
+ result = RunMain(argc, argv);
+ if (run_shell) RunShell(context);
+ context->Exit();
+ context.Dispose();
}
- context->Enter();
- int result = RunMain(argc, argv);
- if (run_shell) RunShell(context);
- context->Exit();
- context.Dispose();
v8::V8::Dispose();
return result;
}
diff --git a/src/3rdparty/v8/src/SConscript b/src/3rdparty/v8/src/SConscript
index 42de36b..0d0b535 100755
--- a/src/3rdparty/v8/src/SConscript
+++ b/src/3rdparty/v8/src/SConscript
@@ -1,4 +1,4 @@
-# Copyright 2011 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -59,6 +59,7 @@ SOURCES = {
counters.cc
cpu-profiler.cc
data-flow.cc
+ date.cc
dateparser.cc
debug-agent.cc
debug.cc
@@ -78,13 +79,13 @@ SOURCES = {
fast-dtoa.cc
fixed-dtoa.cc
handles.cc
- hashmap.cc
heap-profiler.cc
heap.cc
hydrogen.cc
hydrogen-instructions.cc
ic.cc
incremental-marking.cc
+ interface.cc
inspector.cc
interpreter-irregexp.cc
isolate.cc
@@ -100,6 +101,7 @@ SOURCES = {
objects.cc
objects-printer.cc
objects-visiting.cc
+ once.cc
parser.cc
preparser.cc
preparse-data.cc
@@ -246,7 +248,6 @@ PREPARSER_SOURCES = {
dtoa.cc
fast-dtoa.cc
fixed-dtoa.cc
- hashmap.cc
preparse-data.cc
preparser.cc
preparser-api.cc
diff --git a/src/3rdparty/v8/src/accessors.cc b/src/3rdparty/v8/src/accessors.cc
index 02998f9..8048738 100644
--- a/src/3rdparty/v8/src/accessors.cc
+++ b/src/3rdparty/v8/src/accessors.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -26,15 +26,16 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
-
#include "accessors.h"
-#include "ast.h"
+
+#include "contexts.h"
#include "deoptimizer.h"
#include "execution.h"
#include "factory.h"
+#include "frames-inl.h"
+#include "isolate.h"
#include "list-inl.h"
-#include "safepoint-table.h"
-#include "scopeinfo.h"
+#include "property-details.h"
namespace v8 {
namespace internal {
@@ -486,16 +487,6 @@ MaybeObject* Accessors::FunctionSetPrototype(JSObject* object,
NONE);
}
- if (function->has_initial_map()) {
- // If the function has allocated the initial map
- // replace it with a copy containing the new prototype.
- Object* new_map;
- { MaybeObject* maybe_new_map =
- function->initial_map()->CopyDropTransitions();
- if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
- }
- function->set_initial_map(Map::cast(new_map));
- }
Object* prototype;
{ MaybeObject* maybe_prototype = function->SetPrototype(value);
if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
@@ -574,11 +565,12 @@ static MaybeObject* ConstructArgumentsObjectForInlinedFunction(
Handle<JSFunction> inlined_function,
int inlined_frame_index) {
Factory* factory = Isolate::Current()->factory();
- int args_count = inlined_function->shared()->formal_parameter_count();
- ScopedVector<SlotRef> args_slots(args_count);
- SlotRef::ComputeSlotMappingForArguments(frame,
- inlined_frame_index,
- &args_slots);
+ Vector<SlotRef> args_slots =
+ SlotRef::ComputeSlotMappingForArguments(
+ frame,
+ inlined_frame_index,
+ inlined_function->shared()->formal_parameter_count());
+ int args_count = args_slots.length();
Handle<JSObject> arguments =
factory->NewArgumentsObject(inlined_function, args_count);
Handle<FixedArray> array = factory->NewFixedArray(args_count);
@@ -587,6 +579,7 @@ static MaybeObject* ConstructArgumentsObjectForInlinedFunction(
array->set(i, *value);
}
arguments->set_elements(*array);
+ args_slots.Dispose();
// Return the freshly allocated arguments object.
return *arguments;
@@ -621,8 +614,9 @@ MaybeObject* Accessors::FunctionGetArguments(Object* object, void*) {
if (!frame->is_optimized()) {
// If there is an arguments variable in the stack, we return that.
- Handle<SerializedScopeInfo> info(function->shared()->scope_info());
- int index = info->StackSlotIndex(isolate->heap()->arguments_symbol());
+ Handle<ScopeInfo> scope_info(function->shared()->scope_info());
+ int index = scope_info->StackSlotIndex(
+ isolate->heap()->arguments_symbol());
if (index >= 0) {
Handle<Object> arguments(frame->GetExpression(index), isolate);
if (!arguments->IsArgumentsMarker()) return *arguments;
@@ -674,7 +668,7 @@ static MaybeObject* CheckNonStrictCallerOrThrow(
Isolate* isolate,
JSFunction* caller) {
DisableAssertNoAllocation enable_allocation;
- if (caller->shared()->strict_mode()) {
+ if (!caller->shared()->is_classic_mode()) {
return isolate->Throw(
*isolate->factory()->NewTypeError("strict_caller",
HandleVector<Object>(NULL, 0)));
diff --git a/src/3rdparty/v8/src/accessors.h b/src/3rdparty/v8/src/accessors.h
index 385536d..36b9a99 100644
--- a/src/3rdparty/v8/src/accessors.h
+++ b/src/3rdparty/v8/src/accessors.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -29,6 +29,7 @@
#define V8_ACCESSORS_H_
#include "allocation.h"
+#include "v8globals.h"
namespace v8 {
namespace internal {
diff --git a/src/3rdparty/v8/src/allocation.cc b/src/3rdparty/v8/src/allocation.cc
index 119b087..6c7a08c 100644
--- a/src/3rdparty/v8/src/allocation.cc
+++ b/src/3rdparty/v8/src/allocation.cc
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -25,10 +25,11 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#include "../include/v8stdint.h"
-#include "globals.h"
-#include "checks.h"
#include "allocation.h"
+
+#include <stdlib.h> // For free, malloc.
+#include <string.h> // For memcpy.
+#include "checks.h"
#include "utils.h"
namespace v8 {
diff --git a/src/3rdparty/v8/src/allocation.h b/src/3rdparty/v8/src/allocation.h
index 75aba35..31067dd 100644
--- a/src/3rdparty/v8/src/allocation.h
+++ b/src/3rdparty/v8/src/allocation.h
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -28,7 +28,6 @@
#ifndef V8_ALLOCATION_H_
#define V8_ALLOCATION_H_
-#include "checks.h"
#include "globals.h"
namespace v8 {
@@ -81,7 +80,7 @@ class AllStatic {
template <typename T>
-static T* NewArray(int size) {
+T* NewArray(size_t size) {
T* result = new T[size];
if (result == NULL) Malloced::FatalProcessOutOfMemory();
return result;
@@ -89,7 +88,7 @@ static T* NewArray(int size) {
template <typename T>
-static void DeleteArray(T* array) {
+void DeleteArray(T* array) {
delete[] array;
}
diff --git a/src/3rdparty/v8/src/api.cc b/src/3rdparty/v8/src/api.cc
index 9f42ff9..bb9ba23 100644
--- a/src/3rdparty/v8/src/api.cc
+++ b/src/3rdparty/v8/src/api.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -25,34 +25,39 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#include "v8.h"
-
#include "api.h"
-#include "arguments.h"
+#include <math.h> // For isnan.
+#include <string.h> // For memcpy, strlen.
+#include "../include/v8-debug.h"
+#include "../include/v8-profiler.h"
+#include "../include/v8-testing.h"
#include "bootstrapper.h"
#include "compiler.h"
+#include "conversions-inl.h"
+#include "counters.h"
#include "debug.h"
#include "deoptimizer.h"
#include "execution.h"
-#include "flags.h"
#include "global-handles.h"
#include "heap-profiler.h"
#include "messages.h"
+#ifdef COMPRESS_STARTUP_DATA_BZ2
#include "natives.h"
+#endif
#include "parser.h"
#include "platform.h"
#include "profile-generator-inl.h"
+#include "property-details.h"
+#include "property.h"
#include "runtime-profiler.h"
#include "scanner-character-streams.h"
-#include "serialize.h"
#include "snapshot.h"
+#include "unicode-inl.h"
#include "v8threads.h"
#include "version.h"
#include "vm-state-inl.h"
-#include "../include/v8-profiler.h"
-#include "../include/v8-testing.h"
#define LOG_API(isolate, expr) LOG(isolate, ApiEntryCall(expr))
@@ -78,7 +83,7 @@ namespace v8 {
bool has_pending_exception = false
-#define EXCEPTION_BAILOUT_CHECK(isolate, value) \
+#define EXCEPTION_BAILOUT_CHECK_GENERIC(isolate, value, do_callback) \
do { \
i::HandleScopeImplementer* handle_scope_implementer = \
(isolate)->handle_scope_implementer(); \
@@ -91,11 +96,22 @@ namespace v8 {
} \
bool call_depth_is_zero = handle_scope_implementer->CallDepthIsZero(); \
(isolate)->OptionalRescheduleException(call_depth_is_zero); \
+ do_callback \
return value; \
} \
+ do_callback \
} while (false)
+#define EXCEPTION_BAILOUT_CHECK_DO_CALLBACK(isolate, value) \
+ EXCEPTION_BAILOUT_CHECK_GENERIC( \
+ isolate, value, i::V8::FireCallCompletedCallback(isolate);)
+
+
+#define EXCEPTION_BAILOUT_CHECK(isolate, value) \
+ EXCEPTION_BAILOUT_CHECK_GENERIC(isolate, value, ;)
+
+
#define API_ENTRY_CHECK(isolate, msg) \
do { \
if (v8::Locker::IsActive()) { \
@@ -344,6 +360,7 @@ int StartupDataDecompressor::Decompress() {
compressed_data[i].data = decompressed;
}
V8::SetDecompressedStartupData(compressed_data);
+ i::DeleteArray(compressed_data);
return 0;
}
@@ -486,7 +503,7 @@ RegisteredExtension* RegisteredExtension::first_extension_ = NULL;
RegisteredExtension::RegisteredExtension(Extension* extension)
- : extension_(extension), state_(UNVISITED) { }
+ : extension_(extension) { }
void RegisteredExtension::Register(RegisteredExtension* that) {
@@ -495,6 +512,16 @@ void RegisteredExtension::Register(RegisteredExtension* that) {
}
+void RegisteredExtension::UnregisterAll() {
+ RegisteredExtension* re = first_extension_;
+ while (re != NULL) {
+ RegisteredExtension* next = re->next();
+ delete re;
+ re = next;
+ }
+}
+
+
void RegisterExtension(Extension* that) {
RegisteredExtension* extension = new RegisteredExtension(that);
RegisteredExtension::Register(extension);
@@ -508,7 +535,8 @@ Extension::Extension(const char* name,
int source_length)
: name_(name),
source_length_(source_length >= 0 ?
- source_length : (source ? strlen(source) : 0)),
+ source_length :
+ (source ? static_cast<int>(strlen(source)) : 0)),
source_(source, source_length_),
dep_count_(dep_count),
deps_(deps),
@@ -730,6 +758,7 @@ void Context::Exit() {
i::Context* last_context =
isolate->handle_scope_implementer()->RestoreContext();
isolate->set_context(last_context);
+ isolate->set_context_exit_happened(true);
}
@@ -1123,7 +1152,6 @@ void FunctionTemplate::SetNamedInstancePropertyHandler(
NamedPropertyQuery query,
NamedPropertyDeleter remover,
NamedPropertyEnumerator enumerator,
- bool is_fallback,
Handle<Value> data) {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
if (IsDeadCheck(isolate,
@@ -1142,7 +1170,6 @@ void FunctionTemplate::SetNamedInstancePropertyHandler(
if (query != 0) SET_FIELD_WRAPPED(obj, set_query, query);
if (remover != 0) SET_FIELD_WRAPPED(obj, set_deleter, remover);
if (enumerator != 0) SET_FIELD_WRAPPED(obj, set_enumerator, enumerator);
- obj->set_is_fallback(i::Smi::FromInt(is_fallback));
if (data.IsEmpty()) data = v8::Undefined();
obj->set_data(*Utils::OpenHandle(*data));
@@ -1287,33 +1314,6 @@ void ObjectTemplate::SetNamedPropertyHandler(NamedPropertyGetter getter,
query,
remover,
enumerator,
- false,
- data);
-}
-
-
-void ObjectTemplate::SetFallbackPropertyHandler(NamedPropertyGetter getter,
- NamedPropertySetter setter,
- NamedPropertyQuery query,
- NamedPropertyDeleter remover,
- NamedPropertyEnumerator enumerator,
- Handle<Value> data) {
- i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
- if (IsDeadCheck(isolate, "v8::ObjectTemplate::SetNamedPropertyHandler()")) {
- return;
- }
- ENTER_V8(isolate);
- i::HandleScope scope(isolate);
- EnsureConstructor(this);
- i::FunctionTemplateInfo* constructor =
- i::FunctionTemplateInfo::cast(Utils::OpenHandle(this)->constructor());
- i::Handle<i::FunctionTemplateInfo> cons(constructor);
- Utils::ToLocal(cons)->SetNamedInstancePropertyHandler(getter,
- setter,
- query,
- remover,
- enumerator,
- true,
data);
}
@@ -1436,50 +1436,11 @@ void ObjectTemplate::SetInternalFieldCount(int value) {
}
-bool ObjectTemplate::HasExternalResource()
-{
- if (IsDeadCheck(Utils::OpenHandle(this)->GetIsolate(),
- "v8::ObjectTemplate::HasExternalResource()")) {
- return 0;
- }
- return !Utils::OpenHandle(this)->has_external_resource()->IsUndefined();
-}
-
-
-void ObjectTemplate::SetHasExternalResource(bool value)
-{
- i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
- if (IsDeadCheck(isolate, "v8::ObjectTemplate::SetHasExternalResource()")) {
- return;
- }
- ENTER_V8(isolate);
- if (value) {
- EnsureConstructor(this);
- }
- if (value) {
- Utils::OpenHandle(this)->set_has_external_resource(i::Smi::FromInt(1));
- } else {
- Utils::OpenHandle(this)->set_has_external_resource(Utils::OpenHandle(this)->GetHeap()->undefined_value());
- }
-}
-
-
-void ObjectTemplate::MarkAsUseUserObjectComparison()
-{
- i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
- if (IsDeadCheck(isolate, "v8::ObjectTemplate::MarkAsUseUserObjectComparison()")) {
- return;
- }
- ENTER_V8(isolate);
- EnsureConstructor(this);
- Utils::OpenHandle(this)->set_use_user_object_comparison(i::Smi::FromInt(1));
-}
-
// --- S c r i p t D a t a ---
ScriptData* ScriptData::PreCompile(const char* input, int length) {
- i::Utf8ToUC16CharacterStream stream(
+ i::Utf8ToUtf16CharacterStream stream(
reinterpret_cast<const unsigned char*>(input), length);
return i::ParserApi::PreParse(&stream, NULL, i::FLAG_harmony_scoping);
}
@@ -1488,11 +1449,11 @@ ScriptData* ScriptData::PreCompile(const char* input, int length) {
ScriptData* ScriptData::PreCompile(v8::Handle<String> source) {
i::Handle<i::String> str = Utils::OpenHandle(*source);
if (str->IsExternalTwoByteString()) {
- i::ExternalTwoByteStringUC16CharacterStream stream(
+ i::ExternalTwoByteStringUtf16CharacterStream stream(
i::Handle<i::ExternalTwoByteString>::cast(str), 0, str->length());
return i::ParserApi::PreParse(&stream, NULL, i::FLAG_harmony_scoping);
} else {
- i::GenericStringUC16CharacterStream stream(str, 0, str->length());
+ i::GenericStringUtf16CharacterStream stream(str, 0, str->length());
return i::ParserApi::PreParse(&stream, NULL, i::FLAG_harmony_scoping);
}
}
@@ -1525,37 +1486,40 @@ ScriptData* ScriptData::New(const char* data, int length) {
Local<Script> Script::New(v8::Handle<String> source,
v8::ScriptOrigin* origin,
v8::ScriptData* pre_data,
- v8::Handle<String> script_data,
- v8::Script::CompileFlags compile_flags) {
+ v8::Handle<String> script_data) {
i::Isolate* isolate = i::Isolate::Current();
ON_BAILOUT(isolate, "v8::Script::New()", return Local<Script>());
LOG_API(isolate, "Script::New");
ENTER_V8(isolate);
- i::Handle<i::String> str = Utils::OpenHandle(*source);
- i::Handle<i::Object> name_obj;
- int line_offset = 0;
- int column_offset = 0;
- if (origin != NULL) {
- if (!origin->ResourceName().IsEmpty()) {
- name_obj = Utils::OpenHandle(*origin->ResourceName());
- }
- if (!origin->ResourceLineOffset().IsEmpty()) {
- line_offset = static_cast<int>(origin->ResourceLineOffset()->Value());
+ i::SharedFunctionInfo* raw_result = NULL;
+ { i::HandleScope scope(isolate);
+ i::Handle<i::String> str = Utils::OpenHandle(*source);
+ i::Handle<i::Object> name_obj;
+ int line_offset = 0;
+ int column_offset = 0;
+ if (origin != NULL) {
+ if (!origin->ResourceName().IsEmpty()) {
+ name_obj = Utils::OpenHandle(*origin->ResourceName());
+ }
+ if (!origin->ResourceLineOffset().IsEmpty()) {
+ line_offset = static_cast<int>(origin->ResourceLineOffset()->Value());
+ }
+ if (!origin->ResourceColumnOffset().IsEmpty()) {
+ column_offset =
+ static_cast<int>(origin->ResourceColumnOffset()->Value());
+ }
}
- if (!origin->ResourceColumnOffset().IsEmpty()) {
- column_offset = static_cast<int>(origin->ResourceColumnOffset()->Value());
+ EXCEPTION_PREAMBLE(isolate);
+ i::ScriptDataImpl* pre_data_impl =
+ static_cast<i::ScriptDataImpl*>(pre_data);
+ // We assert that the pre-data is sane, even though we can actually
+ // handle it if it turns out not to be in release mode.
+ ASSERT(pre_data_impl == NULL || pre_data_impl->SanityCheck());
+ // If the pre-data isn't sane we simply ignore it
+ if (pre_data_impl != NULL && !pre_data_impl->SanityCheck()) {
+ pre_data_impl = NULL;
}
- }
- EXCEPTION_PREAMBLE(isolate);
- i::ScriptDataImpl* pre_data_impl = static_cast<i::ScriptDataImpl*>(pre_data);
- // We assert that the pre-data is sane, even though we can actually
- // handle it if it turns out not to be in release mode.
- ASSERT(pre_data_impl == NULL || pre_data_impl->SanityCheck());
- // If the pre-data isn't sane we simply ignore it
- if (pre_data_impl != NULL && !pre_data_impl->SanityCheck()) {
- pre_data_impl = NULL;
- }
- i::Handle<i::SharedFunctionInfo> result =
+ i::Handle<i::SharedFunctionInfo> result =
i::Compiler::Compile(str,
name_obj,
line_offset,
@@ -1563,32 +1527,32 @@ Local<Script> Script::New(v8::Handle<String> source,
NULL,
pre_data_impl,
Utils::OpenHandle(*script_data),
- i::NOT_NATIVES_CODE,
- compile_flags);
- has_pending_exception = result.is_null();
- EXCEPTION_BAILOUT_CHECK(isolate, Local<Script>());
+ i::NOT_NATIVES_CODE);
+ has_pending_exception = result.is_null();
+ EXCEPTION_BAILOUT_CHECK(isolate, Local<Script>());
+ raw_result = *result;
+ }
+ i::Handle<i::SharedFunctionInfo> result(raw_result, isolate);
return Local<Script>(ToApi<Script>(result));
}
Local<Script> Script::New(v8::Handle<String> source,
- v8::Handle<Value> file_name,
- v8::Script::CompileFlags compile_flags) {
+ v8::Handle<Value> file_name) {
ScriptOrigin origin(file_name);
- return New(source, &origin, 0, Handle<String>(), compile_flags);
+ return New(source, &origin);
}
Local<Script> Script::Compile(v8::Handle<String> source,
v8::ScriptOrigin* origin,
v8::ScriptData* pre_data,
- v8::Handle<String> script_data,
- v8::Script::CompileFlags compile_flags) {
+ v8::Handle<String> script_data) {
i::Isolate* isolate = i::Isolate::Current();
ON_BAILOUT(isolate, "v8::Script::Compile()", return Local<Script>());
LOG_API(isolate, "Script::Compile");
ENTER_V8(isolate);
- Local<Script> generic = New(source, origin, pre_data, script_data, compile_flags);
+ Local<Script> generic = New(source, origin, pre_data, script_data);
if (generic.IsEmpty())
return generic;
i::Handle<i::Object> obj = Utils::OpenHandle(*generic);
@@ -1604,18 +1568,13 @@ Local<Script> Script::Compile(v8::Handle<String> source,
Local<Script> Script::Compile(v8::Handle<String> source,
v8::Handle<Value> file_name,
- v8::Handle<String> script_data,
- v8::Script::CompileFlags compile_flags) {
+ v8::Handle<String> script_data) {
ScriptOrigin origin(file_name);
- return Compile(source, &origin, 0, script_data, compile_flags);
+ return Compile(source, &origin, 0, script_data);
}
Local<Value> Script::Run() {
- return Run(Handle<Object>());
-}
-
-Local<Value> Script::Run(Handle<Object> qml) {
i::Isolate* isolate = i::Isolate::Current();
ON_BAILOUT(isolate, "v8::Script::Run()", return Local<Value>());
LOG_API(isolate, "Script::Run");
@@ -1634,12 +1593,11 @@ Local<Value> Script::Run(Handle<Object> qml) {
fun = i::Handle<i::JSFunction>(i::JSFunction::cast(*obj), isolate);
}
EXCEPTION_PREAMBLE(isolate);
- i::Handle<i::Object> qmlglobal = Utils::OpenHandle(*qml);
i::Handle<i::Object> receiver(
isolate->context()->global_proxy(), isolate);
i::Handle<i::Object> result =
- i::Execution::Call(fun, receiver, 0, NULL, &has_pending_exception, false, qmlglobal);
- EXCEPTION_BAILOUT_CHECK(isolate, Local<Value>());
+ i::Execution::Call(fun, receiver, 0, NULL, &has_pending_exception);
+ EXCEPTION_BAILOUT_CHECK_DO_CALLBACK(isolate, Local<Value>());
raw_result = *result;
}
i::Handle<i::Object> result(raw_result, isolate);
@@ -2225,6 +2183,11 @@ bool Value::IsInt32() const {
if (obj->IsSmi()) return true;
if (obj->IsNumber()) {
double value = obj->Number();
+ static const i::DoubleRepresentation minus_zero(-0.0);
+ i::DoubleRepresentation rep(value);
+ if (rep.bits == minus_zero.bits) {
+ return false;
+ }
return i::FastI2D(i::FastD2I(value)) == value;
}
return false;
@@ -2237,6 +2200,11 @@ bool Value::IsUint32() const {
if (obj->IsSmi()) return i::Smi::cast(*obj)->value() >= 0;
if (obj->IsNumber()) {
double value = obj->Number();
+ static const i::DoubleRepresentation minus_zero(-0.0);
+ i::DoubleRepresentation rep(value);
+ if (rep.bits == minus_zero.bits) {
+ return false;
+ }
return i::FastUI2D(i::FastD2UI(value)) == value;
}
return false;
@@ -2316,12 +2284,6 @@ bool Value::IsRegExp() const {
return obj->IsJSRegExp();
}
-bool Value::IsError() const {
- if (IsDeadCheck(i::Isolate::Current(), "v8::Value::IsError()")) return false;
- i::Handle<i::Object> obj = Utils::OpenHandle(this);
- return obj->HasSpecificClassOf(HEAP->Error_symbol());
-}
-
Local<String> Value::ToString() const {
i::Handle<i::Object> obj = Utils::OpenHandle(this);
@@ -2805,10 +2767,11 @@ bool v8::Object::Set(uint32_t index, v8::Handle<Value> value) {
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
i::Handle<i::Object> value_obj = Utils::OpenHandle(*value);
EXCEPTION_PREAMBLE(isolate);
- i::Handle<i::Object> obj = i::SetElement(
+ i::Handle<i::Object> obj = i::JSObject::SetElement(
self,
index,
value_obj,
+ NONE,
i::kNonStrictMode);
has_pending_exception = obj.is_null();
EXCEPTION_BAILOUT_CHECK(isolate, false);
@@ -2911,7 +2874,7 @@ Local<Value> v8::Object::GetPrototype() {
return Local<v8::Value>());
ENTER_V8(isolate);
i::Handle<i::Object> self = Utils::OpenHandle(this);
- i::Handle<i::Object> result = i::GetPrototype(self);
+ i::Handle<i::Object> result(self->GetPrototype());
return Utils::ToLocal(result);
}
@@ -3065,7 +3028,7 @@ bool v8::Object::Delete(v8::Handle<String> key) {
i::HandleScope scope(isolate);
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
i::Handle<i::String> key_obj = Utils::OpenHandle(*key);
- return i::DeleteProperty(self, key_obj)->IsTrue();
+ return i::JSObject::DeleteProperty(self, key_obj)->IsTrue();
}
@@ -3086,7 +3049,7 @@ bool v8::Object::Delete(uint32_t index) {
ENTER_V8(isolate);
HandleScope scope;
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
- return i::DeleteElement(self, index)->IsTrue();
+ return i::JSObject::DeleteElement(self, index)->IsTrue();
}
@@ -3111,8 +3074,11 @@ bool Object::SetAccessor(Handle<String> name,
i::Handle<i::AccessorInfo> info = MakeAccessorInfo(name,
getter, setter, data,
settings, attributes);
+ bool fast = Utils::OpenHandle(this)->HasFastProperties();
i::Handle<i::Object> result = i::SetAccessor(Utils::OpenHandle(this), info);
- return !result.is_null() && !result->IsUndefined();
+ if (result.is_null() || result->IsUndefined()) return false;
+ if (fast) i::JSObject::TransformToFastProperties(Utils::OpenHandle(this), 0);
+ return true;
}
@@ -3291,7 +3257,7 @@ int v8::Object::GetIdentityHash() {
ENTER_V8(isolate);
i::HandleScope scope(isolate);
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
- return i::GetIdentityHash(self);
+ return i::JSObject::GetIdentityHash(self);
}
@@ -3304,7 +3270,8 @@ bool v8::Object::SetHiddenValue(v8::Handle<v8::String> key,
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
i::Handle<i::String> key_obj = Utils::OpenHandle(*key);
i::Handle<i::Object> value_obj = Utils::OpenHandle(*value);
- i::Handle<i::Object> result = i::SetHiddenProperty(self, key_obj, value_obj);
+ i::Handle<i::Object> result =
+ i::JSObject::SetHiddenProperty(self, key_obj, value_obj);
return *result == *self;
}
@@ -3571,7 +3538,7 @@ Local<v8::Value> Object::CallAsFunction(v8::Handle<v8::Object> recv,
EXCEPTION_PREAMBLE(isolate);
i::Handle<i::Object> returned =
i::Execution::Call(fun, recv_obj, argc, args, &has_pending_exception);
- EXCEPTION_BAILOUT_CHECK(isolate, Local<Value>());
+ EXCEPTION_BAILOUT_CHECK_DO_CALLBACK(isolate, Local<Value>());
return Utils::ToLocal(scope.CloseAndEscape(returned));
}
@@ -3592,7 +3559,7 @@ Local<v8::Value> Object::CallAsConstructor(int argc,
EXCEPTION_PREAMBLE(isolate);
i::Handle<i::Object> returned =
i::Execution::New(fun, argc, args, &has_pending_exception);
- EXCEPTION_BAILOUT_CHECK(isolate, Local<v8::Object>());
+ EXCEPTION_BAILOUT_CHECK_DO_CALLBACK(isolate, Local<v8::Object>());
return Utils::ToLocal(scope.CloseAndEscape(
i::Handle<i::JSObject>::cast(returned)));
}
@@ -3605,7 +3572,7 @@ Local<v8::Value> Object::CallAsConstructor(int argc,
EXCEPTION_PREAMBLE(isolate);
i::Handle<i::Object> returned =
i::Execution::Call(fun, obj, argc, args, &has_pending_exception);
- EXCEPTION_BAILOUT_CHECK(isolate, Local<v8::Object>());
+ EXCEPTION_BAILOUT_CHECK_DO_CALLBACK(isolate, Local<v8::Object>());
ASSERT(!delegate->IsUndefined());
return Utils::ToLocal(scope.CloseAndEscape(returned));
}
@@ -3632,7 +3599,7 @@ Local<v8::Object> Function::NewInstance(int argc,
EXCEPTION_PREAMBLE(isolate);
i::Handle<i::Object> returned =
i::Execution::New(function, argc, args, &has_pending_exception);
- EXCEPTION_BAILOUT_CHECK(isolate, Local<v8::Object>());
+ EXCEPTION_BAILOUT_CHECK_DO_CALLBACK(isolate, Local<v8::Object>());
return scope.Close(Utils::ToLocal(i::Handle<i::JSObject>::cast(returned)));
}
@@ -3653,7 +3620,7 @@ Local<v8::Value> Function::Call(v8::Handle<v8::Object> recv, int argc,
EXCEPTION_PREAMBLE(isolate);
i::Handle<i::Object> returned =
i::Execution::Call(fun, recv_obj, argc, args, &has_pending_exception);
- EXCEPTION_BAILOUT_CHECK(isolate, Local<Object>());
+ EXCEPTION_BAILOUT_CHECK_DO_CALLBACK(isolate, Local<Object>());
raw_result = *returned;
}
i::Handle<i::Object> result(raw_result);
@@ -3676,6 +3643,12 @@ Handle<Value> Function::GetName() const {
}
+Handle<Value> Function::GetInferredName() const {
+ i::Handle<i::JSFunction> func = Utils::OpenHandle(this);
+ return Utils::ToLocal(i::Handle<i::Object>(func->shared()->inferred_name()));
+}
+
+
ScriptOrigin Function::GetScriptOrigin() const {
i::Handle<i::JSFunction> func = Utils::OpenHandle(this);
if (func->shared()->script()->IsScript()) {
@@ -3703,6 +3676,23 @@ int Function::GetScriptLineNumber() const {
}
+int Function::GetScriptColumnNumber() const {
+ i::Handle<i::JSFunction> func = Utils::OpenHandle(this);
+ if (func->shared()->script()->IsScript()) {
+ i::Handle<i::Script> script(i::Script::cast(func->shared()->script()));
+ return i::GetScriptColumnNumber(script, func->shared()->start_position());
+ }
+ return kLineOffsetNotFound;
+}
+
+Handle<Value> Function::GetScriptId() const {
+ i::Handle<i::JSFunction> func = Utils::OpenHandle(this);
+ if (!func->shared()->script()->IsScript())
+ return v8::Undefined();
+ i::Handle<i::Script> script(i::Script::cast(func->shared()->script()));
+ return Utils::ToLocal(i::Handle<i::Object>(script->id()));
+}
+
int String::Length() const {
i::Handle<i::String> str = Utils::OpenHandle(this);
if (IsDeadCheck(str->GetIsolate(), "v8::String::Length()")) return 0;
@@ -3713,58 +3703,104 @@ int String::Length() const {
int String::Utf8Length() const {
i::Handle<i::String> str = Utils::OpenHandle(this);
if (IsDeadCheck(str->GetIsolate(), "v8::String::Utf8Length()")) return 0;
- return str->Utf8Length();
-}
-
-
-uint32_t String::Hash() const {
- i::Handle<i::String> str = Utils::OpenHandle(this);
- if (IsDeadCheck(str->GetIsolate(), "v8::String::Hash()")) return 0;
- return str->Hash();
-}
-
-
-String::CompleteHashData String::CompleteHash() const {
- i::Handle<i::String> str = Utils::OpenHandle(this);
- if (IsDeadCheck(str->GetIsolate(), "v8::String::CompleteHash()")) return CompleteHashData();
- CompleteHashData result;
- result.length = str->length();
- result.hash = str->Hash();
- if (str->IsSeqString())
- result.symbol_id = i::SeqString::cast(*str)->symbol_id();
- return result;
-}
-
-
-uint32_t String::ComputeHash(uint16_t *string, int length) {
- return i::HashSequentialString<i::uc16>(string, length) >> i::String::kHashShift;
-}
-
-
-uint32_t String::ComputeHash(char *string, int length) {
- return i::HashSequentialString<char>(string, length) >> i::String::kHashShift;
-}
-
-
-uint16_t String::GetCharacter(int index)
-{
- i::Handle<i::String> str = Utils::OpenHandle(this);
- return str->Get(index);
-}
-
-
-bool String::Equals(uint16_t *string, int length) {
- i::Handle<i::String> str = Utils::OpenHandle(this);
- if (IsDeadCheck(str->GetIsolate(), "v8::String::Equals()")) return 0;
- return str->SlowEqualsExternal(string, length);
+ return i::Utf8Length(str);
+}
+
+
+// Will fail with a negative answer if the recursion depth is too high.
+static int RecursivelySerializeToUtf8(i::String* string,
+ char* buffer,
+ int start,
+ int end,
+ int recursion_budget,
+ int32_t previous_character,
+ int32_t* last_character) {
+ int utf8_bytes = 0;
+ while (true) {
+ if (string->IsAsciiRepresentation()) {
+ i::String::WriteToFlat(string, buffer, start, end);
+ *last_character = unibrow::Utf16::kNoPreviousCharacter;
+ return utf8_bytes + end - start;
+ }
+ switch (i::StringShape(string).representation_tag()) {
+ case i::kExternalStringTag: {
+ const uint16_t* data = i::ExternalTwoByteString::cast(string)->
+ ExternalTwoByteStringGetData(0);
+ char* current = buffer;
+ for (int i = start; i < end; i++) {
+ uint16_t character = data[i];
+ current +=
+ unibrow::Utf8::Encode(current, character, previous_character);
+ previous_character = character;
+ }
+ *last_character = previous_character;
+ return static_cast<int>(utf8_bytes + current - buffer);
+ }
+ case i::kSeqStringTag: {
+ const uint16_t* data =
+ i::SeqTwoByteString::cast(string)->SeqTwoByteStringGetData(0);
+ char* current = buffer;
+ for (int i = start; i < end; i++) {
+ uint16_t character = data[i];
+ current +=
+ unibrow::Utf8::Encode(current, character, previous_character);
+ previous_character = character;
+ }
+ *last_character = previous_character;
+ return static_cast<int>(utf8_bytes + current - buffer);
+ }
+ case i::kSlicedStringTag: {
+ i::SlicedString* slice = i::SlicedString::cast(string);
+ unsigned offset = slice->offset();
+ string = slice->parent();
+ start += offset;
+ end += offset;
+ continue;
+ }
+ case i::kConsStringTag: {
+ i::ConsString* cons_string = i::ConsString::cast(string);
+ i::String* first = cons_string->first();
+ int boundary = first->length();
+ if (start >= boundary) {
+ // Only need RHS.
+ string = cons_string->second();
+ start -= boundary;
+ end -= boundary;
+ continue;
+ } else if (end <= boundary) {
+ // Only need LHS.
+ string = first;
+ } else {
+ if (recursion_budget == 0) return -1;
+ int extra_utf8_bytes =
+ RecursivelySerializeToUtf8(first,
+ buffer,
+ start,
+ boundary,
+ recursion_budget - 1,
+ previous_character,
+ &previous_character);
+ if (extra_utf8_bytes < 0) return extra_utf8_bytes;
+ buffer += extra_utf8_bytes;
+ utf8_bytes += extra_utf8_bytes;
+ string = cons_string->second();
+ start = 0;
+ end -= boundary;
+ }
+ }
+ }
+ }
+ UNREACHABLE();
+ return 0;
}
-bool String::Equals(char *string, int length)
-{
+bool String::MayContainNonAscii() const {
i::Handle<i::String> str = Utils::OpenHandle(this);
- if (IsDeadCheck(str->GetIsolate(), "v8::String::Equals()")) return 0;
- return str->SlowEqualsExternal(string, length);
+ if (IsDeadCheck(str->GetIsolate(), "v8::String::MayContainNonAscii()")) {
+ return false;
+ }
+ return !str->HasOnlyAsciiChars();
}
@@ -3777,11 +3813,12 @@ int String::WriteUtf8(char* buffer,
LOG_API(isolate, "String::WriteUtf8");
ENTER_V8(isolate);
i::Handle<i::String> str = Utils::OpenHandle(this);
+ int string_length = str->length();
if (str->IsAsciiRepresentation()) {
int len;
if (capacity == -1) {
capacity = str->length() + 1;
- len = str->length();
+ len = string_length;
} else {
len = i::Min(capacity, str->length());
}
@@ -3794,6 +3831,42 @@ int String::WriteUtf8(char* buffer,
return len;
}
+ if (capacity == -1 || capacity / 3 >= string_length) {
+ int32_t previous = unibrow::Utf16::kNoPreviousCharacter;
+ const int kMaxRecursion = 100;
+ int utf8_bytes =
+ RecursivelySerializeToUtf8(*str,
+ buffer,
+ 0,
+ string_length,
+ kMaxRecursion,
+ previous,
+ &previous);
+ if (utf8_bytes >= 0) {
+ // Success serializing with recursion.
+ if ((options & NO_NULL_TERMINATION) == 0 &&
+ (capacity > utf8_bytes || capacity == -1)) {
+ buffer[utf8_bytes++] = '\0';
+ }
+ if (nchars_ref != NULL) *nchars_ref = string_length;
+ return utf8_bytes;
+ }
+ FlattenString(str);
+ // Recurse once. This time around the string is flat and the serializing
+ // with recursion will certainly succeed.
+ return WriteUtf8(buffer, capacity, nchars_ref, options);
+ } else if (capacity >= string_length) {
+ // First check that the buffer is large enough. If it is, then recurse
+ // once without a capacity limit, which will get into the other branch of
+ // this 'if'.
+ int utf8_bytes = i::Utf8Length(str);
+ if ((options & NO_NULL_TERMINATION) == 0) utf8_bytes++;
+ if (utf8_bytes <= capacity) {
+ return WriteUtf8(buffer, -1, nchars_ref, options);
+ }
+ }
+
+ // Slow case.
i::StringInputBuffer& write_input_buffer = *isolate->write_input_buffer();
isolate->string_tracker()->RecordWrite(str);
if (options & HINT_MANY_WRITES_EXPECTED) {
@@ -3810,11 +3883,13 @@ int String::WriteUtf8(char* buffer,
int i;
int pos = 0;
int nchars = 0;
+ int previous = unibrow::Utf16::kNoPreviousCharacter;
for (i = 0; i < len && (capacity == -1 || pos < fast_end); i++) {
i::uc32 c = write_input_buffer.GetNext();
- int written = unibrow::Utf8::Encode(buffer + pos, c);
+ int written = unibrow::Utf8::Encode(buffer + pos, c, previous);
pos += written;
nchars++;
+ previous = c;
}
if (i < len) {
// For the last characters we need to check the length for each one
@@ -3823,16 +3898,33 @@ int String::WriteUtf8(char* buffer,
char intermediate[unibrow::Utf8::kMaxEncodedSize];
for (; i < len && pos < capacity; i++) {
i::uc32 c = write_input_buffer.GetNext();
- int written = unibrow::Utf8::Encode(intermediate, c);
- if (pos + written <= capacity) {
- for (int j = 0; j < written; j++)
- buffer[pos + j] = intermediate[j];
+ if (unibrow::Utf16::IsTrailSurrogate(c) &&
+ unibrow::Utf16::IsLeadSurrogate(previous)) {
+ // We can't use the intermediate buffer here because the encoding
+ // of surrogate pairs is done under assumption that you can step
+ // back and fix the UTF8 stream. Luckily we only need space for one
+ // more byte, so there is always space.
+ ASSERT(pos < capacity);
+ int written = unibrow::Utf8::Encode(buffer + pos, c, previous);
+ ASSERT(written == 1);
pos += written;
nchars++;
} else {
- // We've reached the end of the buffer
- break;
+ int written =
+ unibrow::Utf8::Encode(intermediate,
+ c,
+ unibrow::Utf16::kNoPreviousCharacter);
+ if (pos + written <= capacity) {
+ for (int j = 0; j < written; j++)
+ buffer[pos + j] = intermediate[j];
+ pos += written;
+ nchars++;
+ } else {
+ // We've reached the end of the buffer
+ break;
+ }
}
+ previous = c;
}
}
if (nchars_ref != NULL) *nchars_ref = nchars;
@@ -4084,34 +4176,6 @@ void v8::Object::SetPointerInInternalField(int index, void* value) {
}
-void v8::Object::SetExternalResource(v8::Object::ExternalResource *resource) {
- i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
- ENTER_V8(isolate);
- i::Handle<i::JSObject> obj = Utils::OpenHandle(this);
- if (CanBeEncodedAsSmi(resource)) {
- obj->SetExternalResourceObject(EncodeAsSmi(resource));
- } else {
- obj->SetExternalResourceObject(*isolate->factory()->NewForeign(static_cast<i::Address>((void *)resource)));
- }
- if (!obj->IsSymbol()) {
- isolate->heap()->external_string_table()->AddObject(*obj);
- }
-}
-
-
-v8::Object::ExternalResource *v8::Object::GetExternalResource() {
- i::Handle<i::JSObject> obj = Utils::OpenHandle(this);
- i::Object* value = obj->GetExternalResourceObject();
- if (value->IsSmi()) {
- return reinterpret_cast<v8::Object::ExternalResource*>(i::Internals::GetExternalPointerFromSmi(value));
- } else if (value->IsForeign()) {
- return reinterpret_cast<v8::Object::ExternalResource*>(i::Foreign::cast(value)->foreign_address());
- } else {
- return NULL;
- }
-}
-
-
// --- E n v i r o n m e n t ---
@@ -4129,6 +4193,12 @@ void v8::V8::SetEntropySource(EntropySource source) {
}
+void v8::V8::SetReturnAddressLocationResolver(
+ ReturnAddressLocationResolver return_address_resolver) {
+ i::V8::SetReturnAddressLocationResolver(return_address_resolver);
+}
+
+
bool v8::V8::Dispose() {
i::Isolate* isolate = i::Isolate::Current();
if (!ApiCheck(isolate != NULL && isolate->IsDefaultIsolate(),
@@ -4166,19 +4236,26 @@ void v8::V8::GetHeapStatistics(HeapStatistics* heap_statistics) {
}
-bool v8::V8::IdleNotification() {
+void v8::V8::VisitExternalResources(ExternalResourceVisitor* visitor) {
+ i::Isolate* isolate = i::Isolate::Current();
+ IsDeadCheck(isolate, "v8::V8::VisitExternalResources");
+ isolate->heap()->VisitExternalResources(visitor);
+}
+
+
+bool v8::V8::IdleNotification(int hint) {
// Returning true tells the caller that it need not
// continue to call IdleNotification.
i::Isolate* isolate = i::Isolate::Current();
if (isolate == NULL || !isolate->IsInitialized()) return true;
- return i::V8::IdleNotification();
+ return i::V8::IdleNotification(hint);
}
void v8::V8::LowMemoryNotification() {
i::Isolate* isolate = i::Isolate::Current();
if (isolate == NULL || !isolate->IsInitialized()) return;
- isolate->heap()->CollectAllAvailableGarbage();
+ isolate->heap()->CollectAllAvailableGarbage("low memory notification");
}
@@ -4210,6 +4287,7 @@ Persistent<Context> v8::Context::New(
v8::ExtensionConfiguration* extensions,
v8::Handle<ObjectTemplate> global_template,
v8::Handle<Value> global_object) {
+ i::Isolate::EnsureDefaultIsolate();
i::Isolate* isolate = i::Isolate::Current();
EnsureInitializedForIsolate(isolate, "v8::Context::New()");
LOG_API(isolate, "Context::New");
@@ -4364,37 +4442,6 @@ v8::Local<v8::Context> Context::GetCalling() {
}
-v8::Local<v8::Object> Context::GetCallingQmlGlobal() {
- i::Isolate* isolate = i::Isolate::Current();
- if (IsDeadCheck(isolate, "v8::Context::GetCallingQmlGlobal()")) {
- return Local<Object>();
- }
-
- i::Context *context = isolate->context();
- i::JavaScriptFrameIterator it;
- if (it.done()) return Local<Object>();
- context = i::Context::cast(it.frame()->context());
- if (!context->qml_global()->IsUndefined()) {
- i::Handle<i::Object> qmlglobal(context->qml_global());
- return Utils::ToLocal(i::Handle<i::JSObject>::cast(qmlglobal));
- } else {
- return Local<Object>();
- }
-}
-
-v8::Local<v8::Value> Context::GetCallingScriptData()
-{
- i::Isolate* isolate = i::Isolate::Current();
- if (IsDeadCheck(isolate, "v8::Context::GetCallingScriptData()")) {
- return Local<Object>();
- }
-
- i::JavaScriptFrameIterator it;
- if (it.done()) return Local<Object>();
- i::Handle<i::Script> script(i::Script::cast(i::JSFunction::cast(it.frame()->function())->shared()->script()));
- return Utils::ToLocal(i::Handle<i::Object>(script->data()));
-}
-
v8::Local<v8::Object> Context::Global() {
if (IsDeadCheck(i::Isolate::Current(), "v8::Context::Global()")) {
return Local<v8::Object>();
@@ -4445,6 +4492,20 @@ void Context::AllowCodeGenerationFromStrings(bool allow) {
}
+bool Context::IsCodeGenerationFromStringsAllowed() {
+ i::Isolate* isolate = i::Isolate::Current();
+ if (IsDeadCheck(isolate,
+ "v8::Context::IsCodeGenerationFromStringsAllowed()")) {
+ return false;
+ }
+ ENTER_V8(isolate);
+ i::Object** ctx = reinterpret_cast<i::Object**>(this);
+ i::Handle<i::Context> context =
+ i::Handle<i::Context>::cast(i::Handle<i::Object>(ctx));
+ return !context->allow_code_gen_from_strings()->IsFalse();
+}
+
+
void V8::SetWrapperClassId(i::Object** global_handle, uint16_t class_id) {
i::GlobalHandles::SetWrapperClassId(global_handle, class_id);
}
@@ -4733,12 +4794,9 @@ bool v8::String::CanMakeExternal() {
i::Handle<i::String> obj = Utils::OpenHandle(this);
i::Isolate* isolate = obj->GetIsolate();
if (IsDeadCheck(isolate, "v8::String::CanMakeExternal()")) return false;
- if (isolate->string_tracker()->IsFreshUnusedString(obj)) {
- return false;
- }
+ if (isolate->string_tracker()->IsFreshUnusedString(obj)) return false;
int size = obj->Size(); // Byte size of the original string.
- if (size < i::ExternalString::kSize)
- return false;
+ if (size < i::ExternalString::kShortSize) return false;
i::StringShape shape(*obj);
return !shape.IsExternal();
}
@@ -4844,8 +4902,8 @@ double v8::Date::NumberValue() const {
if (IsDeadCheck(isolate, "v8::Date::NumberValue()")) return 0;
LOG_API(isolate, "Date::NumberValue");
i::Handle<i::Object> obj = Utils::OpenHandle(this);
- i::Handle<i::JSValue> jsvalue = i::Handle<i::JSValue>::cast(obj);
- return jsvalue->value()->Number();
+ i::Handle<i::JSDate> jsdate = i::Handle<i::JSDate>::cast(obj);
+ return jsdate->value()->Number();
}
@@ -4856,8 +4914,10 @@ void v8::Date::DateTimeConfigurationChangeNotification() {
LOG_API(isolate, "Date::DateTimeConfigurationChangeNotification");
ENTER_V8(isolate);
+ isolate->date_cache()->ResetDateCache();
+
i::HandleScope scope(isolate);
- // Get the function ResetDateCache (defined in date-delay.js).
+ // Get the function ResetDateCache (defined in date.js).
i::Handle<i::String> func_name_str =
isolate->factory()->LookupAsciiSymbol("ResetDateCache");
i::MaybeObject* result =
@@ -5125,17 +5185,6 @@ void V8::SetFailedAccessCheckCallbackFunction(
isolate->SetFailedAccessCheckCallback(callback);
}
-
-void V8::SetUserObjectComparisonCallbackFunction(
- UserObjectComparisonCallback callback) {
- i::Isolate* isolate = i::Isolate::Current();
- if (IsDeadCheck(isolate, "v8::V8::SetUserObjectComparisonCallbackFunction()")) {
- return;
- }
- isolate->SetUserObjectComparisonCallback(callback);
-}
-
-
void V8::AddObjectGroup(Persistent<Value>* objects,
size_t length,
RetainedObjectInfo* info) {
@@ -5229,6 +5278,23 @@ void V8::RemoveMemoryAllocationCallback(MemoryAllocationCallback callback) {
}
+void V8::AddCallCompletedCallback(CallCompletedCallback callback) {
+ if (callback == NULL) return;
+ i::Isolate::EnsureDefaultIsolate();
+ i::Isolate* isolate = i::Isolate::Current();
+ if (IsDeadCheck(isolate, "v8::V8::AddLeaveScriptCallback()")) return;
+ i::V8::AddCallCompletedCallback(callback);
+}
+
+
+void V8::RemoveCallCompletedCallback(CallCompletedCallback callback) {
+ i::Isolate::EnsureDefaultIsolate();
+ i::Isolate* isolate = i::Isolate::Current();
+ if (IsDeadCheck(isolate, "v8::V8::RemoveLeaveScriptCallback()")) return;
+ i::V8::RemoveCallCompletedCallback(callback);
+}
+
+
void V8::PauseProfiler() {
i::Isolate* isolate = i::Isolate::Current();
isolate->logger()->PauseProfiler();
@@ -5343,7 +5409,8 @@ String::Utf8Value::Utf8Value(v8::Handle<v8::Value> obj)
TryCatch try_catch;
Handle<String> str = obj->ToString();
if (str.IsEmpty()) return;
- length_ = str->Utf8Length();
+ i::Handle<i::String> i_str = Utils::OpenHandle(*str);
+ length_ = i::Utf8Length(i_str);
str_ = i::NewArray<char>(length_ + 1);
str->WriteUtf8(str_);
}
@@ -5700,7 +5767,7 @@ void Debug::DisableAgent() {
void Debug::ProcessDebugMessages() {
- i::Execution::ProcessDebugMesssages(true);
+ i::Execution::ProcessDebugMessages(true);
}
Local<Context> Debug::GetDebugContext() {
@@ -5965,7 +6032,7 @@ Handle<String> HeapGraphNode::GetName() const {
}
-uint64_t HeapGraphNode::GetId() const {
+SnapshotObjectId HeapGraphNode::GetId() const {
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapGraphNode::GetId");
return ToInternal(this)->id();
@@ -5979,10 +6046,10 @@ int HeapGraphNode::GetSelfSize() const {
}
-int HeapGraphNode::GetRetainedSize(bool exact) const {
+int HeapGraphNode::GetRetainedSize() const {
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetRetainedSize");
- return ToInternal(this)->RetainedSize(exact);
+ return ToInternal(this)->retained_size();
}
@@ -6080,7 +6147,7 @@ const HeapGraphNode* HeapSnapshot::GetRoot() const {
}
-const HeapGraphNode* HeapSnapshot::GetNodeById(uint64_t id) const {
+const HeapGraphNode* HeapSnapshot::GetNodeById(SnapshotObjectId id) const {
i::Isolate* isolate = i::Isolate::Current();
IsDeadCheck(isolate, "v8::HeapSnapshot::GetNodeById");
return reinterpret_cast<const HeapGraphNode*>(
@@ -6103,6 +6170,13 @@ const HeapGraphNode* HeapSnapshot::GetNode(int index) const {
}
+SnapshotObjectId HeapSnapshot::GetMaxSnapshotJSObjectId() const {
+ i::Isolate* isolate = i::Isolate::Current();
+ IsDeadCheck(isolate, "v8::HeapSnapshot::GetMaxSnapshotJSObjectId");
+ return ToInternal(this)->max_snapshot_js_object_id();
+}
+
+
void HeapSnapshot::Serialize(OutputStream* stream,
HeapSnapshot::SerializationFormat format) const {
i::Isolate* isolate = i::Isolate::Current();
@@ -6177,6 +6251,11 @@ void HeapProfiler::DefineWrapperClass(uint16_t class_id,
}
+int HeapProfiler::GetPersistentHandleCount() {
+ i::Isolate* isolate = i::Isolate::Current();
+ return isolate->global_handles()->NumberOfGlobalHandles();
+}
+
v8::Testing::StressType internal::Testing::stress_type_ =
v8::Testing::kStressTypeOpt;
@@ -6205,9 +6284,7 @@ static void SetFlagsFromString(const char* flags) {
void Testing::PrepareStressRun(int run) {
static const char* kLazyOptimizations =
- "--prepare-always-opt --nolimit-inlining "
- "--noalways-opt --noopt-eagerly";
- static const char* kEagerOptimizations = "--opt-eagerly";
+ "--prepare-always-opt --nolimit-inlining --noalways-opt";
static const char* kForcedOptimizations = "--always-opt";
// If deoptimization stressed turn on frequent deoptimization. If no value
@@ -6224,15 +6301,12 @@ void Testing::PrepareStressRun(int run) {
if (run == GetStressRuns() - 1) {
SetFlagsFromString(kForcedOptimizations);
} else {
- SetFlagsFromString(kEagerOptimizations);
SetFlagsFromString(kLazyOptimizations);
}
#else
if (run == GetStressRuns() - 1) {
SetFlagsFromString(kForcedOptimizations);
- } else if (run == GetStressRuns() - 2) {
- SetFlagsFromString(kEagerOptimizations);
- } else {
+ } else if (run != GetStressRuns() - 2) {
SetFlagsFromString(kLazyOptimizations);
}
#endif
diff --git a/src/3rdparty/v8/src/api.h b/src/3rdparty/v8/src/api.h
index f41c96e..3ad57f4 100644
--- a/src/3rdparty/v8/src/api.h
+++ b/src/3rdparty/v8/src/api.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -28,10 +28,14 @@
#ifndef V8_API_H_
#define V8_API_H_
-#include "apiutils.h"
-#include "factory.h"
+#include "v8.h"
#include "../include/v8-testing.h"
+#include "apiutils.h"
+#include "contexts.h"
+#include "factory.h"
+#include "isolate.h"
+#include "list-inl.h"
namespace v8 {
@@ -112,7 +116,7 @@ void NeanderObject::set(int offset, v8::internal::Object* value) {
}
-template <typename T> static inline T ToCData(v8::internal::Object* obj) {
+template <typename T> inline T ToCData(v8::internal::Object* obj) {
STATIC_ASSERT(sizeof(T) == sizeof(v8::internal::Address));
return reinterpret_cast<T>(
reinterpret_cast<intptr_t>(
@@ -121,7 +125,7 @@ template <typename T> static inline T ToCData(v8::internal::Object* obj) {
template <typename T>
-static inline v8::internal::Handle<v8::internal::Object> FromCData(T obj) {
+inline v8::internal::Handle<v8::internal::Object> FromCData(T obj) {
STATIC_ASSERT(sizeof(T) == sizeof(v8::internal::Address));
return FACTORY->NewForeign(
reinterpret_cast<v8::internal::Address>(reinterpret_cast<intptr_t>(obj)));
@@ -137,26 +141,20 @@ class ApiFunction {
};
-enum ExtensionTraversalState {
- UNVISITED, VISITED, INSTALLED
-};
-
class RegisteredExtension {
public:
explicit RegisteredExtension(Extension* extension);
static void Register(RegisteredExtension* that);
+ static void UnregisterAll();
Extension* extension() { return extension_; }
RegisteredExtension* next() { return next_; }
RegisteredExtension* next_auto() { return next_auto_; }
- ExtensionTraversalState state() { return state_; }
- void set_state(ExtensionTraversalState value) { state_ = value; }
static RegisteredExtension* first_extension() { return first_extension_; }
private:
Extension* extension_;
RegisteredExtension* next_;
RegisteredExtension* next_auto_;
- ExtensionTraversalState state_;
static RegisteredExtension* first_extension_;
};
@@ -243,7 +241,7 @@ class Utils {
template <class T>
-static inline T* ToApi(v8::internal::Handle<v8::internal::Object> obj) {
+inline T* ToApi(v8::internal::Handle<v8::internal::Object> obj) {
return reinterpret_cast<T*>(obj.location());
}
@@ -484,7 +482,7 @@ class HandleScopeImplementer {
};
-static const int kHandleBlockSize = v8::internal::KB - 2; // fit in one page
+const int kHandleBlockSize = v8::internal::KB - 2; // fit in one page
void HandleScopeImplementer::SaveContext(Context* context) {
diff --git a/src/3rdparty/v8/src/apinatives.js b/src/3rdparty/v8/src/apinatives.js
index e94da9f..79b41dd 100644
--- a/src/3rdparty/v8/src/apinatives.js
+++ b/src/3rdparty/v8/src/apinatives.js
@@ -37,8 +37,8 @@ function CreateDate(time) {
}
-const kApiFunctionCache = {};
-const functionCache = kApiFunctionCache;
+var kApiFunctionCache = {};
+var functionCache = kApiFunctionCache;
function Instantiate(data, name) {
diff --git a/src/3rdparty/v8/src/arm/assembler-arm-inl.h b/src/3rdparty/v8/src/arm/assembler-arm-inl.h
index 7f9f4ce..d5db686 100644
--- a/src/3rdparty/v8/src/arm/assembler-arm-inl.h
+++ b/src/3rdparty/v8/src/arm/assembler-arm-inl.h
@@ -32,12 +32,13 @@
// The original source code covered by the above license above has been modified
// significantly by Google Inc.
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
#ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
#define V8_ARM_ASSEMBLER_ARM_INL_H_
#include "arm/assembler-arm.h"
+
#include "cpu.h"
#include "debug.h"
@@ -46,6 +47,13 @@ namespace v8 {
namespace internal {
+int DwVfpRegister::ToAllocationIndex(DwVfpRegister reg) {
+ ASSERT(!reg.is(kDoubleRegZero));
+ ASSERT(!reg.is(kScratchDoubleReg));
+ return reg.code();
+}
+
+
void RelocInfo::apply(intptr_t delta) {
if (RelocInfo::IsInternalReference(rmode_)) {
// absolute code pointer inside code object moves with the code object.
@@ -64,13 +72,15 @@ Address RelocInfo::target_address() {
Address RelocInfo::target_address_address() {
- ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
+ ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY
+ || rmode_ == EMBEDDED_OBJECT
+ || rmode_ == EXTERNAL_REFERENCE);
return reinterpret_cast<Address>(Assembler::target_address_address_at(pc_));
}
int RelocInfo::target_address_size() {
- return Assembler::kExternalTargetSize;
+ return kPointerSize;
}
@@ -224,7 +234,7 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
} else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
visitor->VisitGlobalPropertyCell(this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
- visitor->VisitExternalReference(target_reference_address());
+ visitor->VisitExternalReference(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
// TODO(isolates): Get a cached isolate below.
} else if (((RelocInfo::IsJSReturn(mode) &&
@@ -250,7 +260,7 @@ void RelocInfo::Visit(Heap* heap) {
} else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
StaticVisitor::VisitGlobalPropertyCell(heap, this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
- StaticVisitor::VisitExternalReference(target_reference_address());
+ StaticVisitor::VisitExternalReference(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
} else if (heap->isolate()->debug()->has_break_points() &&
((RelocInfo::IsJSReturn(mode) &&
@@ -354,8 +364,14 @@ Address Assembler::target_address_at(Address pc) {
}
-void Assembler::set_target_at(Address constant_pool_entry,
- Address target) {
+void Assembler::deserialization_set_special_target_at(
+ Address constant_pool_entry, Address target) {
+ Memory::Address_at(constant_pool_entry) = target;
+}
+
+
+void Assembler::set_external_target_at(Address constant_pool_entry,
+ Address target) {
Memory::Address_at(constant_pool_entry) = target;
}
diff --git a/src/3rdparty/v8/src/arm/assembler-arm.cc b/src/3rdparty/v8/src/arm/assembler-arm.cc
index b71fd15..ec28da4 100644
--- a/src/3rdparty/v8/src/arm/assembler-arm.cc
+++ b/src/3rdparty/v8/src/arm/assembler-arm.cc
@@ -139,7 +139,6 @@ bool RelocInfo::IsCodedSpecially() {
}
-
void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
// Patch the code at the current address with the supplied instructions.
Instr* pc = reinterpret_cast<Instr*>(pc_);
@@ -238,25 +237,27 @@ MemOperand::MemOperand(Register rn, Register rm,
// add(sp, sp, 4) instruction (aka Pop())
const Instr kPopInstruction =
- al | PostIndex | 4 | LeaveCC | I | sp.code() * B16 | sp.code() * B12;
+ al | PostIndex | 4 | LeaveCC | I | kRegister_sp_Code * B16 |
+ kRegister_sp_Code * B12;
// str(r, MemOperand(sp, 4, NegPreIndex), al) instruction (aka push(r))
// register r is not encoded.
const Instr kPushRegPattern =
- al | B26 | 4 | NegPreIndex | sp.code() * B16;
+ al | B26 | 4 | NegPreIndex | kRegister_sp_Code * B16;
// ldr(r, MemOperand(sp, 4, PostIndex), al) instruction (aka pop(r))
// register r is not encoded.
const Instr kPopRegPattern =
- al | B26 | L | 4 | PostIndex | sp.code() * B16;
+ al | B26 | L | 4 | PostIndex | kRegister_sp_Code * B16;
// mov lr, pc
-const Instr kMovLrPc = al | MOV | pc.code() | lr.code() * B12;
+const Instr kMovLrPc = al | MOV | kRegister_pc_Code | kRegister_lr_Code * B12;
// ldr rd, [pc, #offset]
const Instr kLdrPCMask = kCondMask | 15 * B24 | 7 * B20 | 15 * B16;
-const Instr kLdrPCPattern = al | 5 * B24 | L | pc.code() * B16;
+const Instr kLdrPCPattern = al | 5 * B24 | L | kRegister_pc_Code * B16;
// blxcc rm
const Instr kBlxRegMask =
15 * B24 | 15 * B20 | 15 * B16 | 15 * B12 | 15 * B8 | 15 * B4;
const Instr kBlxRegPattern =
B24 | B21 | 15 * B16 | 15 * B12 | 15 * B8 | BLX;
+const Instr kBlxIp = al | kBlxRegPattern | ip.code();
const Instr kMovMvnMask = 0x6d * B21 | 0xf * B16;
const Instr kMovMvnPattern = 0xd * B21;
const Instr kMovMvnFlip = B22;
@@ -273,13 +274,13 @@ const Instr kAndBicFlip = 0xe * B21;
// A mask for the Rd register for push, pop, ldr, str instructions.
const Instr kLdrRegFpOffsetPattern =
- al | B26 | L | Offset | fp.code() * B16;
+ al | B26 | L | Offset | kRegister_fp_Code * B16;
const Instr kStrRegFpOffsetPattern =
- al | B26 | Offset | fp.code() * B16;
+ al | B26 | Offset | kRegister_fp_Code * B16;
const Instr kLdrRegFpNegOffsetPattern =
- al | B26 | L | NegOffset | fp.code() * B16;
+ al | B26 | L | NegOffset | kRegister_fp_Code * B16;
const Instr kStrRegFpNegOffsetPattern =
- al | B26 | NegOffset | fp.code() * B16;
+ al | B26 | NegOffset | kRegister_fp_Code * B16;
const Instr kLdrStrInstrTypeMask = 0xffff0000;
const Instr kLdrStrInstrArgumentMask = 0x0000ffff;
const Instr kLdrStrOffsetMask = 0x00000fff;
@@ -319,7 +320,7 @@ Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
own_buffer_ = false;
}
- // Setup buffer pointers.
+ // Set up buffer pointers.
ASSERT(buffer_ != NULL);
pc_ = buffer_;
reloc_info_writer.Reposition(buffer_ + buffer_size, pc_);
@@ -351,7 +352,7 @@ void Assembler::GetCode(CodeDesc* desc) {
CheckConstPool(true, false);
ASSERT(num_pending_reloc_info_ == 0);
- // Setup code descriptor.
+ // Set up code descriptor.
desc->buffer = buffer_;
desc->buffer_size = buffer_size_;
desc->instr_size = pc_offset();
@@ -2448,7 +2449,7 @@ void Assembler::GrowBuffer() {
}
CHECK_GT(desc.buffer_size, 0); // no overflow
- // Setup new buffer.
+ // Set up new buffer.
desc.buffer = NewArray<byte>(desc.buffer_size);
desc.instr_size = pc_offset();
diff --git a/src/3rdparty/v8/src/arm/assembler-arm.h b/src/3rdparty/v8/src/arm/assembler-arm.h
index 247479d..e2d5f59 100644
--- a/src/3rdparty/v8/src/arm/assembler-arm.h
+++ b/src/3rdparty/v8/src/arm/assembler-arm.h
@@ -32,7 +32,7 @@
// The original source code covered by the above license above has been
// modified significantly by Google Inc.
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// A light-weight ARM Assembler
// Generates user mode instructions for the ARM architecture up to version 5
@@ -124,24 +124,47 @@ struct Register {
int code_;
};
-const Register no_reg = { -1 };
-
-const Register r0 = { 0 };
-const Register r1 = { 1 };
-const Register r2 = { 2 };
-const Register r3 = { 3 };
-const Register r4 = { 4 };
-const Register r5 = { 5 };
-const Register r6 = { 6 };
-const Register r7 = { 7 };
-const Register r8 = { 8 }; // Used as context register.
-const Register r9 = { 9 }; // Used as lithium codegen scratch register.
-const Register r10 = { 10 }; // Used as roots register.
-const Register fp = { 11 };
-const Register ip = { 12 };
-const Register sp = { 13 };
-const Register lr = { 14 };
-const Register pc = { 15 };
+// These constants are used in several locations, including static initializers
+const int kRegister_no_reg_Code = -1;
+const int kRegister_r0_Code = 0;
+const int kRegister_r1_Code = 1;
+const int kRegister_r2_Code = 2;
+const int kRegister_r3_Code = 3;
+const int kRegister_r4_Code = 4;
+const int kRegister_r5_Code = 5;
+const int kRegister_r6_Code = 6;
+const int kRegister_r7_Code = 7;
+const int kRegister_r8_Code = 8;
+const int kRegister_r9_Code = 9;
+const int kRegister_r10_Code = 10;
+const int kRegister_fp_Code = 11;
+const int kRegister_ip_Code = 12;
+const int kRegister_sp_Code = 13;
+const int kRegister_lr_Code = 14;
+const int kRegister_pc_Code = 15;
+
+const Register no_reg = { kRegister_no_reg_Code };
+
+const Register r0 = { kRegister_r0_Code };
+const Register r1 = { kRegister_r1_Code };
+const Register r2 = { kRegister_r2_Code };
+const Register r3 = { kRegister_r3_Code };
+const Register r4 = { kRegister_r4_Code };
+const Register r5 = { kRegister_r5_Code };
+const Register r6 = { kRegister_r6_Code };
+const Register r7 = { kRegister_r7_Code };
+// Used as context register.
+const Register r8 = { kRegister_r8_Code };
+// Used as lithium codegen scratch register.
+const Register r9 = { kRegister_r9_Code };
+// Used as roots register.
+const Register r10 = { kRegister_r10_Code };
+const Register fp = { kRegister_fp_Code };
+const Register ip = { kRegister_ip_Code };
+const Register sp = { kRegister_sp_Code };
+const Register lr = { kRegister_lr_Code };
+const Register pc = { kRegister_pc_Code };
+
// Single word VFP register.
struct SwVfpRegister {
@@ -176,14 +199,11 @@ struct DwVfpRegister {
static const int kNumAllocatableRegisters = kNumRegisters -
kNumReservedRegisters;
- static int ToAllocationIndex(DwVfpRegister reg) {
- ASSERT(reg.code() != 0);
- return reg.code() - 1;
- }
+ inline static int ToAllocationIndex(DwVfpRegister reg);
static DwVfpRegister FromAllocationIndex(int index) {
ASSERT(index >= 0 && index < kNumAllocatableRegisters);
- return from_code(index + 1);
+ return from_code(index);
}
static const char* AllocationIndexToString(int index) {
@@ -303,10 +323,13 @@ const DwVfpRegister d13 = { 13 };
const DwVfpRegister d14 = { 14 };
const DwVfpRegister d15 = { 15 };
-// Aliases for double registers.
-static const DwVfpRegister& kFirstCalleeSavedDoubleReg = d8;
-static const DwVfpRegister& kLastCalleeSavedDoubleReg = d15;
-static const DwVfpRegister& kDoubleRegZero = d14;
+// Aliases for double registers. Defined using #define instead of
+// "static const DwVfpRegister&" because Clang complains otherwise when a
+// compilation unit that includes this header doesn't use the variables.
+#define kFirstCalleeSavedDoubleReg d8
+#define kLastCalleeSavedDoubleReg d15
+#define kDoubleRegZero d14
+#define kScratchDoubleReg d15
// Coprocessor register
@@ -581,6 +604,7 @@ extern const Instr kLdrPCMask;
extern const Instr kLdrPCPattern;
extern const Instr kBlxRegMask;
extern const Instr kBlxRegPattern;
+extern const Instr kBlxIp;
extern const Instr kMovMvnMask;
extern const Instr kMovMvnPattern;
@@ -662,20 +686,18 @@ class Assembler : public AssemblerBase {
// This sets the branch destination (which is in the constant pool on ARM).
// This is for calls and branches within generated code.
- inline static void set_target_at(Address constant_pool_entry, Address target);
+ inline static void deserialization_set_special_target_at(
+ Address constant_pool_entry, Address target);
// This sets the branch destination (which is in the constant pool on ARM).
// This is for calls and branches to runtime code.
inline static void set_external_target_at(Address constant_pool_entry,
- Address target) {
- set_target_at(constant_pool_entry, target);
- }
+ Address target);
// Here we are patching the address in the constant pool, not the actual call
// instruction. The address in the constant pool is the same size as a
// pointer.
- static const int kCallTargetSize = kPointerSize;
- static const int kExternalTargetSize = kPointerSize;
+ static const int kSpecialTargetSize = kPointerSize;
// Size of an instruction.
static const int kInstrSize = sizeof(Instr);
diff --git a/src/3rdparty/v8/src/arm/builtins-arm.cc b/src/3rdparty/v8/src/arm/builtins-arm.cc
index d0136f5..c99e778 100644
--- a/src/3rdparty/v8/src/arm/builtins-arm.cc
+++ b/src/3rdparty/v8/src/arm/builtins-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -72,6 +72,22 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
}
+// Load the built-in InternalArray function from the current context.
+static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
+ Register result) {
+ // Load the global context.
+
+ __ ldr(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ ldr(result,
+ FieldMemOperand(result, GlobalObject::kGlobalContextOffset));
+ // Load the InternalArray function from the global context.
+ __ ldr(result,
+ MemOperand(result,
+ Context::SlotOffset(
+ Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
+}
+
+
// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
// Load the global context.
@@ -98,9 +114,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
Label* gc_required) {
const int initial_capacity = JSArray::kPreallocatedArrayElements;
STATIC_ASSERT(initial_capacity >= 0);
- // Load the initial map from the array function.
- __ ldr(scratch1, FieldMemOperand(array_function,
- JSFunction::kPrototypeOrInitialMapOffset));
+ __ LoadInitialArrayMap(array_function, scratch2, scratch1);
// Allocate the JSArray object together with space for a fixed array with the
// requested elements.
@@ -194,9 +208,7 @@ static void AllocateJSArray(MacroAssembler* masm,
bool fill_with_hole,
Label* gc_required) {
// Load the initial map from the array function.
- __ ldr(elements_array_storage,
- FieldMemOperand(array_function,
- JSFunction::kPrototypeOrInitialMapOffset));
+ __ LoadInitialArrayMap(array_function, scratch2, elements_array_storage);
if (FLAG_debug_code) { // Assert that array size is not zero.
__ tst(array_size, array_size);
@@ -300,7 +312,8 @@ static void AllocateJSArray(MacroAssembler* masm,
static void ArrayNativeCode(MacroAssembler* masm,
Label* call_generic_code) {
Counters* counters = masm->isolate()->counters();
- Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array;
+ Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array,
+ has_non_smi_element, finish, cant_transition_map, not_double;
// Check for array construction with zero arguments or one.
__ cmp(r0, Operand(0, RelocInfo::NONE));
@@ -316,7 +329,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
r5,
call_generic_code);
__ IncrementCounter(counters->array_function_native(), 1, r3, r4);
- // Setup return value, remove receiver from stack and return.
+ // Set up return value, remove receiver from stack and return.
__ mov(r0, r2);
__ add(sp, sp, Operand(kPointerSize));
__ Jump(lr);
@@ -359,7 +372,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
true,
call_generic_code);
__ IncrementCounter(counters->array_function_native(), 1, r2, r4);
- // Setup return value, remove receiver and argument from stack and return.
+ // Set up return value, remove receiver and argument from stack and return.
__ mov(r0, r3);
__ add(sp, sp, Operand(2 * kPointerSize));
__ Jump(lr);
@@ -394,14 +407,21 @@ static void ArrayNativeCode(MacroAssembler* masm,
// r5: elements_array_end (untagged)
// sp[0]: last argument
Label loop, entry;
+ __ mov(r7, sp);
__ jmp(&entry);
__ bind(&loop);
- __ ldr(r2, MemOperand(sp, kPointerSize, PostIndex));
+ __ ldr(r2, MemOperand(r7, kPointerSize, PostIndex));
+ if (FLAG_smi_only_arrays) {
+ __ JumpIfNotSmi(r2, &has_non_smi_element);
+ }
__ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
__ bind(&entry);
__ cmp(r4, r5);
__ b(lt, &loop);
+ __ bind(&finish);
+ __ mov(sp, r7);
+
// Remove caller arguments and receiver from the stack, setup return value and
// return.
// r0: argc
@@ -410,6 +430,75 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ add(sp, sp, Operand(kPointerSize));
__ mov(r0, r3);
__ Jump(lr);
+
+ __ bind(&has_non_smi_element);
+ // Double values are handled by the runtime.
+ __ CheckMap(
+ r2, r9, Heap::kHeapNumberMapRootIndex, &not_double, DONT_DO_SMI_CHECK);
+ __ bind(&cant_transition_map);
+ __ UndoAllocationInNewSpace(r3, r4);
+ __ b(call_generic_code);
+
+ __ bind(&not_double);
+ // Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
+ // r3: JSArray
+ __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ r2,
+ r9,
+ &cant_transition_map);
+ __ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
+ __ RecordWriteField(r3,
+ HeapObject::kMapOffset,
+ r2,
+ r9,
+ kLRHasNotBeenSaved,
+ kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ Label loop2;
+ __ sub(r7, r7, Operand(kPointerSize));
+ __ bind(&loop2);
+ __ ldr(r2, MemOperand(r7, kPointerSize, PostIndex));
+ __ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
+ __ cmp(r4, r5);
+ __ b(lt, &loop2);
+ __ b(&finish);
+}
+
+
+void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- r0 : number of arguments
+ // -- lr : return address
+ // -- sp[...]: constructor arguments
+ // -----------------------------------
+ Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
+
+ // Get the InternalArray function.
+ GenerateLoadInternalArrayFunction(masm, r1);
+
+ if (FLAG_debug_code) {
+ // Initial map for the builtin InternalArray functions should be maps.
+ __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
+ __ tst(r2, Operand(kSmiTagMask));
+ __ Assert(ne, "Unexpected initial map for InternalArray function");
+ __ CompareObjectType(r2, r3, r4, MAP_TYPE);
+ __ Assert(eq, "Unexpected initial map for InternalArray function");
+ }
+
+ // Run the native code for the InternalArray function called as a normal
+ // function.
+ ArrayNativeCode(masm, &generic_array_code);
+
+ // Jump to the generic array code if the specialized code cannot handle the
+ // construction.
+ __ bind(&generic_array_code);
+
+ Handle<Code> array_code =
+ masm->isolate()->builtins()->InternalArrayCodeGeneric();
+ __ Jump(array_code, RelocInfo::CODE_TARGET);
}
@@ -607,7 +696,9 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
}
-void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
+static void Generate_JSConstructStubHelper(MacroAssembler* masm,
+ bool is_api_function,
+ bool count_constructions) {
// ----------- S t a t e -------------
// -- r0 : number of arguments
// -- r1 : constructor function
@@ -615,42 +706,6 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
// -- sp[...]: constructor arguments
// -----------------------------------
- Label slow, non_function_call;
- // Check that the function is not a smi.
- __ JumpIfSmi(r1, &non_function_call);
- // Check that the function is a JSFunction.
- __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
- __ b(ne, &slow);
-
- // Jump to the function-specific construct stub.
- __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
- __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kConstructStubOffset));
- __ add(pc, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
-
- // r0: number of arguments
- // r1: called object
- // r2: object type
- Label do_call;
- __ bind(&slow);
- __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE));
- __ b(ne, &non_function_call);
- __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
- __ jmp(&do_call);
-
- __ bind(&non_function_call);
- __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
- __ bind(&do_call);
- // Set expected number of arguments to zero (not changing r0).
- __ mov(r2, Operand(0, RelocInfo::NONE));
- __ SetCallKind(r5, CALL_AS_METHOD);
- __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
- RelocInfo::CODE_TARGET);
-}
-
-
-static void Generate_JSConstructStubHelper(MacroAssembler* masm,
- bool is_api_function,
- bool count_constructions) {
// Should never count constructions for api objects.
ASSERT(!is_api_function || !count_constructions);
@@ -873,40 +928,31 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// r4: JSObject
__ bind(&allocated);
__ push(r4);
+ __ push(r4);
- // Push the function and the allocated receiver from the stack.
- // sp[0]: receiver (newly allocated object)
- // sp[1]: constructor function
- // sp[2]: number of arguments (smi-tagged)
- __ ldr(r1, MemOperand(sp, kPointerSize));
- __ push(r1); // Constructor function.
- __ push(r4); // Receiver.
-
- // Reload the number of arguments from the stack.
- // r1: constructor function
+ // Reload the number of arguments and the constructor from the stack.
// sp[0]: receiver
- // sp[1]: constructor function
- // sp[2]: receiver
- // sp[3]: constructor function
- // sp[4]: number of arguments (smi-tagged)
- __ ldr(r3, MemOperand(sp, 4 * kPointerSize));
+ // sp[1]: receiver
+ // sp[2]: constructor function
+ // sp[3]: number of arguments (smi-tagged)
+ __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
+ __ ldr(r3, MemOperand(sp, 3 * kPointerSize));
- // Setup pointer to last argument.
+ // Set up pointer to last argument.
__ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
- // Setup number of arguments for function call below
+ // Set up number of arguments for function call below
__ mov(r0, Operand(r3, LSR, kSmiTagSize));
// Copy arguments and receiver to the expression stack.
// r0: number of arguments
- // r2: address of last argument (caller sp)
// r1: constructor function
+ // r2: address of last argument (caller sp)
// r3: number of arguments (smi-tagged)
// sp[0]: receiver
- // sp[1]: constructor function
- // sp[2]: receiver
- // sp[3]: constructor function
- // sp[4]: number of arguments (smi-tagged)
+ // sp[1]: receiver
+ // sp[2]: constructor function
+ // sp[3]: number of arguments (smi-tagged)
Label loop, entry;
__ b(&entry);
__ bind(&loop);
@@ -932,12 +978,10 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
NullCallWrapper(), CALL_AS_METHOD);
}
- // Pop the function from the stack.
- // sp[0]: constructor function
- // sp[2]: receiver
- // sp[3]: constructor function
- // sp[4]: number of arguments (smi-tagged)
- __ pop();
+ // Store offset of return address for deoptimizer.
+ if (!is_api_function && !count_constructions) {
+ masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
+ }
// Restore context from the frame.
// r0: result
@@ -1022,10 +1066,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Set up the context from the function argument.
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
- // Set up the roots register.
- ExternalReference roots_array_start =
- ExternalReference::roots_array_start(masm->isolate());
- __ mov(r10, Operand(roots_array_start));
+ __ InitializeRootRegister();
// Push the function and the receiver onto the stack.
__ push(r1);
@@ -1060,7 +1101,8 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Invoke the code and pass argc as r0.
__ mov(r0, Operand(r3));
if (is_construct) {
- __ Call(masm->isolate()->builtins()->JSConstructCall());
+ CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+ __ CallStub(&stub);
} else {
ParameterCount actual(r0);
__ InvokeFunction(r1, actual, CALL_FUNCTION,
@@ -1240,7 +1282,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
// 1. Make sure we have at least one argument.
// r0: actual number of arguments
{ Label done;
- __ tst(r0, Operand(r0));
+ __ cmp(r0, Operand(0));
__ b(ne, &done);
__ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
__ push(r2);
@@ -1703,6 +1745,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ bind(&invoke);
__ Call(r3);
+ // Store offset of return address for deoptimizer.
+ masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
+
// Exit frame and return.
LeaveArgumentsAdaptorFrame(masm);
__ Jump(lr);
diff --git a/src/3rdparty/v8/src/arm/code-stubs-arm.cc b/src/3rdparty/v8/src/arm/code-stubs-arm.cc
index 1a569b2..f772db9 100644
--- a/src/3rdparty/v8/src/arm/code-stubs-arm.cc
+++ b/src/3rdparty/v8/src/arm/code-stubs-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -98,9 +98,9 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
&gc,
TAG_OBJECT);
- int map_index = strict_mode_ == kStrictMode
- ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
- : Context::FUNCTION_MAP_INDEX;
+ int map_index = (language_mode_ == CLASSIC_MODE)
+ ? Context::FUNCTION_MAP_INDEX
+ : Context::STRICT_MODE_FUNCTION_MAP_INDEX;
// Compute the function map in the current global context and set that
// as the map of the allocated object.
@@ -122,7 +122,6 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
__ str(r1, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
__ str(r4, FieldMemOperand(r0, JSFunction::kNextFunctionLinkOffset));
-
// Initialize the code pointer in the function to be the one
// found in the shared function info object.
__ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset));
@@ -156,25 +155,19 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
// Load the function from the stack.
__ ldr(r3, MemOperand(sp, 0));
- // Setup the object header.
- __ LoadRoot(r2, Heap::kFunctionContextMapRootIndex);
- __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
+ // Set up the object header.
+ __ LoadRoot(r1, Heap::kFunctionContextMapRootIndex);
__ mov(r2, Operand(Smi::FromInt(length)));
__ str(r2, FieldMemOperand(r0, FixedArray::kLengthOffset));
+ __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
- // Setup the fixed slots.
+ // Set up the fixed slots, copy the global object from the previous context.
+ __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ mov(r1, Operand(Smi::FromInt(0)));
__ str(r3, MemOperand(r0, Context::SlotOffset(Context::CLOSURE_INDEX)));
__ str(cp, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX)));
__ str(r1, MemOperand(r0, Context::SlotOffset(Context::EXTENSION_INDEX)));
-
- // Copy the global object from the previous context.
- __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ str(r1, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_INDEX)));
-
- // Copy the qml global object from the surrounding context.
- __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::QML_GLOBAL_INDEX)));
- __ str(r1, MemOperand(r0, Context::SlotOffset(Context::QML_GLOBAL_INDEX)));
+ __ str(r2, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_INDEX)));
// Initialize the rest of the slots to undefined.
__ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
@@ -211,7 +204,7 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
// Load the serialized scope info from the stack.
__ ldr(r1, MemOperand(sp, 1 * kPointerSize));
- // Setup the object header.
+ // Set up the object header.
__ LoadRoot(r2, Heap::kBlockContextMapRootIndex);
__ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
__ mov(r2, Operand(Smi::FromInt(length)));
@@ -233,18 +226,12 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ ldr(r3, ContextOperand(r3, Context::CLOSURE_INDEX));
__ bind(&after_sentinel);
- // Setup the fixed slots.
+ // Set up the fixed slots, copy the global object from the previous context.
+ __ ldr(r2, ContextOperand(cp, Context::GLOBAL_INDEX));
__ str(r3, ContextOperand(r0, Context::CLOSURE_INDEX));
__ str(cp, ContextOperand(r0, Context::PREVIOUS_INDEX));
__ str(r1, ContextOperand(r0, Context::EXTENSION_INDEX));
-
- // Copy the global object from the previous context.
- __ ldr(r1, ContextOperand(cp, Context::GLOBAL_INDEX));
- __ str(r1, ContextOperand(r0, Context::GLOBAL_INDEX));
-
- // Copy the qml global object from the surrounding context.
- __ ldr(r1, ContextOperand(cp, Context::QML_GLOBAL_INDEX));
- __ str(r1, ContextOperand(r0, Context::QML_GLOBAL_INDEX));
+ __ str(r2, ContextOperand(r0, Context::GLOBAL_INDEX));
// Initialize the rest of the slots to the hole value.
__ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
@@ -263,22 +250,62 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
}
-void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
- // Stack layout on entry:
+static void GenerateFastCloneShallowArrayCommon(
+ MacroAssembler* masm,
+ int length,
+ FastCloneShallowArrayStub::Mode mode,
+ Label* fail) {
+ // Registers on entry:
//
- // [sp]: constant elements.
- // [sp + kPointerSize]: literal index.
- // [sp + (2 * kPointerSize)]: literals array.
+ // r3: boilerplate literal array.
+ ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
// All sizes here are multiples of kPointerSize.
int elements_size = 0;
- if (length_ > 0) {
- elements_size = mode_ == CLONE_DOUBLE_ELEMENTS
- ? FixedDoubleArray::SizeFor(length_)
- : FixedArray::SizeFor(length_);
+ if (length > 0) {
+ elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+ ? FixedDoubleArray::SizeFor(length)
+ : FixedArray::SizeFor(length);
}
int size = JSArray::kSize + elements_size;
+ // Allocate both the JS array and the elements array in one big
+ // allocation. This avoids multiple limit checks.
+ __ AllocateInNewSpace(size,
+ r0,
+ r1,
+ r2,
+ fail,
+ TAG_OBJECT);
+
+ // Copy the JS array part.
+ for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
+ if ((i != JSArray::kElementsOffset) || (length == 0)) {
+ __ ldr(r1, FieldMemOperand(r3, i));
+ __ str(r1, FieldMemOperand(r0, i));
+ }
+ }
+
+ if (length > 0) {
+ // Get hold of the elements array of the boilerplate and setup the
+ // elements pointer in the resulting object.
+ __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
+ __ add(r2, r0, Operand(JSArray::kSize));
+ __ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset));
+
+ // Copy the elements array.
+ ASSERT((elements_size % kPointerSize) == 0);
+ __ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize);
+ }
+}
+
+void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
+ // Stack layout on entry:
+ //
+ // [sp]: constant elements.
+ // [sp + kPointerSize]: literal index.
+ // [sp + (2 * kPointerSize)]: literals array.
+
// Load boilerplate object into r3 and check if we need to create a
// boilerplate.
Label slow_case;
@@ -286,68 +313,109 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
__ ldr(r0, MemOperand(sp, 1 * kPointerSize));
__ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
- __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
- __ cmp(r3, ip);
+ __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
__ b(eq, &slow_case);
+ FastCloneShallowArrayStub::Mode mode = mode_;
+ if (mode == CLONE_ANY_ELEMENTS) {
+ Label double_elements, check_fast_elements;
+ __ ldr(r0, FieldMemOperand(r3, JSArray::kElementsOffset));
+ __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
+ __ CompareRoot(r0, Heap::kFixedCOWArrayMapRootIndex);
+ __ b(ne, &check_fast_elements);
+ GenerateFastCloneShallowArrayCommon(masm, 0,
+ COPY_ON_WRITE_ELEMENTS, &slow_case);
+ // Return and remove the on-stack parameters.
+ __ add(sp, sp, Operand(3 * kPointerSize));
+ __ Ret();
+
+ __ bind(&check_fast_elements);
+ __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex);
+ __ b(ne, &double_elements);
+ GenerateFastCloneShallowArrayCommon(masm, length_,
+ CLONE_ELEMENTS, &slow_case);
+ // Return and remove the on-stack parameters.
+ __ add(sp, sp, Operand(3 * kPointerSize));
+ __ Ret();
+
+ __ bind(&double_elements);
+ mode = CLONE_DOUBLE_ELEMENTS;
+ // Fall through to generate the code to handle double elements.
+ }
+
if (FLAG_debug_code) {
const char* message;
Heap::RootListIndex expected_map_index;
- if (mode_ == CLONE_ELEMENTS) {
+ if (mode == CLONE_ELEMENTS) {
message = "Expected (writable) fixed array";
expected_map_index = Heap::kFixedArrayMapRootIndex;
- } else if (mode_ == CLONE_DOUBLE_ELEMENTS) {
+ } else if (mode == CLONE_DOUBLE_ELEMENTS) {
message = "Expected (writable) fixed double array";
expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
} else {
- ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
+ ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
message = "Expected copy-on-write fixed array";
expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
}
__ push(r3);
__ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
__ ldr(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
- __ LoadRoot(ip, expected_map_index);
- __ cmp(r3, ip);
+ __ CompareRoot(r3, expected_map_index);
__ Assert(eq, message);
__ pop(r3);
}
- // Allocate both the JS array and the elements array in one big
- // allocation. This avoids multiple limit checks.
- __ AllocateInNewSpace(size,
- r0,
- r1,
- r2,
- &slow_case,
- TAG_OBJECT);
+ GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
- // Copy the JS array part.
- for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
- if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
- __ ldr(r1, FieldMemOperand(r3, i));
- __ str(r1, FieldMemOperand(r0, i));
- }
- }
+ // Return and remove the on-stack parameters.
+ __ add(sp, sp, Operand(3 * kPointerSize));
+ __ Ret();
- if (length_ > 0) {
- // Get hold of the elements array of the boilerplate and setup the
- // elements pointer in the resulting object.
- __ ldr(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
- __ add(r2, r0, Operand(JSArray::kSize));
- __ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset));
+ __ bind(&slow_case);
+ __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
+}
- // Copy the elements array.
- ASSERT((elements_size % kPointerSize) == 0);
- __ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize);
+
+void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) {
+ // Stack layout on entry:
+ //
+ // [sp]: object literal flags.
+ // [sp + kPointerSize]: constant properties.
+ // [sp + (2 * kPointerSize)]: literal index.
+ // [sp + (3 * kPointerSize)]: literals array.
+
+ // Load boilerplate object into r3 and check if we need to create a
+ // boilerplate.
+ Label slow_case;
+ __ ldr(r3, MemOperand(sp, 3 * kPointerSize));
+ __ ldr(r0, MemOperand(sp, 2 * kPointerSize));
+ __ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ ldr(r3, MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
+ __ b(eq, &slow_case);
+
+ // Check that the boilerplate contains only fast properties and we can
+ // statically determine the instance size.
+ int size = JSObject::kHeaderSize + length_ * kPointerSize;
+ __ ldr(r0, FieldMemOperand(r3, HeapObject::kMapOffset));
+ __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceSizeOffset));
+ __ cmp(r0, Operand(size >> kPointerSizeLog2));
+ __ b(ne, &slow_case);
+
+ // Allocate the JS object and copy header together with all in-object
+ // properties from the boilerplate.
+ __ AllocateInNewSpace(size, r0, r1, r2, &slow_case, TAG_OBJECT);
+ for (int i = 0; i < size; i += kPointerSize) {
+ __ ldr(r1, FieldMemOperand(r3, i));
+ __ str(r1, FieldMemOperand(r0, i));
}
// Return and remove the on-stack parameters.
- __ add(sp, sp, Operand(3 * kPointerSize));
+ __ add(sp, sp, Operand(4 * kPointerSize));
__ Ret();
__ bind(&slow_case);
- __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
+ __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1);
}
@@ -412,7 +480,7 @@ void ConvertToDoubleStub::Generate(MacroAssembler* masm) {
__ b(gt, &not_special);
// For 1 or -1 we need to or in the 0 exponent (biased to 1023).
- static const uint32_t exponent_word_for_1 =
+ const uint32_t exponent_word_for_1 =
HeapNumber::kExponentBias << HeapNumber::kExponentShift;
__ orr(exponent, exponent, Operand(exponent_word_for_1), LeaveCC, eq);
// 1, 0 and -1 all have 0 for the second word.
@@ -515,7 +583,9 @@ void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
Label is_smi, done;
- __ JumpIfSmi(object, &is_smi);
+ // Smi-check
+ __ UntagAndJumpIfSmi(scratch1, object, &is_smi);
+ // Heap number check
__ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number);
// Handle loading a double from a heap number.
@@ -537,7 +607,6 @@ void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
if (CpuFeatures::IsSupported(VFP3)) {
CpuFeatures::Scope scope(VFP3);
// Convert smi to double using VFP instructions.
- __ SmiUntag(scratch1, object);
__ vmov(dst.high(), scratch1);
__ vcvt_f64_s32(dst, dst.high());
if (destination == kCoreRegisters) {
@@ -572,11 +641,10 @@ void FloatingPointHelper::ConvertNumberToInt32(MacroAssembler* masm,
Heap::kHeapNumberMapRootIndex,
"HeapNumberMap register clobbered.");
}
- Label is_smi;
Label done;
Label not_in_int32_range;
- __ JumpIfSmi(object, &is_smi);
+ __ UntagAndJumpIfSmi(dst, object, &done);
__ ldr(scratch1, FieldMemOperand(object, HeapNumber::kMapOffset));
__ cmp(scratch1, heap_number_map);
__ b(ne, not_number);
@@ -596,10 +664,6 @@ void FloatingPointHelper::ConvertNumberToInt32(MacroAssembler* masm,
scratch1,
scratch2,
scratch3);
- __ jmp(&done);
-
- __ bind(&is_smi);
- __ SmiUntag(dst, object);
__ bind(&done);
}
@@ -642,7 +706,7 @@ void FloatingPointHelper::ConvertIntToDouble(MacroAssembler* masm,
// Get the absolute value of the object (as an unsigned integer).
__ rsb(int_scratch, int_scratch, Operand::Zero(), SetCC, mi);
- // Get mantisssa[51:20].
+ // Get mantissa[51:20].
// Get the position of the first set bit.
__ CountLeadingZeros(dst1, int_scratch, scratch2);
@@ -772,10 +836,7 @@ void FloatingPointHelper::LoadNumberAsInt32(MacroAssembler* masm,
Label done;
- // Untag the object into the destination register.
- __ SmiUntag(dst, object);
- // Just return if the object is a smi.
- __ JumpIfSmi(object, &done);
+ __ UntagAndJumpIfSmi(dst, object, &done);
if (FLAG_debug_code) {
__ AbortIfNotRootValue(heap_number_map,
@@ -876,7 +937,7 @@ void FloatingPointHelper::DoubleIs32BitInteger(MacroAssembler* masm,
// non zero bits left. So we need the (30 - exponent) last bits of the
// 31 higher bits of the mantissa to be null.
// Because bits [21:0] are null, we can check instead that the
- // (32 - exponent) last bits of the 32 higher bits of the mantisssa are null.
+ // (32 - exponent) last bits of the 32 higher bits of the mantissa are null.
// Get the 32 higher bits of the mantissa in dst.
__ Ubfx(dst,
@@ -1573,37 +1634,6 @@ void CompareStub::Generate(MacroAssembler* masm) {
// NOTICE! This code is only reached after a smi-fast-case check, so
// it is certain that at least one operand isn't a smi.
- {
- Label not_user_equal, user_equal;
- __ and_(r2, r1, Operand(r0));
- __ tst(r2, Operand(kSmiTagMask));
- __ b(eq, &not_user_equal);
-
- __ CompareObjectType(r0, r2, r4, JS_OBJECT_TYPE);
- __ b(ne, &not_user_equal);
-
- __ CompareObjectType(r1, r3, r4, JS_OBJECT_TYPE);
- __ b(ne, &not_user_equal);
-
- __ ldrb(r2, FieldMemOperand(r2, Map::kBitField2Offset));
- __ and_(r2, r2, Operand(1 << Map::kUseUserObjectComparison));
- __ cmp(r2, Operand(1 << Map::kUseUserObjectComparison));
- __ b(eq, &user_equal);
-
- __ ldrb(r3, FieldMemOperand(r3, Map::kBitField2Offset));
- __ and_(r3, r3, Operand(1 << Map::kUseUserObjectComparison));
- __ cmp(r3, Operand(1 << Map::kUseUserObjectComparison));
- __ b(ne, &not_user_equal);
-
- __ bind(&user_equal);
-
- __ Push(r0, r1);
- __ TailCallRuntime(Runtime::kUserObjectEquals, 2, 1);
-
- __ bind(&not_user_equal);
- }
-
-
// Handle the case where the objects are identical. Either returns the answer
// or goes to slow. Only falls through if the objects were not identical.
EmitIdenticalObjectComparison(masm, &slow, cc_, never_nan_nan_);
@@ -2294,7 +2324,7 @@ void BinaryOpStub::GenerateSmiSmiOperation(MacroAssembler* masm) {
__ cmp(ip, Operand(scratch2));
__ b(ne, &not_smi_result);
// Go slow on zero result to handle -0.
- __ tst(scratch1, Operand(scratch1));
+ __ cmp(scratch1, Operand(0));
__ mov(right, Operand(scratch1), LeaveCC, ne);
__ Ret(ne);
// We need -0 if we were multiplying a negative number with 0 to get 0.
@@ -3266,10 +3296,12 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
// Check if cache matches: Double value is stored in uint32_t[2] array.
__ ldm(ia, cache_entry, r4.bit() | r5.bit() | r6.bit());
__ cmp(r2, r4);
- __ b(ne, &calculate);
- __ cmp(r3, r5);
+ __ cmp(r3, r5, eq);
__ b(ne, &calculate);
// Cache hit. Load result, cleanup and return.
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(
+ counters->transcendental_cache_hit(), 1, scratch0, scratch1);
if (tagged) {
// Pop input value from stack and load result into r0.
__ pop();
@@ -3282,6 +3314,9 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
} // if (CpuFeatures::IsSupported(VFP3))
__ bind(&calculate);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(
+ counters->transcendental_cache_miss(), 1, scratch0, scratch1);
if (tagged) {
__ bind(&invalid_cache);
ExternalReference runtime_function =
@@ -3369,6 +3404,10 @@ void TranscendentalCacheStub::GenerateCallCFunction(MacroAssembler* masm,
__ CallCFunction(ExternalReference::math_cos_double_function(isolate),
0, 1);
break;
+ case TranscendentalCache::TAN:
+ __ CallCFunction(ExternalReference::math_tan_double_function(isolate),
+ 0, 1);
+ break;
case TranscendentalCache::LOG:
__ CallCFunction(ExternalReference::math_log_double_function(isolate),
0, 1);
@@ -3386,6 +3425,7 @@ Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
// Add more cases when necessary.
case TranscendentalCache::SIN: return Runtime::kMath_sin;
case TranscendentalCache::COS: return Runtime::kMath_cos;
+ case TranscendentalCache::TAN: return Runtime::kMath_tan;
case TranscendentalCache::LOG: return Runtime::kMath_log;
default:
UNIMPLEMENTED();
@@ -3399,111 +3439,207 @@ void StackCheckStub::Generate(MacroAssembler* masm) {
}
-void MathPowStub::Generate(MacroAssembler* masm) {
- Label call_runtime;
-
- if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
+void InterruptStub::Generate(MacroAssembler* masm) {
+ __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
+}
- Label base_not_smi;
- Label exponent_not_smi;
- Label convert_exponent;
-
- const Register base = r0;
- const Register exponent = r1;
- const Register heapnumbermap = r5;
- const Register heapnumber = r6;
- const DoubleRegister double_base = d0;
- const DoubleRegister double_exponent = d1;
- const DoubleRegister double_result = d2;
- const SwVfpRegister single_scratch = s0;
- const Register scratch = r9;
- const Register scratch2 = r7;
- __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);
+void MathPowStub::Generate(MacroAssembler* masm) {
+ CpuFeatures::Scope vfp3_scope(VFP3);
+ const Register base = r1;
+ const Register exponent = r2;
+ const Register heapnumbermap = r5;
+ const Register heapnumber = r0;
+ const DoubleRegister double_base = d1;
+ const DoubleRegister double_exponent = d2;
+ const DoubleRegister double_result = d3;
+ const DoubleRegister double_scratch = d0;
+ const SwVfpRegister single_scratch = s0;
+ const Register scratch = r9;
+ const Register scratch2 = r7;
+
+ Label call_runtime, done, int_exponent;
+ if (exponent_type_ == ON_STACK) {
+ Label base_is_smi, unpack_exponent;
+ // The exponent and base are supplied as arguments on the stack.
+ // This can only happen if the stub is called from non-optimized code.
+ // Load input parameters from stack to double registers.
__ ldr(base, MemOperand(sp, 1 * kPointerSize));
__ ldr(exponent, MemOperand(sp, 0 * kPointerSize));
- // Convert base to double value and store it in d0.
- __ JumpIfNotSmi(base, &base_not_smi);
- // Base is a Smi. Untag and convert it.
- __ SmiUntag(base);
- __ vmov(single_scratch, base);
- __ vcvt_f64_s32(double_base, single_scratch);
- __ b(&convert_exponent);
+ __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);
- __ bind(&base_not_smi);
+ __ UntagAndJumpIfSmi(scratch, base, &base_is_smi);
__ ldr(scratch, FieldMemOperand(base, JSObject::kMapOffset));
__ cmp(scratch, heapnumbermap);
__ b(ne, &call_runtime);
- // Base is a heapnumber. Load it into double register.
+
__ vldr(double_base, FieldMemOperand(base, HeapNumber::kValueOffset));
+ __ jmp(&unpack_exponent);
- __ bind(&convert_exponent);
- __ JumpIfNotSmi(exponent, &exponent_not_smi);
- __ SmiUntag(exponent);
-
- // The base is in a double register and the exponent is
- // an untagged smi. Allocate a heap number and call a
- // C function for integer exponents. The register containing
- // the heap number is callee-saved.
- __ AllocateHeapNumber(heapnumber,
- scratch,
- scratch2,
- heapnumbermap,
- &call_runtime);
- __ push(lr);
- __ PrepareCallCFunction(1, 1, scratch);
- __ SetCallCDoubleArguments(double_base, exponent);
- {
- AllowExternalCallThatCantCauseGC scope(masm);
- __ CallCFunction(
- ExternalReference::power_double_int_function(masm->isolate()),
- 1, 1);
- __ pop(lr);
- __ GetCFunctionDoubleResult(double_result);
- }
- __ vstr(double_result,
- FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
- __ mov(r0, heapnumber);
- __ Ret(2 * kPointerSize);
+ __ bind(&base_is_smi);
+ __ vmov(single_scratch, scratch);
+ __ vcvt_f64_s32(double_base, single_scratch);
+ __ bind(&unpack_exponent);
+
+ __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
- __ bind(&exponent_not_smi);
__ ldr(scratch, FieldMemOperand(exponent, JSObject::kMapOffset));
__ cmp(scratch, heapnumbermap);
__ b(ne, &call_runtime);
- // Exponent is a heapnumber. Load it into double register.
__ vldr(double_exponent,
FieldMemOperand(exponent, HeapNumber::kValueOffset));
+ } else if (exponent_type_ == TAGGED) {
+ // Base is already in double_base.
+ __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
+
+ __ vldr(double_exponent,
+ FieldMemOperand(exponent, HeapNumber::kValueOffset));
+ }
+
+ if (exponent_type_ != INTEGER) {
+ Label int_exponent_convert;
+ // Detect integer exponents stored as double.
+ __ vcvt_u32_f64(single_scratch, double_exponent);
+ // We do not check for NaN or Infinity here because comparing numbers on
+ // ARM correctly distinguishes NaNs. We end up calling the built-in.
+ __ vcvt_f64_u32(double_scratch, single_scratch);
+ __ VFPCompareAndSetFlags(double_scratch, double_exponent);
+ __ b(eq, &int_exponent_convert);
+
+ if (exponent_type_ == ON_STACK) {
+ // Detect square root case. Crankshaft detects constant +/-0.5 at
+ // compile time and uses DoMathPowHalf instead. We then skip this check
+ // for non-constant cases of +/-0.5 as these hardly occur.
+ Label not_plus_half;
+
+ // Test for 0.5.
+ __ vmov(double_scratch, 0.5);
+ __ VFPCompareAndSetFlags(double_exponent, double_scratch);
+ __ b(ne, &not_plus_half);
+
+ // Calculates square root of base. Check for the special case of
+ // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
+ __ vmov(double_scratch, -V8_INFINITY);
+ __ VFPCompareAndSetFlags(double_base, double_scratch);
+ __ vneg(double_result, double_scratch, eq);
+ __ b(eq, &done);
+
+ // Add +0 to convert -0 to +0.
+ __ vadd(double_scratch, double_base, kDoubleRegZero);
+ __ vsqrt(double_result, double_scratch);
+ __ jmp(&done);
+
+ __ bind(&not_plus_half);
+ __ vmov(double_scratch, -0.5);
+ __ VFPCompareAndSetFlags(double_exponent, double_scratch);
+ __ b(ne, &call_runtime);
+
+ // Calculates square root of base. Check for the special case of
+ // Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
+ __ vmov(double_scratch, -V8_INFINITY);
+ __ VFPCompareAndSetFlags(double_base, double_scratch);
+ __ vmov(double_result, kDoubleRegZero, eq);
+ __ b(eq, &done);
+
+ // Add +0 to convert -0 to +0.
+ __ vadd(double_scratch, double_base, kDoubleRegZero);
+ __ vmov(double_result, 1);
+ __ vsqrt(double_scratch, double_scratch);
+ __ vdiv(double_result, double_result, double_scratch);
+ __ jmp(&done);
+ }
- // The base and the exponent are in double registers.
- // Allocate a heap number and call a C function for
- // double exponents. The register containing
- // the heap number is callee-saved.
- __ AllocateHeapNumber(heapnumber,
- scratch,
- scratch2,
- heapnumbermap,
- &call_runtime);
__ push(lr);
- __ PrepareCallCFunction(0, 2, scratch);
- __ SetCallCDoubleArguments(double_base, double_exponent);
{
AllowExternalCallThatCantCauseGC scope(masm);
+ __ PrepareCallCFunction(0, 2, scratch);
+ __ SetCallCDoubleArguments(double_base, double_exponent);
__ CallCFunction(
ExternalReference::power_double_double_function(masm->isolate()),
0, 2);
- __ pop(lr);
- __ GetCFunctionDoubleResult(double_result);
}
+ __ pop(lr);
+ __ GetCFunctionDoubleResult(double_result);
+ __ jmp(&done);
+
+ __ bind(&int_exponent_convert);
+ __ vcvt_u32_f64(single_scratch, double_exponent);
+ __ vmov(scratch, single_scratch);
+ }
+
+ // Calculate power with integer exponent.
+ __ bind(&int_exponent);
+
+ // Get two copies of exponent in the registers scratch and exponent.
+ if (exponent_type_ == INTEGER) {
+ __ mov(scratch, exponent);
+ } else {
+ // Exponent has previously been stored into scratch as untagged integer.
+ __ mov(exponent, scratch);
+ }
+ __ vmov(double_scratch, double_base); // Back up base.
+ __ vmov(double_result, 1.0);
+
+ // Get absolute value of exponent.
+ __ cmp(scratch, Operand(0));
+ __ mov(scratch2, Operand(0), LeaveCC, mi);
+ __ sub(scratch, scratch2, scratch, LeaveCC, mi);
+
+ Label while_true;
+ __ bind(&while_true);
+ __ mov(scratch, Operand(scratch, ASR, 1), SetCC);
+ __ vmul(double_result, double_result, double_scratch, cs);
+ __ vmul(double_scratch, double_scratch, double_scratch, ne);
+ __ b(ne, &while_true);
+
+ __ cmp(exponent, Operand(0));
+ __ b(ge, &done);
+ __ vmov(double_scratch, 1.0);
+ __ vdiv(double_result, double_scratch, double_result);
+ // Test whether result is zero. Bail out to check for subnormal result.
+ // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
+ __ VFPCompareAndSetFlags(double_result, 0.0);
+ __ b(ne, &done);
+ // double_exponent may not containe the exponent value if the input was a
+ // smi. We set it with exponent value before bailing out.
+ __ vmov(single_scratch, exponent);
+ __ vcvt_f64_s32(double_exponent, single_scratch);
+
+ // Returning or bailing out.
+ Counters* counters = masm->isolate()->counters();
+ if (exponent_type_ == ON_STACK) {
+ // The arguments are still on the stack.
+ __ bind(&call_runtime);
+ __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
+
+ // The stub is called from non-optimized code, which expects the result
+ // as heap number in exponent.
+ __ bind(&done);
+ __ AllocateHeapNumber(
+ heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
__ vstr(double_result,
FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
- __ mov(r0, heapnumber);
- __ Ret(2 * kPointerSize);
- }
+ ASSERT(heapnumber.is(r0));
+ __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
+ __ Ret(2);
+ } else {
+ __ push(lr);
+ {
+ AllowExternalCallThatCantCauseGC scope(masm);
+ __ PrepareCallCFunction(0, 2, scratch);
+ __ SetCallCDoubleArguments(double_base, double_exponent);
+ __ CallCFunction(
+ ExternalReference::power_double_double_function(masm->isolate()),
+ 0, 2);
+ }
+ __ pop(lr);
+ __ GetCFunctionDoubleResult(double_result);
- __ bind(&call_runtime);
- __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
+ __ bind(&done);
+ __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
+ __ Ret();
+ }
}
@@ -3543,17 +3679,6 @@ void CEntryStub::GenerateAheadOfTime() {
}
-void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
- __ Throw(r0);
-}
-
-
-void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
- UncatchableExceptionType type) {
- __ ThrowUncatchable(type, r0);
-}
-
-
void CEntryStub::GenerateCore(MacroAssembler* masm,
Label* throw_normal_exception,
Label* throw_termination_exception,
@@ -3695,7 +3820,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
FrameScope scope(masm, StackFrame::MANUAL);
__ EnterExitFrame(save_doubles_);
- // Setup argc and the builtin function in callee-saved registers.
+ // Set up argc and the builtin function in callee-saved registers.
__ mov(r4, Operand(r0));
__ mov(r5, Operand(r1));
@@ -3734,13 +3859,27 @@ void CEntryStub::Generate(MacroAssembler* masm) {
true);
__ bind(&throw_out_of_memory_exception);
- GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
+ // Set external caught exception to false.
+ Isolate* isolate = masm->isolate();
+ ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
+ isolate);
+ __ mov(r0, Operand(false, RelocInfo::NONE));
+ __ mov(r2, Operand(external_caught));
+ __ str(r0, MemOperand(r2));
+
+ // Set pending exception and r0 to out of memory exception.
+ Failure* out_of_memory = Failure::OutOfMemoryException();
+ __ mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
+ __ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
+ isolate)));
+ __ str(r0, MemOperand(r2));
+ // Fall through to the next label.
__ bind(&throw_termination_exception);
- GenerateThrowUncatchable(masm, TERMINATION);
+ __ ThrowUncatchable(r0);
__ bind(&throw_normal_exception);
- GenerateThrowTOS(masm);
+ __ Throw(r0);
}
@@ -3751,7 +3890,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// r3: argc
// [sp+0]: argv
- Label invoke, exit;
+ Label invoke, handler_entry, exit;
// Called from C, so do not pop argc and args on exit (preserve sp)
// No need to save register-passed args
@@ -3772,7 +3911,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// r2: receiver
// r3: argc
- // Setup argv in r4.
+ // Set up argv in r4.
int offset_to_argv = (kNumCalleeSaved + 1) * kPointerSize;
if (CpuFeatures::IsSupported(VFP3)) {
offset_to_argv += kNumDoubleCalleeSaved * kDoubleSize;
@@ -3795,7 +3934,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ ldr(r5, MemOperand(r5));
__ Push(r8, r7, r6, r5);
- // Setup frame pointer for the frame to be pushed.
+ // Set up frame pointer for the frame to be pushed.
__ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
// If this is the outermost JS call, set js_entry_sp value.
@@ -3814,23 +3953,26 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ bind(&cont);
__ push(ip);
- // Call a faked try-block that does the invoke.
- __ bl(&invoke);
-
- // Caught exception: Store result (exception) in the pending
- // exception field in the JSEnv and return a failure sentinel.
- // Coming in here the fp will be invalid because the PushTryHandler below
- // sets it to 0 to signal the existence of the JSEntry frame.
+ // Jump to a faked try block that does the invoke, with a faked catch
+ // block that sets the pending exception.
+ __ jmp(&invoke);
+ __ bind(&handler_entry);
+ handler_offset_ = handler_entry.pos();
+ // Caught exception: Store result (exception) in the pending exception
+ // field in the JSEnv and return a failure sentinel. Coming in here the
+ // fp will be invalid because the PushTryHandler below sets it to 0 to
+ // signal the existence of the JSEntry frame.
__ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
isolate)));
__ str(r0, MemOperand(ip));
__ mov(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
__ b(&exit);
- // Invoke: Link this frame into the handler chain.
+ // Invoke: Link this frame into the handler chain. There's only one
+ // handler block in this code object, so its index is 0.
__ bind(&invoke);
// Must preserve r0-r4, r5-r7 are available.
- __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
+ __ PushTryHandler(StackHandler::JS_ENTRY, 0);
// If an exception not caught by another handler occurs, this handler
// returns control to the code after the bl(&invoke) above, which
// restores all kCalleeSaved registers (including cp and fp) to their
@@ -3931,7 +4073,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
const Register inline_site = r9;
const Register scratch = r2;
- const int32_t kDeltaToLoadBoolResult = 3 * kPointerSize;
+ const int32_t kDeltaToLoadBoolResult = 4 * kPointerSize;
Label slow, loop, is_instance, is_not_instance, not_js_object;
@@ -3948,11 +4090,9 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
// real lookup and update the call site cache.
if (!HasCallSiteInlineCheck()) {
Label miss;
- __ LoadRoot(ip, Heap::kInstanceofCacheFunctionRootIndex);
- __ cmp(function, ip);
+ __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
__ b(ne, &miss);
- __ LoadRoot(ip, Heap::kInstanceofCacheMapRootIndex);
- __ cmp(map, ip);
+ __ CompareRoot(map, Heap::kInstanceofCacheMapRootIndex);
__ b(ne, &miss);
__ LoadRoot(r0, Heap::kInstanceofCacheAnswerRootIndex);
__ Ret(HasArgsInRegisters() ? 0 : 2);
@@ -3982,7 +4122,8 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ sub(inline_site, lr, scratch);
// Get the map location in scratch and patch it.
__ GetRelocatedValueLocation(inline_site, scratch);
- __ str(map, MemOperand(scratch));
+ __ ldr(scratch, MemOperand(scratch));
+ __ str(map, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
}
// Register mapping: r3 is object map and r4 is function prototype.
@@ -4096,7 +4237,7 @@ Register InstanceofStub::right() { return r1; }
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
// The displacement is the offset of the last parameter (if any)
// relative to the frame pointer.
- static const int kDisplacement =
+ const int kDisplacement =
StandardFrameConstants::kCallerSPOffset - kPointerSize;
// Check that the key is a smi.
@@ -4251,7 +4392,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
__ str(r3, FieldMemOperand(r0, i));
}
- // Setup the callee in-object property.
+ // Set up the callee in-object property.
STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
__ ldr(r3, MemOperand(sp, 2 * kPointerSize));
const int kCalleeOffset = JSObject::kHeaderSize +
@@ -4264,7 +4405,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
Heap::kArgumentsLengthIndex * kPointerSize;
__ str(r2, FieldMemOperand(r0, kLengthOffset));
- // Setup the elements pointer in the allocated arguments object.
+ // Set up the elements pointer in the allocated arguments object.
// If we allocated a parameter map, r4 will point there, otherwise
// it will point to the backing store.
__ add(r4, r0, Operand(Heap::kArgumentsObjectSize));
@@ -4359,7 +4500,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
__ Ret();
// Do the runtime call to allocate the arguments object.
- // r2 = argument count (taggged)
+ // r2 = argument count (tagged)
__ bind(&runtime);
__ str(r2, MemOperand(sp, 0 * kPointerSize)); // Patch argument count.
__ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
@@ -4432,7 +4573,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Get the parameters pointer from the stack.
__ ldr(r2, MemOperand(sp, 1 * kPointerSize));
- // Setup the elements pointer in the allocated arguments object and
+ // Set up the elements pointer in the allocated arguments object and
// initialize the header in the elements fixed array.
__ add(r4, r0, Operand(Heap::kArgumentsObjectSizeStrict));
__ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
@@ -4444,7 +4585,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Copy the fixed array slots.
Label loop;
- // Setup r4 to point to the first array slot.
+ // Set up r4 to point to the first array slot.
__ add(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ bind(&loop);
// Pre-decrement r2 with kPointerSize on each iteration.
@@ -4481,10 +4622,10 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// sp[8]: subject string
// sp[12]: JSRegExp object
- static const int kLastMatchInfoOffset = 0 * kPointerSize;
- static const int kPreviousIndexOffset = 1 * kPointerSize;
- static const int kSubjectOffset = 2 * kPointerSize;
- static const int kJSRegExpOffset = 3 * kPointerSize;
+ const int kLastMatchInfoOffset = 0 * kPointerSize;
+ const int kPreviousIndexOffset = 1 * kPointerSize;
+ const int kSubjectOffset = 2 * kPointerSize;
+ const int kJSRegExpOffset = 3 * kPointerSize;
Label runtime, invoke_regexp;
@@ -4505,7 +4646,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
ExternalReference::address_of_regexp_stack_memory_size(isolate);
__ mov(r0, Operand(address_of_regexp_stack_memory_size));
__ ldr(r0, MemOperand(r0, 0));
- __ tst(r0, Operand(r0));
+ __ cmp(r0, Operand(0));
__ b(eq, &runtime);
// Check that the first argument is a JSRegExp object.
@@ -4576,8 +4717,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ ldr(last_match_info_elements,
FieldMemOperand(r0, JSArray::kElementsOffset));
__ ldr(r0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset));
- __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
- __ cmp(r0, ip);
+ __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex);
__ b(ne, &runtime);
// Check that the last match info has space for the capture registers and the
// additional information.
@@ -4595,25 +4735,39 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
Label seq_string;
__ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
__ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
- // First check for flat string.
- __ and_(r1, r0, Operand(kIsNotStringMask | kStringRepresentationMask), SetCC);
+ // First check for flat string. None of the following string type tests will
+ // succeed if subject is not a string or a short external string.
+ __ and_(r1,
+ r0,
+ Operand(kIsNotStringMask |
+ kStringRepresentationMask |
+ kShortExternalStringMask),
+ SetCC);
STATIC_ASSERT((kStringTag | kSeqStringTag) == 0);
__ b(eq, &seq_string);
// subject: Subject string
// regexp_data: RegExp data (FixedArray)
+ // r1: whether subject is a string and if yes, its string representation
// Check for flat cons string or sliced string.
// A flat cons string is a cons string where the second part is the empty
// string. In that case the subject string is just the first part of the cons
// string. Also in this case the first part of the cons string is known to be
// a sequential string or an external string.
// In the case of a sliced string its offset has to be taken into account.
- Label cons_string, check_encoding;
+ Label cons_string, external_string, check_encoding;
STATIC_ASSERT(kConsStringTag < kExternalStringTag);
STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
+ STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
+ STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
__ cmp(r1, Operand(kExternalStringTag));
__ b(lt, &cons_string);
- __ b(eq, &runtime);
+ __ b(eq, &external_string);
+
+ // Catch non-string subject or short external string.
+ STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag !=0);
+ __ tst(r1, Operand(kIsNotStringMask | kShortExternalStringMask));
+ __ b(ne, &runtime);
// String is sliced.
__ ldr(r9, FieldMemOperand(subject, SlicedString::kOffsetOffset));
@@ -4624,8 +4778,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// String is a cons string, check whether it is flat.
__ bind(&cons_string);
__ ldr(r0, FieldMemOperand(subject, ConsString::kSecondOffset));
- __ LoadRoot(r1, Heap::kEmptyStringRootIndex);
- __ cmp(r0, r1);
+ __ CompareRoot(r0, Heap::kEmptyStringRootIndex);
__ b(ne, &runtime);
__ ldr(subject, FieldMemOperand(subject, ConsString::kFirstOffset));
// Is first part of cons or parent of slice a flat string?
@@ -4634,7 +4787,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
STATIC_ASSERT(kSeqStringTag == 0);
__ tst(r0, Operand(kStringRepresentationMask));
- __ b(ne, &runtime);
+ __ b(ne, &external_string);
+
__ bind(&seq_string);
// subject: Subject string
// regexp_data: RegExp data (FixedArray)
@@ -4670,8 +4824,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ IncrementCounter(isolate->counters()->regexp_entry_native(), 1, r0, r2);
// Isolates: note we add an additional parameter here (isolate pointer).
- static const int kRegExpExecuteArguments = 8;
- static const int kParameterRegisters = 4;
+ const int kRegExpExecuteArguments = 8;
+ const int kParameterRegisters = 4;
__ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
// Stack pointer now points to cell where return address is to be written.
@@ -4766,10 +4920,10 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
Label termination_exception;
__ b(eq, &termination_exception);
- __ Throw(r0); // Expects thrown value in r0.
+ __ Throw(r0);
__ bind(&termination_exception);
- __ ThrowUncatchable(TERMINATION, r0); // Expects thrown value in r0.
+ __ ThrowUncatchable(r0);
__ bind(&failure);
// For failure and exception return null.
@@ -4842,6 +4996,26 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ add(sp, sp, Operand(4 * kPointerSize));
__ Ret();
+ // External string. Short external strings have already been ruled out.
+ // r0: scratch
+ __ bind(&external_string);
+ __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
+ __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
+ if (FLAG_debug_code) {
+ // Assert that we do not have a cons or slice (indirect strings) here.
+ // Sequential strings have already been ruled out.
+ __ tst(r0, Operand(kIsIndirectStringMask));
+ __ Assert(eq, "external string expected, but not found");
+ }
+ __ ldr(subject,
+ FieldMemOperand(subject, ExternalString::kResourceDataOffset));
+ // Move the pointer so that offset-wise, it looks like a sequential string.
+ STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqAsciiString::kHeaderSize);
+ __ sub(subject,
+ subject,
+ Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
+ __ jmp(&seq_string);
+
// Do the runtime call to execute the regexp.
__ bind(&runtime);
__ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
@@ -4897,11 +5071,11 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
// Set input, index and length fields from arguments.
__ ldr(r1, MemOperand(sp, kPointerSize * 0));
+ __ ldr(r2, MemOperand(sp, kPointerSize * 1));
+ __ ldr(r6, MemOperand(sp, kPointerSize * 2));
__ str(r1, FieldMemOperand(r0, JSRegExpResult::kInputOffset));
- __ ldr(r1, MemOperand(sp, kPointerSize * 1));
- __ str(r1, FieldMemOperand(r0, JSRegExpResult::kIndexOffset));
- __ ldr(r1, MemOperand(sp, kPointerSize * 2));
- __ str(r1, FieldMemOperand(r0, JSArray::kLengthOffset));
+ __ str(r2, FieldMemOperand(r0, JSRegExpResult::kIndexOffset));
+ __ str(r6, FieldMemOperand(r0, JSArray::kLengthOffset));
// Fill out the elements FixedArray.
// r0: JSArray, tagged.
@@ -4923,9 +5097,9 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
// r3: Start of elements in FixedArray.
// r5: Number of elements to fill.
Label loop;
- __ tst(r5, Operand(r5));
+ __ cmp(r5, Operand(0));
__ bind(&loop);
- __ b(le, &done); // Jump if r1 is negative or zero.
+ __ b(le, &done); // Jump if r5 is negative or zero.
__ sub(r5, r5, Operand(1), SetCC);
__ str(r2, MemOperand(r3, r5, LSL, kPointerSizeLog2));
__ jmp(&loop);
@@ -4939,23 +5113,48 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
}
-void CallFunctionStub::FinishCode(Code* code) {
- code->set_has_function_cache(false);
-}
+static void GenerateRecordCallTarget(MacroAssembler* masm) {
+ // Cache the called function in a global property cell. Cache states
+ // are uninitialized, monomorphic (indicated by a JSFunction), and
+ // megamorphic.
+ // r1 : the function to call
+ // r2 : cache cell for call target
+ Label done;
+ ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()),
+ masm->isolate()->heap()->undefined_value());
+ ASSERT_EQ(*TypeFeedbackCells::UninitializedSentinel(masm->isolate()),
+ masm->isolate()->heap()->the_hole_value());
-void CallFunctionStub::Clear(Heap* heap, Address address) {
- UNREACHABLE();
-}
+ // Load the cache state into r3.
+ __ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+
+ // A monomorphic cache hit or an already megamorphic state: invoke the
+ // function without changing the state.
+ __ cmp(r3, r1);
+ __ b(eq, &done);
+ __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
+ __ b(eq, &done);
+ // A monomorphic miss (i.e, here the cache is not uninitialized) goes
+ // megamorphic.
+ __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
+ // MegamorphicSentinel is an immortal immovable object (undefined) so no
+ // write-barrier is needed.
+ __ LoadRoot(ip, Heap::kUndefinedValueRootIndex, ne);
+ __ str(ip, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset), ne);
-Object* CallFunctionStub::GetCachedValue(Address address) {
- UNREACHABLE();
- return NULL;
+ // An uninitialized cache is patched with the function.
+ __ str(r1, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset), eq);
+ // No need for a write barrier here - cells are rescanned.
+
+ __ bind(&done);
}
void CallFunctionStub::Generate(MacroAssembler* masm) {
+ // r1 : the function to call
+ // r2 : cache cell for call target
Label slow, non_function;
// The receiver might implicitly be the global object. This is
@@ -4970,16 +5169,12 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
__ b(ne, &call);
// Patch the receiver on the stack with the global receiver object.
- __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
- __ str(r1, MemOperand(sp, argc_ * kPointerSize));
+ __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
+ __ str(r2, MemOperand(sp, argc_ * kPointerSize));
__ bind(&call);
}
- // Get the function to call from the stack.
- // function, receiver [, arguments]
- __ ldr(r1, MemOperand(sp, (argc_ + 1) * kPointerSize));
-
// Check that the function is really a JavaScript function.
// r1: pushed function (to be verified)
__ JumpIfSmi(r1, &non_function);
@@ -5017,7 +5212,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ mov(r0, Operand(argc_ + 1, RelocInfo::NONE));
__ mov(r2, Operand(0, RelocInfo::NONE));
__ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
- __ SetCallKind(r5, CALL_AS_FUNCTION);
+ __ SetCallKind(r5, CALL_AS_METHOD);
{
Handle<Code> adaptor =
masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
@@ -5028,7 +5223,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// of the original receiver from the call site).
__ bind(&non_function);
__ str(r1, MemOperand(sp, argc_ * kPointerSize));
- __ mov(r0, Operand(argc_)); // Setup the number of arguments.
+ __ mov(r0, Operand(argc_)); // Set up the number of arguments.
__ mov(r2, Operand(0, RelocInfo::NONE));
__ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
__ SetCallKind(r5, CALL_AS_METHOD);
@@ -5037,6 +5232,48 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
}
+void CallConstructStub::Generate(MacroAssembler* masm) {
+ // r0 : number of arguments
+ // r1 : the function to call
+ // r2 : cache cell for call target
+ Label slow, non_function_call;
+
+ // Check that the function is not a smi.
+ __ JumpIfSmi(r1, &non_function_call);
+ // Check that the function is a JSFunction.
+ __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE);
+ __ b(ne, &slow);
+
+ if (RecordCallTarget()) {
+ GenerateRecordCallTarget(masm);
+ }
+
+ // Jump to the function-specific construct stub.
+ __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+ __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kConstructStubOffset));
+ __ add(pc, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
+
+ // r0: number of arguments
+ // r1: called object
+ // r3: object type
+ Label do_call;
+ __ bind(&slow);
+ __ cmp(r3, Operand(JS_FUNCTION_PROXY_TYPE));
+ __ b(ne, &non_function_call);
+ __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
+ __ jmp(&do_call);
+
+ __ bind(&non_function_call);
+ __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
+ __ bind(&do_call);
+ // Set expected number of arguments to zero (not changing r0).
+ __ mov(r2, Operand(0, RelocInfo::NONE));
+ __ SetCallKind(r5, CALL_AS_METHOD);
+ __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+ RelocInfo::CODE_TARGET);
+}
+
+
// Unfortunately you have to run without snapshots to see most of these
// names in the profile since most compare stubs end up in the snapshot.
void CompareStub::PrintName(StringStream* stream) {
@@ -5098,84 +5335,21 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
// If the index is non-smi trigger the non-smi case.
__ JumpIfNotSmi(index_, &index_not_smi_);
-
- // Put smi-tagged index into scratch register.
- __ mov(scratch_, index_);
__ bind(&got_smi_index_);
// Check for index out of range.
__ ldr(ip, FieldMemOperand(object_, String::kLengthOffset));
- __ cmp(ip, Operand(scratch_));
+ __ cmp(ip, Operand(index_));
__ b(ls, index_out_of_range_);
- // We need special handling for non-flat strings.
- STATIC_ASSERT(kSeqStringTag == 0);
- __ tst(result_, Operand(kStringRepresentationMask));
- __ b(eq, &flat_string);
+ __ mov(index_, Operand(index_, ASR, kSmiTagSize));
- // Handle non-flat strings.
- __ and_(result_, result_, Operand(kStringRepresentationMask));
- STATIC_ASSERT(kConsStringTag < kExternalStringTag);
- STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
- __ cmp(result_, Operand(kExternalStringTag));
- __ b(gt, &sliced_string);
- __ b(eq, &call_runtime_);
-
- // ConsString.
- // Check whether the right hand side is the empty string (i.e. if
- // this is really a flat string in a cons string). If that is not
- // the case we would rather go to the runtime system now to flatten
- // the string.
- Label assure_seq_string;
- __ ldr(result_, FieldMemOperand(object_, ConsString::kSecondOffset));
- __ LoadRoot(ip, Heap::kEmptyStringRootIndex);
- __ cmp(result_, Operand(ip));
- __ b(ne, &call_runtime_);
- // Get the first of the two strings and load its instance type.
- __ ldr(result_, FieldMemOperand(object_, ConsString::kFirstOffset));
- __ jmp(&assure_seq_string);
-
- // SlicedString, unpack and add offset.
- __ bind(&sliced_string);
- __ ldr(result_, FieldMemOperand(object_, SlicedString::kOffsetOffset));
- __ add(scratch_, scratch_, result_);
- __ ldr(result_, FieldMemOperand(object_, SlicedString::kParentOffset));
+ StringCharLoadGenerator::Generate(masm,
+ object_,
+ index_,
+ result_,
+ &call_runtime_);
- // Assure that we are dealing with a sequential string. Go to runtime if not.
- __ bind(&assure_seq_string);
- __ ldr(result_, FieldMemOperand(result_, HeapObject::kMapOffset));
- __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
- // Check that parent is not an external string. Go to runtime otherwise.
- STATIC_ASSERT(kSeqStringTag == 0);
- __ tst(result_, Operand(kStringRepresentationMask));
- __ b(ne, &call_runtime_);
- // Actually fetch the parent string if it is confirmed to be sequential.
- STATIC_ASSERT(SlicedString::kParentOffset == ConsString::kFirstOffset);
- __ ldr(object_, FieldMemOperand(object_, SlicedString::kParentOffset));
-
- // Check for 1-byte or 2-byte string.
- __ bind(&flat_string);
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
- STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
- __ tst(result_, Operand(kStringEncodingMask));
- __ b(ne, &ascii_string);
-
- // 2-byte string.
- // Load the 2-byte character code into the result register. We can
- // add without shifting since the smi tag size is the log2 of the
- // number of bytes in a two-byte character.
- STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1 && kSmiShiftSize == 0);
- __ add(scratch_, object_, Operand(scratch_));
- __ ldrh(result_, FieldMemOperand(scratch_, SeqTwoByteString::kHeaderSize));
- __ jmp(&got_char_code);
-
- // ASCII string.
- // Load the byte into the result register.
- __ bind(&ascii_string);
- __ add(scratch_, object_, Operand(scratch_, LSR, kSmiTagSize));
- __ ldrb(result_, FieldMemOperand(scratch_, SeqAsciiString::kHeaderSize));
-
- __ bind(&got_char_code);
__ mov(result_, Operand(result_, LSL, kSmiTagSize));
__ bind(&exit_);
}
@@ -5190,12 +5364,12 @@ void StringCharCodeAtGenerator::GenerateSlow(
__ bind(&index_not_smi_);
// If index is a heap number, try converting it to an integer.
__ CheckMap(index_,
- scratch_,
+ result_,
Heap::kHeapNumberMapRootIndex,
index_not_number_,
DONT_DO_SMI_CHECK);
call_helper.BeforeCall(masm);
- __ Push(object_, index_);
+ __ push(object_);
__ push(index_); // Consumed by runtime conversion function.
if (index_flags_ == STRING_INDEX_IS_NUMBER) {
__ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
@@ -5206,15 +5380,14 @@ void StringCharCodeAtGenerator::GenerateSlow(
}
// Save the conversion result before the pop instructions below
// have a chance to overwrite it.
- __ Move(scratch_, r0);
- __ pop(index_);
+ __ Move(index_, r0);
__ pop(object_);
// Reload the instance type.
__ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
__ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
call_helper.AfterCall(masm);
// If index is still not a smi, it must be out of range.
- __ JumpIfNotSmi(scratch_, index_out_of_range_);
+ __ JumpIfNotSmi(index_, index_out_of_range_);
// Otherwise, return to the fast path.
__ jmp(&got_smi_index_);
@@ -5223,6 +5396,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
// is too complex (e.g., when the string needs to be flattened).
__ bind(&call_runtime_);
call_helper.BeforeCall(masm);
+ __ mov(index_, Operand(index_, LSL, kSmiTagSize));
__ Push(object_, index_);
__ CallRuntime(Runtime::kStringCharCodeAt, 2);
__ Move(result_, r0);
@@ -5251,8 +5425,7 @@ void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
STATIC_ASSERT(kSmiTag == 0);
__ add(result_, result_, Operand(code_, LSL, kPointerSizeLog2 - kSmiTagSize));
__ ldr(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
- __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
- __ cmp(result_, Operand(ip));
+ __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
__ b(eq, &slow_case_);
__ bind(&exit_);
}
@@ -5292,70 +5465,6 @@ void StringCharAtGenerator::GenerateSlow(
}
-class StringHelper : public AllStatic {
- public:
- // Generate code for copying characters using a simple loop. This should only
- // be used in places where the number of characters is small and the
- // additional setup and checking in GenerateCopyCharactersLong adds too much
- // overhead. Copying of overlapping regions is not supported.
- // Dest register ends at the position after the last character written.
- static void GenerateCopyCharacters(MacroAssembler* masm,
- Register dest,
- Register src,
- Register count,
- Register scratch,
- bool ascii);
-
- // Generate code for copying a large number of characters. This function
- // is allowed to spend extra time setting up conditions to make copying
- // faster. Copying of overlapping regions is not supported.
- // Dest register ends at the position after the last character written.
- static void GenerateCopyCharactersLong(MacroAssembler* masm,
- Register dest,
- Register src,
- Register count,
- Register scratch1,
- Register scratch2,
- Register scratch3,
- Register scratch4,
- Register scratch5,
- int flags);
-
-
- // Probe the symbol table for a two character string. If the string is
- // not found by probing a jump to the label not_found is performed. This jump
- // does not guarantee that the string is not in the symbol table. If the
- // string is found the code falls through with the string in register r0.
- // Contents of both c1 and c2 registers are modified. At the exit c1 is
- // guaranteed to contain halfword with low and high bytes equal to
- // initial contents of c1 and c2 respectively.
- static void GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
- Register c1,
- Register c2,
- Register scratch1,
- Register scratch2,
- Register scratch3,
- Register scratch4,
- Register scratch5,
- Label* not_found);
-
- // Generate string hash.
- static void GenerateHashInit(MacroAssembler* masm,
- Register hash,
- Register character);
-
- static void GenerateHashAddCharacter(MacroAssembler* masm,
- Register hash,
- Register character);
-
- static void GenerateHashGetHash(MacroAssembler* masm,
- Register hash);
-
- private:
- DISALLOW_IMPLICIT_CONSTRUCTORS(StringHelper);
-};
-
-
void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
Register dest,
Register src,
@@ -5605,12 +5714,11 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
// scratch: -
// Perform a number of probes in the symbol table.
- static const int kProbes = 4;
+ const int kProbes = 4;
Label found_in_symbol_table;
Label next_probe[kProbes];
+ Register candidate = scratch5; // Scratch register contains candidate.
for (int i = 0; i < kProbes; i++) {
- Register candidate = scratch5; // Scratch register contains candidate.
-
// Calculate entry in symbol table.
if (i > 0) {
__ add(candidate, hash, Operand(SymbolTable::GetProbeOffset(i)));
@@ -5635,11 +5743,11 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
__ cmp(undefined, candidate);
__ b(eq, not_found);
- // Must be null (deleted entry).
+ // Must be the hole (deleted entry).
if (FLAG_debug_code) {
- __ LoadRoot(ip, Heap::kNullValueRootIndex);
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
__ cmp(ip, candidate);
- __ Assert(eq, "oddball in symbol table is not undefined or null");
+ __ Assert(eq, "oddball in symbol table is not undefined or the hole");
}
__ jmp(&next_probe[i]);
@@ -5667,7 +5775,7 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
__ jmp(not_found);
// Scratch register contains result when we fall through to here.
- Register result = scratch;
+ Register result = candidate;
__ bind(&found_in_symbol_table);
__ Move(r0, result);
}
@@ -5677,9 +5785,13 @@ void StringHelper::GenerateHashInit(MacroAssembler* masm,
Register hash,
Register character) {
// hash = character + (character << 10);
- __ add(hash, character, Operand(character, LSL, 10));
+ __ LoadRoot(hash, Heap::kHashSeedRootIndex);
+ // Untag smi seed and add the character.
+ __ add(hash, character, Operand(hash, LSR, kSmiTagSize));
+ // hash += hash << 10;
+ __ add(hash, hash, Operand(hash, LSL, 10));
// hash ^= hash >> 6;
- __ eor(hash, hash, Operand(hash, ASR, 6));
+ __ eor(hash, hash, Operand(hash, LSR, 6));
}
@@ -5691,7 +5803,7 @@ void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
// hash += hash << 10;
__ add(hash, hash, Operand(hash, LSL, 10));
// hash ^= hash >> 6;
- __ eor(hash, hash, Operand(hash, ASR, 6));
+ __ eor(hash, hash, Operand(hash, LSR, 6));
}
@@ -5700,12 +5812,14 @@ void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
// hash += hash << 3;
__ add(hash, hash, Operand(hash, LSL, 3));
// hash ^= hash >> 11;
- __ eor(hash, hash, Operand(hash, ASR, 11));
+ __ eor(hash, hash, Operand(hash, LSR, 11));
// hash += hash << 15;
- __ add(hash, hash, Operand(hash, LSL, 15), SetCC);
+ __ add(hash, hash, Operand(hash, LSL, 15));
+
+ __ and_(hash, hash, Operand(String::kHashBitMask), SetCC);
// if (hash == 0) hash = 27;
- __ mov(hash, Operand(27), LeaveCC, ne);
+ __ mov(hash, Operand(StringHasher::kZeroHash), LeaveCC, eq);
}
@@ -5725,41 +5839,28 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// 0 <= from <= to <= string.length.
// If any of these assumptions fail, we call the runtime system.
- static const int kToOffset = 0 * kPointerSize;
- static const int kFromOffset = 1 * kPointerSize;
- static const int kStringOffset = 2 * kPointerSize;
-
- // Check bounds and smi-ness.
- Register to = r6;
- Register from = r7;
+ const int kToOffset = 0 * kPointerSize;
+ const int kFromOffset = 1 * kPointerSize;
+ const int kStringOffset = 2 * kPointerSize;
- __ Ldrd(to, from, MemOperand(sp, kToOffset));
+ __ Ldrd(r2, r3, MemOperand(sp, kToOffset));
STATIC_ASSERT(kFromOffset == kToOffset + 4);
STATIC_ASSERT(kSmiTag == 0);
STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
// I.e., arithmetic shift right by one un-smi-tags.
- __ mov(r2, Operand(to, ASR, 1), SetCC);
- __ mov(r3, Operand(from, ASR, 1), SetCC, cc);
+ __ mov(r2, Operand(r2, ASR, 1), SetCC);
+ __ mov(r3, Operand(r3, ASR, 1), SetCC, cc);
// If either to or from had the smi tag bit set, then carry is set now.
__ b(cs, &runtime); // Either "from" or "to" is not a smi.
- __ b(mi, &runtime); // From is negative.
-
- // Both to and from are smis.
- __ sub(r2, r2, Operand(r3), SetCC);
+ // We want to bailout to runtime here if From is negative. In that case, the
+ // next instruction is not executed and we fall through to bailing out to
+ // runtime. pl is the opposite of mi.
+ // Both r2 and r3 are untagged integers.
+ __ sub(r2, r2, Operand(r3), SetCC, pl);
__ b(mi, &runtime); // Fail if from > to.
- // Special handling of sub-strings of length 1 and 2. One character strings
- // are handled in the runtime system (looked up in the single character
- // cache). Two character strings are looked for in the symbol cache in
- // generated code.
- __ cmp(r2, Operand(2));
- __ b(lt, &runtime);
- // r2: result string length
- // r3: from index (untagged smi)
- // r6 (a.k.a. to): to (smi)
- // r7 (a.k.a. from): from offset (smi)
- // Make sure first argument is a sequential (or flat) string.
+ // Make sure first argument is a string.
__ ldr(r0, MemOperand(sp, kStringOffset));
STATIC_ASSERT(kSmiTag == 0);
__ JumpIfSmi(r0, &runtime);
@@ -5774,67 +5875,15 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ cmp(r2, Operand(r4, ASR, 1));
__ b(eq, &return_r0);
- Label create_slice;
- if (FLAG_string_slices) {
- __ cmp(r2, Operand(SlicedString::kMinLength));
- __ b(ge, &create_slice);
- }
-
- // r0: original string
- // r1: instance type
- // r2: result string length
- // r3: from index (untagged smi)
- // r6 (a.k.a. to): to (smi)
- // r7 (a.k.a. from): from offset (smi)
- Label seq_string;
- __ and_(r4, r1, Operand(kStringRepresentationMask));
- STATIC_ASSERT(kSeqStringTag < kConsStringTag);
- STATIC_ASSERT(kConsStringTag < kExternalStringTag);
- STATIC_ASSERT(kConsStringTag < kSlicedStringTag);
- __ cmp(r4, Operand(kConsStringTag));
- __ b(gt, &runtime); // Slices and external strings go to runtime.
- __ b(lt, &seq_string); // Sequential strings are handled directly.
-
- // Cons string. Try to recurse (once) on the first substring.
- // (This adds a little more generality than necessary to handle flattened
- // cons strings, but not much).
- __ ldr(r0, FieldMemOperand(r0, ConsString::kFirstOffset));
- __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
- __ ldrb(r1, FieldMemOperand(r4, Map::kInstanceTypeOffset));
- __ tst(r1, Operand(kStringRepresentationMask));
- STATIC_ASSERT(kSeqStringTag == 0);
- __ b(ne, &runtime); // Cons, slices and external strings go to runtime.
-
- // Definitly a sequential string.
- __ bind(&seq_string);
-
- // r0: original string
- // r1: instance type
- // r2: result string length
- // r3: from index (untagged smi)
- // r6 (a.k.a. to): to (smi)
- // r7 (a.k.a. from): from offset (smi)
- __ ldr(r4, FieldMemOperand(r0, String::kLengthOffset));
- __ cmp(r4, Operand(to));
- __ b(lt, &runtime); // Fail if to > length.
- to = no_reg;
-
- // r0: original string or left hand side of the original cons string.
- // r1: instance type
- // r2: result string length
- // r3: from index (untagged smi)
- // r7 (a.k.a. from): from offset (smi)
- // Check for flat ASCII string.
- Label non_ascii_flat;
- __ tst(r1, Operand(kStringEncodingMask));
- STATIC_ASSERT(kTwoByteStringTag == 0);
- __ b(eq, &non_ascii_flat);
-
Label result_longer_than_two;
+ // Check for special case of two character ASCII string, in which case
+ // we do a lookup in the symbol table first.
__ cmp(r2, Operand(2));
__ b(gt, &result_longer_than_two);
+ __ b(lt, &runtime);
+
+ __ JumpIfInstanceTypeIsNotSequentialAscii(r1, r1, &runtime);
- // Sub string of length 2 requested.
// Get the two characters forming the sub string.
__ add(r0, r0, Operand(r3));
__ ldrb(r3, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
@@ -5844,7 +5893,6 @@ void SubStringStub::Generate(MacroAssembler* masm) {
Label make_two_character_string;
StringHelper::GenerateTwoCharacterSymbolTableProbe(
masm, r3, r4, r1, r5, r6, r7, r9, &make_two_character_string);
- Counters* counters = masm->isolate()->counters();
__ jmp(&return_r0);
// r2: result string length.
@@ -5855,18 +5903,114 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ jmp(&return_r0);
__ bind(&result_longer_than_two);
+ // Deal with different string types: update the index if necessary
+ // and put the underlying string into r5.
+ // r0: original string
+ // r1: instance type
+ // r2: length
+ // r3: from index (untagged)
+ Label underlying_unpacked, sliced_string, seq_or_external_string;
+ // If the string is not indirect, it can only be sequential or external.
+ STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
+ STATIC_ASSERT(kIsIndirectStringMask != 0);
+ __ tst(r1, Operand(kIsIndirectStringMask));
+ __ b(eq, &seq_or_external_string);
+
+ __ tst(r1, Operand(kSlicedNotConsMask));
+ __ b(ne, &sliced_string);
+ // Cons string. Check whether it is flat, then fetch first part.
+ __ ldr(r5, FieldMemOperand(r0, ConsString::kSecondOffset));
+ __ CompareRoot(r5, Heap::kEmptyStringRootIndex);
+ __ b(ne, &runtime);
+ __ ldr(r5, FieldMemOperand(r0, ConsString::kFirstOffset));
+ // Update instance type.
+ __ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset));
+ __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset));
+ __ jmp(&underlying_unpacked);
- // Locate 'from' character of string.
- __ add(r5, r0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- __ add(r5, r5, Operand(from, ASR, 1));
+ __ bind(&sliced_string);
+ // Sliced string. Fetch parent and correct start index by offset.
+ __ ldr(r5, FieldMemOperand(r0, SlicedString::kParentOffset));
+ __ ldr(r4, FieldMemOperand(r0, SlicedString::kOffsetOffset));
+ __ add(r3, r3, Operand(r4, ASR, 1)); // Add offset to index.
+ // Update instance type.
+ __ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset));
+ __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset));
+ __ jmp(&underlying_unpacked);
- // Allocate the result.
- __ AllocateAsciiString(r0, r2, r3, r4, r1, &runtime);
+ __ bind(&seq_or_external_string);
+ // Sequential or external string. Just move string to the expected register.
+ __ mov(r5, r0);
- // r0: result string
- // r2: result string length
- // r5: first character of substring to copy
- // r7 (a.k.a. from): from offset (smi)
+ __ bind(&underlying_unpacked);
+
+ if (FLAG_string_slices) {
+ Label copy_routine;
+ // r5: underlying subject string
+ // r1: instance type of underlying subject string
+ // r2: length
+ // r3: adjusted start index (untagged)
+ __ cmp(r2, Operand(SlicedString::kMinLength));
+ // Short slice. Copy instead of slicing.
+ __ b(lt, &copy_routine);
+ // Allocate new sliced string. At this point we do not reload the instance
+ // type including the string encoding because we simply rely on the info
+ // provided by the original string. It does not matter if the original
+ // string's encoding is wrong because we always have to recheck encoding of
+ // the newly created string's parent anyways due to externalized strings.
+ Label two_byte_slice, set_slice_header;
+ STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+ STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
+ __ tst(r1, Operand(kStringEncodingMask));
+ __ b(eq, &two_byte_slice);
+ __ AllocateAsciiSlicedString(r0, r2, r6, r7, &runtime);
+ __ jmp(&set_slice_header);
+ __ bind(&two_byte_slice);
+ __ AllocateTwoByteSlicedString(r0, r2, r6, r7, &runtime);
+ __ bind(&set_slice_header);
+ __ mov(r3, Operand(r3, LSL, 1));
+ __ str(r5, FieldMemOperand(r0, SlicedString::kParentOffset));
+ __ str(r3, FieldMemOperand(r0, SlicedString::kOffsetOffset));
+ __ jmp(&return_r0);
+
+ __ bind(&copy_routine);
+ }
+
+ // r5: underlying subject string
+ // r1: instance type of underlying subject string
+ // r2: length
+ // r3: adjusted start index (untagged)
+ Label two_byte_sequential, sequential_string, allocate_result;
+ STATIC_ASSERT(kExternalStringTag != 0);
+ STATIC_ASSERT(kSeqStringTag == 0);
+ __ tst(r1, Operand(kExternalStringTag));
+ __ b(eq, &sequential_string);
+
+ // Handle external string.
+ // Rule out short external strings.
+ STATIC_CHECK(kShortExternalStringTag != 0);
+ __ tst(r1, Operand(kShortExternalStringTag));
+ __ b(ne, &runtime);
+ __ ldr(r5, FieldMemOperand(r5, ExternalString::kResourceDataOffset));
+ // r5 already points to the first character of underlying string.
+ __ jmp(&allocate_result);
+
+ __ bind(&sequential_string);
+ // Locate first character of underlying subject string.
+ STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqAsciiString::kHeaderSize);
+ __ add(r5, r5, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+
+ __ bind(&allocate_result);
+ // Sequential acii string. Allocate the result.
+ STATIC_ASSERT((kAsciiStringTag & kStringEncodingMask) != 0);
+ __ tst(r1, Operand(kStringEncodingMask));
+ __ b(eq, &two_byte_sequential);
+
+ // Allocate and copy the resulting ASCII string.
+ __ AllocateAsciiString(r0, r2, r4, r6, r7, &runtime);
+
+ // Locate first character of substring to copy.
+ __ add(r5, r5, r3);
// Locate first character of result.
__ add(r1, r0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
@@ -5879,30 +6023,16 @@ void SubStringStub::Generate(MacroAssembler* masm) {
COPY_ASCII | DEST_ALWAYS_ALIGNED);
__ jmp(&return_r0);
- __ bind(&non_ascii_flat);
- // r0: original string
- // r2: result string length
- // r7 (a.k.a. from): from offset (smi)
- // Check for flat two byte string.
+ // Allocate and copy the resulting two-byte string.
+ __ bind(&two_byte_sequential);
+ __ AllocateTwoByteString(r0, r2, r4, r6, r7, &runtime);
- // Locate 'from' character of string.
- __ add(r5, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
- // As "from" is a smi it is 2 times the value which matches the size of a two
- // byte character.
+ // Locate first character of substring to copy.
STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
- __ add(r5, r5, Operand(from));
-
- // Allocate the result.
- __ AllocateTwoByteString(r0, r2, r1, r3, r4, &runtime);
-
- // r0: result string
- // r2: result string length
- // r5: first character of substring to copy
+ __ add(r5, r5, Operand(r3, LSL, 1));
// Locate first character of result.
__ add(r1, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
- from = no_reg;
-
// r0: result string.
// r1: first character of result.
// r2: result length.
@@ -5910,72 +6040,9 @@ void SubStringStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
StringHelper::GenerateCopyCharactersLong(
masm, r1, r5, r2, r3, r4, r6, r7, r9, DEST_ALWAYS_ALIGNED);
- __ jmp(&return_r0);
-
- if (FLAG_string_slices) {
- __ bind(&create_slice);
- // r0: original string
- // r1: instance type
- // r2: length
- // r3: from index (untagged smi)
- // r6 (a.k.a. to): to (smi)
- // r7 (a.k.a. from): from offset (smi)
- Label allocate_slice, sliced_string, seq_string;
- STATIC_ASSERT(kSeqStringTag == 0);
- __ tst(r1, Operand(kStringRepresentationMask));
- __ b(eq, &seq_string);
- STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
- STATIC_ASSERT(kIsIndirectStringMask != 0);
- __ tst(r1, Operand(kIsIndirectStringMask));
- // External string. Jump to runtime.
- __ b(eq, &runtime);
-
- __ tst(r1, Operand(kSlicedNotConsMask));
- __ b(ne, &sliced_string);
- // Cons string. Check whether it is flat, then fetch first part.
- __ ldr(r5, FieldMemOperand(r0, ConsString::kSecondOffset));
- __ LoadRoot(r9, Heap::kEmptyStringRootIndex);
- __ cmp(r5, r9);
- __ b(ne, &runtime);
- __ ldr(r5, FieldMemOperand(r0, ConsString::kFirstOffset));
- __ jmp(&allocate_slice);
-
- __ bind(&sliced_string);
- // Sliced string. Fetch parent and correct start index by offset.
- __ ldr(r5, FieldMemOperand(r0, SlicedString::kOffsetOffset));
- __ add(r7, r7, r5);
- __ ldr(r5, FieldMemOperand(r0, SlicedString::kParentOffset));
- __ jmp(&allocate_slice);
-
- __ bind(&seq_string);
- // Sequential string. Just move string to the right register.
- __ mov(r5, r0);
-
- __ bind(&allocate_slice);
- // r1: instance type of original string
- // r2: length
- // r5: underlying subject string
- // r7 (a.k.a. from): from offset (smi)
- // Allocate new sliced string. At this point we do not reload the instance
- // type including the string encoding because we simply rely on the info
- // provided by the original string. It does not matter if the original
- // string's encoding is wrong because we always have to recheck encoding of
- // the newly created string's parent anyways due to externalized strings.
- Label two_byte_slice, set_slice_header;
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
- STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
- __ tst(r1, Operand(kStringEncodingMask));
- __ b(eq, &two_byte_slice);
- __ AllocateAsciiSlicedString(r0, r2, r3, r4, &runtime);
- __ jmp(&set_slice_header);
- __ bind(&two_byte_slice);
- __ AllocateTwoByteSlicedString(r0, r2, r3, r4, &runtime);
- __ bind(&set_slice_header);
- __ str(r7, FieldMemOperand(r0, SlicedString::kOffsetOffset));
- __ str(r5, FieldMemOperand(r0, SlicedString::kParentOffset));
- }
__ bind(&return_r0);
+ Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
__ add(sp, sp, Operand(3 * kPointerSize));
__ Ret();
@@ -6008,7 +6075,7 @@ void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
Label compare_chars;
__ bind(&check_zero_length);
STATIC_ASSERT(kSmiTag == 0);
- __ tst(length, Operand(length));
+ __ cmp(length, Operand(0));
__ b(ne, &compare_chars);
__ mov(r0, Operand(Smi::FromInt(EQUAL)));
__ Ret();
@@ -6041,7 +6108,7 @@ void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
__ mov(scratch1, scratch2, LeaveCC, gt);
Register min_length = scratch1;
STATIC_ASSERT(kSmiTag == 0);
- __ tst(min_length, Operand(min_length));
+ __ cmp(min_length, Operand(0));
__ b(eq, &compare_lengths);
// Compare loop.
@@ -6132,7 +6199,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
void StringAddStub::Generate(MacroAssembler* masm) {
- Label string_add_runtime, call_builtin;
+ Label call_runtime, call_builtin;
Builtins::JavaScript builtin_id = Builtins::ADD;
Counters* counters = masm->isolate()->counters();
@@ -6147,7 +6214,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
// Make sure that both arguments are strings if not known in advance.
if (flags_ == NO_STRING_ADD_FLAGS) {
- __ JumpIfEitherSmi(r0, r1, &string_add_runtime);
+ __ JumpIfEitherSmi(r0, r1, &call_runtime);
// Load instance types.
__ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset));
@@ -6157,7 +6224,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
// If either is not a string, go to runtime.
__ tst(r4, Operand(kIsNotStringMask));
__ tst(r5, Operand(kIsNotStringMask), eq);
- __ b(ne, &string_add_runtime);
+ __ b(ne, &call_runtime);
} else {
// Here at least one of the arguments is definitely a string.
// We convert the one that is not known to be a string.
@@ -6226,7 +6293,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
}
__ JumpIfBothInstanceTypesAreNotSequentialAscii(r4, r5, r6, r7,
- &string_add_runtime);
+ &call_runtime);
// Get the two characters forming the sub string.
__ ldrb(r2, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
@@ -6248,7 +6315,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
// halfword store instruction (which assumes that processor is
// in a little endian mode)
__ mov(r6, Operand(2));
- __ AllocateAsciiString(r0, r6, r4, r5, r9, &string_add_runtime);
+ __ AllocateAsciiString(r0, r6, r4, r5, r9, &call_runtime);
__ strh(r2, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
__ IncrementCounter(counters->string_add_native(), 1, r2, r3);
__ add(sp, sp, Operand(2 * kPointerSize));
@@ -6256,14 +6323,14 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ bind(&longer_than_two);
// Check if resulting string will be flat.
- __ cmp(r6, Operand(String::kMinNonFlatLength));
+ __ cmp(r6, Operand(ConsString::kMinLength));
__ b(lt, &string_add_flat_result);
// Handle exceptionally long strings in the runtime system.
STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0);
ASSERT(IsPowerOf2(String::kMaxLength + 1));
// kMaxLength + 1 is representable as shifted literal, kMaxLength is not.
__ cmp(r6, Operand(String::kMaxLength + 1));
- __ b(hs, &string_add_runtime);
+ __ b(hs, &call_runtime);
// If result is not supposed to be flat, allocate a cons string object.
// If both strings are ASCII the result is an ASCII cons string.
@@ -6281,7 +6348,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
// Allocate an ASCII cons string.
__ bind(&ascii_data);
- __ AllocateAsciiConsString(r7, r6, r4, r5, &string_add_runtime);
+ __ AllocateAsciiConsString(r7, r6, r4, r5, &call_runtime);
__ bind(&allocated);
// Fill the fields of the cons string.
__ str(r0, FieldMemOperand(r7, ConsString::kFirstOffset));
@@ -6306,11 +6373,13 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ b(eq, &ascii_data);
// Allocate a two byte cons string.
- __ AllocateTwoByteConsString(r7, r6, r4, r5, &string_add_runtime);
+ __ AllocateTwoByteConsString(r7, r6, r4, r5, &call_runtime);
__ jmp(&allocated);
- // Handle creating a flat result. First check that both strings are
- // sequential and that they have the same encoding.
+ // We cannot encounter sliced strings or cons strings here since:
+ STATIC_ASSERT(SlicedString::kMinLength >= ConsString::kMinLength);
+ // Handle creating a flat result from either external or sequential strings.
+ // Locate the first characters' locations.
// r0: first string
// r1: second string
// r2: length of first string
@@ -6318,6 +6387,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
// r4: first string instance type (if flags_ == NO_STRING_ADD_FLAGS)
// r5: second string instance type (if flags_ == NO_STRING_ADD_FLAGS)
// r6: sum of lengths.
+ Label first_prepared, second_prepared;
__ bind(&string_add_flat_result);
if (flags_ != NO_STRING_ADD_FLAGS) {
__ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
@@ -6325,97 +6395,88 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset));
__ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset));
}
- // Check that both strings are sequential.
+
+ // Check whether both strings have same encoding
+ __ eor(r7, r4, Operand(r5));
+ __ tst(r7, Operand(kStringEncodingMask));
+ __ b(ne, &call_runtime);
+
STATIC_ASSERT(kSeqStringTag == 0);
__ tst(r4, Operand(kStringRepresentationMask));
- __ tst(r5, Operand(kStringRepresentationMask), eq);
- __ b(ne, &string_add_runtime);
- // Now check if both strings have the same encoding (ASCII/Two-byte).
- // r0: first string.
- // r1: second string.
+ STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize);
+ __ add(r7,
+ r0,
+ Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag),
+ LeaveCC,
+ eq);
+ __ b(eq, &first_prepared);
+ // External string: rule out short external string and load string resource.
+ STATIC_ASSERT(kShortExternalStringTag != 0);
+ __ tst(r4, Operand(kShortExternalStringMask));
+ __ b(ne, &call_runtime);
+ __ ldr(r7, FieldMemOperand(r0, ExternalString::kResourceDataOffset));
+ __ bind(&first_prepared);
+
+ STATIC_ASSERT(kSeqStringTag == 0);
+ __ tst(r5, Operand(kStringRepresentationMask));
+ STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize);
+ __ add(r1,
+ r1,
+ Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag),
+ LeaveCC,
+ eq);
+ __ b(eq, &second_prepared);
+ // External string: rule out short external string and load string resource.
+ STATIC_ASSERT(kShortExternalStringTag != 0);
+ __ tst(r5, Operand(kShortExternalStringMask));
+ __ b(ne, &call_runtime);
+ __ ldr(r1, FieldMemOperand(r1, ExternalString::kResourceDataOffset));
+ __ bind(&second_prepared);
+
+ Label non_ascii_string_add_flat_result;
+ // r7: first character of first string
+ // r1: first character of second string
// r2: length of first string.
// r3: length of second string.
- // r6: sum of lengths..
- Label non_ascii_string_add_flat_result;
- ASSERT(IsPowerOf2(kStringEncodingMask)); // Just one bit to test.
- __ eor(r7, r4, Operand(r5));
- __ tst(r7, Operand(kStringEncodingMask));
- __ b(ne, &string_add_runtime);
- // And see if it's ASCII or two-byte.
- __ tst(r4, Operand(kStringEncodingMask));
+ // r6: sum of lengths.
+ // Both strings have the same encoding.
+ STATIC_ASSERT(kTwoByteStringTag == 0);
+ __ tst(r5, Operand(kStringEncodingMask));
__ b(eq, &non_ascii_string_add_flat_result);
- // Both strings are sequential ASCII strings. We also know that they are
- // short (since the sum of the lengths is less than kMinNonFlatLength).
- // r6: length of resulting flat string
- __ AllocateAsciiString(r7, r6, r4, r5, r9, &string_add_runtime);
- // Locate first character of result.
- __ add(r6, r7, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- // Locate first character of first argument.
- __ add(r0, r0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- // r0: first character of first string.
- // r1: second string.
+ __ AllocateAsciiString(r0, r6, r4, r5, r9, &call_runtime);
+ __ add(r6, r0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ // r0: result string.
+ // r7: first character of first string.
+ // r1: first character of second string.
// r2: length of first string.
// r3: length of second string.
// r6: first character of result.
- // r7: result string.
- StringHelper::GenerateCopyCharacters(masm, r6, r0, r2, r4, true);
-
- // Load second argument and locate first character.
- __ add(r1, r1, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- // r1: first character of second string.
- // r3: length of second string.
+ StringHelper::GenerateCopyCharacters(masm, r6, r7, r2, r4, true);
// r6: next character of result.
- // r7: result string.
StringHelper::GenerateCopyCharacters(masm, r6, r1, r3, r4, true);
- __ mov(r0, Operand(r7));
__ IncrementCounter(counters->string_add_native(), 1, r2, r3);
__ add(sp, sp, Operand(2 * kPointerSize));
__ Ret();
__ bind(&non_ascii_string_add_flat_result);
- // Both strings are sequential two byte strings.
- // r0: first string.
- // r1: second string.
- // r2: length of first string.
- // r3: length of second string.
- // r6: sum of length of strings.
- __ AllocateTwoByteString(r7, r6, r4, r5, r9, &string_add_runtime);
- // r0: first string.
- // r1: second string.
- // r2: length of first string.
- // r3: length of second string.
- // r7: result string.
-
- // Locate first character of result.
- __ add(r6, r7, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
- // Locate first character of first argument.
- __ add(r0, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
-
- // r0: first character of first string.
- // r1: second string.
+ __ AllocateTwoByteString(r0, r6, r4, r5, r9, &call_runtime);
+ __ add(r6, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
+ // r0: result string.
+ // r7: first character of first string.
+ // r1: first character of second string.
// r2: length of first string.
// r3: length of second string.
// r6: first character of result.
- // r7: result string.
- StringHelper::GenerateCopyCharacters(masm, r6, r0, r2, r4, false);
-
- // Locate first character of second argument.
- __ add(r1, r1, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
-
- // r1: first character of second string.
- // r3: length of second string.
- // r6: next character of result (after copy of first string).
- // r7: result string.
+ StringHelper::GenerateCopyCharacters(masm, r6, r7, r2, r4, false);
+ // r6: next character of result.
StringHelper::GenerateCopyCharacters(masm, r6, r1, r3, r4, false);
-
- __ mov(r0, Operand(r7));
__ IncrementCounter(counters->string_add_native(), 1, r2, r3);
__ add(sp, sp, Operand(2 * kPointerSize));
__ Ret();
// Just jump to runtime to add the two strings.
- __ bind(&string_add_runtime);
+ __ bind(&call_runtime);
__ TailCallRuntime(Runtime::kStringAdd, 2, 1);
if (call_builtin.is_linked()) {
@@ -6499,15 +6560,15 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
ASSERT(state_ == CompareIC::HEAP_NUMBERS);
Label generic_stub;
- Label unordered;
+ Label unordered, maybe_undefined1, maybe_undefined2;
Label miss;
__ and_(r2, r1, Operand(r0));
__ JumpIfSmi(r2, &generic_stub);
__ CompareObjectType(r0, r2, r2, HEAP_NUMBER_TYPE);
- __ b(ne, &miss);
+ __ b(ne, &maybe_undefined1);
__ CompareObjectType(r1, r2, r2, HEAP_NUMBER_TYPE);
- __ b(ne, &miss);
+ __ b(ne, &maybe_undefined2);
// Inlining the double comparison and falling back to the general compare
// stub if NaN is involved or VFP3 is unsupported.
@@ -6531,14 +6592,28 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
__ mov(r0, Operand(LESS), LeaveCC, lt);
__ mov(r0, Operand(GREATER), LeaveCC, gt);
__ Ret();
-
- __ bind(&unordered);
}
+ __ bind(&unordered);
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, r1, r0);
__ bind(&generic_stub);
__ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
+ __ bind(&maybe_undefined1);
+ if (Token::IsOrderedRelationalCompareOp(op_)) {
+ __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
+ __ b(ne, &miss);
+ __ CompareObjectType(r1, r2, r2, HEAP_NUMBER_TYPE);
+ __ b(ne, &maybe_undefined2);
+ __ jmp(&unordered);
+ }
+
+ __ bind(&maybe_undefined2);
+ if (Token::IsOrderedRelationalCompareOp(op_)) {
+ __ CompareRoot(r1, Heap::kUndefinedValueRootIndex);
+ __ b(eq, &unordered);
+ }
+
__ bind(&miss);
GenerateMiss(masm);
}
@@ -6586,6 +6661,8 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
ASSERT(state_ == CompareIC::STRINGS);
Label miss;
+ bool equality = Token::IsEqualityOp(op_);
+
// Registers containing left and right operands respectively.
Register left = r1;
Register right = r0;
@@ -6619,28 +6696,39 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
// Check that both strings are symbols. If they are, we're done
// because we already know they are not identical.
- ASSERT(GetCondition() == eq);
- STATIC_ASSERT(kSymbolTag != 0);
- __ and_(tmp3, tmp1, Operand(tmp2));
- __ tst(tmp3, Operand(kIsSymbolMask));
- // Make sure r0 is non-zero. At this point input operands are
- // guaranteed to be non-zero.
- ASSERT(right.is(r0));
- __ Ret(ne);
+ if (equality) {
+ ASSERT(GetCondition() == eq);
+ STATIC_ASSERT(kSymbolTag != 0);
+ __ and_(tmp3, tmp1, Operand(tmp2));
+ __ tst(tmp3, Operand(kIsSymbolMask));
+ // Make sure r0 is non-zero. At this point input operands are
+ // guaranteed to be non-zero.
+ ASSERT(right.is(r0));
+ __ Ret(ne);
+ }
// Check that both strings are sequential ASCII.
Label runtime;
- __ JumpIfBothInstanceTypesAreNotSequentialAscii(tmp1, tmp2, tmp3, tmp4,
- &runtime);
+ __ JumpIfBothInstanceTypesAreNotSequentialAscii(
+ tmp1, tmp2, tmp3, tmp4, &runtime);
// Compare flat ASCII strings. Returns when done.
- StringCompareStub::GenerateFlatAsciiStringEquals(
- masm, left, right, tmp1, tmp2, tmp3);
+ if (equality) {
+ StringCompareStub::GenerateFlatAsciiStringEquals(
+ masm, left, right, tmp1, tmp2, tmp3);
+ } else {
+ StringCompareStub::GenerateCompareFlatAsciiStrings(
+ masm, left, right, tmp1, tmp2, tmp3, tmp4);
+ }
// Handle more complex cases in runtime.
__ bind(&runtime);
__ Push(left, right);
- __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+ if (equality) {
+ __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+ } else {
+ __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
+ }
__ bind(&miss);
GenerateMiss(masm);
@@ -6653,18 +6741,10 @@ void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
__ and_(r2, r1, Operand(r0));
__ JumpIfSmi(r2, &miss);
- __ CompareObjectType(r0, r2, r3, JS_OBJECT_TYPE);
+ __ CompareObjectType(r0, r2, r2, JS_OBJECT_TYPE);
__ b(ne, &miss);
- __ ldrb(r2, FieldMemOperand(r2, Map::kBitField2Offset));
- __ and_(r2, r2, Operand(1 << Map::kUseUserObjectComparison));
- __ cmp(r2, Operand(1 << Map::kUseUserObjectComparison));
- __ b(eq, &miss);
- __ CompareObjectType(r1, r2, r3, JS_OBJECT_TYPE);
+ __ CompareObjectType(r1, r2, r2, JS_OBJECT_TYPE);
__ b(ne, &miss);
- __ ldrb(r2, FieldMemOperand(r2, Map::kBitField2Offset));
- __ and_(r2, r2, Operand(1 << Map::kUseUserObjectComparison));
- __ cmp(r2, Operand(1 << Map::kUseUserObjectComparison));
- __ b(eq, &miss);
ASSERT(GetCondition() == eq);
__ sub(r0, r0, Operand(r1));
@@ -6675,26 +6755,47 @@ void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
}
-void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
- __ Push(r1, r0);
- __ push(lr);
+void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
+ Label miss;
+ __ and_(r2, r1, Operand(r0));
+ __ JumpIfSmi(r2, &miss);
+ __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
+ __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
+ __ cmp(r2, Operand(known_map_));
+ __ b(ne, &miss);
+ __ cmp(r3, Operand(known_map_));
+ __ b(ne, &miss);
- // Call the runtime system in a fresh internal frame.
- ExternalReference miss =
- ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
+ __ sub(r0, r0, Operand(r1));
+ __ Ret();
+
+ __ bind(&miss);
+ GenerateMiss(masm);
+}
+
+
+
+void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
{
+ // Call the runtime system in a fresh internal frame.
+ ExternalReference miss =
+ ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
+
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(r1, r0);
+ __ push(lr);
+ __ Push(r1, r0);
__ mov(ip, Operand(Smi::FromInt(op_)));
__ push(ip);
__ CallExternalReference(miss, 3);
+ // Compute the entry point of the rewritten stub.
+ __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
+ // Restore registers.
+ __ pop(lr);
+ __ pop(r0);
+ __ pop(r1);
}
- // Compute the entry point of the rewritten stub.
- __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
- // Restore registers.
- __ pop(lr);
- __ pop(r0);
- __ pop(r1);
+
__ Jump(r2);
}
@@ -6738,7 +6839,7 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
// not equal to the name and kProbes-th slot is not used (its name is the
// undefined value), it guarantees the hash table doesn't contain the
// property. It's true even if some slots represent deleted properties
- // (their names are the null value).
+ // (their names are the hole value).
for (int i = 0; i < kInlinedProbes; i++) {
// scratch0 points to properties hash.
// Compute the masked index: (hash + i + i * i) & mask.
@@ -6766,10 +6867,17 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
__ b(eq, done);
if (i != kInlinedProbes - 1) {
+ // Load the hole ready for use below:
+ __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);
+
// Stop if found the property.
__ cmp(entity_name, Operand(Handle<String>(name)));
__ b(eq, miss);
+ Label the_hole;
+ __ cmp(entity_name, tmp);
+ __ b(eq, &the_hole);
+
// Check if the entry name is not a symbol.
__ ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
__ ldrb(entity_name,
@@ -6777,6 +6885,8 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
__ tst(entity_name, Operand(kIsSymbolMask));
__ b(eq, miss);
+ __ bind(&the_hole);
+
// Restore the properties.
__ ldr(properties,
FieldMemOperand(receiver, JSObject::kPropertiesOffset));
@@ -6792,7 +6902,7 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
__ mov(r1, Operand(Handle<String>(name)));
StringDictionaryLookupStub stub(NEGATIVE_LOOKUP);
__ CallStub(&stub);
- __ tst(r0, Operand(r0));
+ __ cmp(r0, Operand(0));
__ ldm(ia_w, sp, spill_mask);
__ b(eq, done);
@@ -6869,7 +6979,7 @@ void StringDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
}
StringDictionaryLookupStub stub(POSITIVE_LOOKUP);
__ CallStub(&stub);
- __ tst(r0, Operand(r0));
+ __ cmp(r0, Operand(0));
__ mov(scratch2, Operand(r2));
__ ldm(ia_w, sp, spill_mask);
@@ -6975,39 +7085,45 @@ struct AheadOfTimeWriteBarrierStubList {
RememberedSetAction action;
};
+#define REG(Name) { kRegister_ ## Name ## _Code }
-struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
+static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
// Used in RegExpExecStub.
- { r6, r4, r7, EMIT_REMEMBERED_SET },
- { r6, r2, r7, EMIT_REMEMBERED_SET },
+ { REG(r6), REG(r4), REG(r7), EMIT_REMEMBERED_SET },
+ { REG(r6), REG(r2), REG(r7), EMIT_REMEMBERED_SET },
// Used in CompileArrayPushCall.
// Also used in StoreIC::GenerateNormal via GenerateDictionaryStore.
// Also used in KeyedStoreIC::GenerateGeneric.
- { r3, r4, r5, EMIT_REMEMBERED_SET },
+ { REG(r3), REG(r4), REG(r5), EMIT_REMEMBERED_SET },
// Used in CompileStoreGlobal.
- { r4, r1, r2, OMIT_REMEMBERED_SET },
+ { REG(r4), REG(r1), REG(r2), OMIT_REMEMBERED_SET },
// Used in StoreStubCompiler::CompileStoreField via GenerateStoreField.
- { r1, r2, r3, EMIT_REMEMBERED_SET },
- { r3, r2, r1, EMIT_REMEMBERED_SET },
+ { REG(r1), REG(r2), REG(r3), EMIT_REMEMBERED_SET },
+ { REG(r3), REG(r2), REG(r1), EMIT_REMEMBERED_SET },
// Used in KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
- { r2, r1, r3, EMIT_REMEMBERED_SET },
- { r3, r1, r2, EMIT_REMEMBERED_SET },
+ { REG(r2), REG(r1), REG(r3), EMIT_REMEMBERED_SET },
+ { REG(r3), REG(r1), REG(r2), EMIT_REMEMBERED_SET },
// KeyedStoreStubCompiler::GenerateStoreFastElement.
- { r4, r2, r3, EMIT_REMEMBERED_SET },
+ { REG(r3), REG(r2), REG(r4), EMIT_REMEMBERED_SET },
+ { REG(r2), REG(r3), REG(r4), EMIT_REMEMBERED_SET },
// ElementsTransitionGenerator::GenerateSmiOnlyToObject
// and ElementsTransitionGenerator::GenerateSmiOnlyToDouble
// and ElementsTransitionGenerator::GenerateDoubleToObject
- { r2, r3, r9, EMIT_REMEMBERED_SET },
+ { REG(r2), REG(r3), REG(r9), EMIT_REMEMBERED_SET },
+ { REG(r2), REG(r3), REG(r9), OMIT_REMEMBERED_SET },
// ElementsTransitionGenerator::GenerateDoubleToObject
- { r6, r2, r0, EMIT_REMEMBERED_SET },
- { r2, r6, r9, EMIT_REMEMBERED_SET },
+ { REG(r6), REG(r2), REG(r0), EMIT_REMEMBERED_SET },
+ { REG(r2), REG(r6), REG(r9), EMIT_REMEMBERED_SET },
+ // StoreArrayLiteralElementStub::Generate
+ { REG(r5), REG(r0), REG(r6), EMIT_REMEMBERED_SET },
// Null termination.
- { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET}
+ { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
};
+#undef REG
bool RecordWriteStub::IsPregenerated() {
- for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
+ for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
!entry->object.is(no_reg);
entry++) {
if (object_.is(entry->object) &&
@@ -7034,7 +7150,7 @@ void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() {
void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
- for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
+ for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
!entry->object.is(no_reg);
entry++) {
RecordWriteStub stub(entry->object,
@@ -7237,6 +7353,64 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
// Fall through when we need to inform the incremental marker.
}
+
+void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- r0 : element value to store
+ // -- r1 : array literal
+ // -- r2 : map of array literal
+ // -- r3 : element index as smi
+ // -- r4 : array literal index in function as smi
+ // -----------------------------------
+
+ Label element_done;
+ Label double_elements;
+ Label smi_element;
+ Label slow_elements;
+ Label fast_elements;
+
+ __ CheckFastElements(r2, r5, &double_elements);
+ // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
+ __ JumpIfSmi(r0, &smi_element);
+ __ CheckFastSmiOnlyElements(r2, r5, &fast_elements);
+
+ // Store into the array literal requires a elements transition. Call into
+ // the runtime.
+ __ bind(&slow_elements);
+ // call.
+ __ Push(r1, r3, r0);
+ __ ldr(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+ __ ldr(r5, FieldMemOperand(r5, JSFunction::kLiteralsOffset));
+ __ Push(r5, r4);
+ __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
+
+ // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
+ __ bind(&fast_elements);
+ __ ldr(r5, FieldMemOperand(r1, JSObject::kElementsOffset));
+ __ add(r6, r5, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(r6, r6, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ str(r0, MemOperand(r6, 0));
+ // Update the write barrier for the array store.
+ __ RecordWrite(r5, r6, r0, kLRHasNotBeenSaved, kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+ __ Ret();
+
+ // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
+ // FAST_ELEMENTS, and value is Smi.
+ __ bind(&smi_element);
+ __ ldr(r5, FieldMemOperand(r1, JSObject::kElementsOffset));
+ __ add(r6, r5, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ str(r0, FieldMemOperand(r6, FixedArray::kHeaderSize));
+ __ Ret();
+
+ // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
+ __ bind(&double_elements);
+ __ ldr(r5, FieldMemOperand(r1, JSObject::kElementsOffset));
+ __ StoreNumberToDoubleElements(r0, r3, r1, r5, r6, r7, r9, r2,
+ &slow_elements);
+ __ Ret();
+}
+
#undef __
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/arm/code-stubs-arm.h b/src/3rdparty/v8/src/arm/code-stubs-arm.h
index b846864..38ed476 100644
--- a/src/3rdparty/v8/src/arm/code-stubs-arm.h
+++ b/src/3rdparty/v8/src/arm/code-stubs-arm.h
@@ -136,7 +136,7 @@ class UnaryOpStub: public CodeStub {
return UnaryOpIC::ToState(operand_type_);
}
- virtual void FinishCode(Code* code) {
+ virtual void FinishCode(Handle<Code> code) {
code->set_unary_op_type(operand_type_);
}
};
@@ -235,7 +235,7 @@ class BinaryOpStub: public CodeStub {
return BinaryOpIC::ToState(operands_type_);
}
- virtual void FinishCode(Code* code) {
+ virtual void FinishCode(Handle<Code> code) {
code->set_binary_op_type(operands_type_);
code->set_binary_op_result_type(result_type_);
}
@@ -244,6 +244,70 @@ class BinaryOpStub: public CodeStub {
};
+class StringHelper : public AllStatic {
+ public:
+ // Generate code for copying characters using a simple loop. This should only
+ // be used in places where the number of characters is small and the
+ // additional setup and checking in GenerateCopyCharactersLong adds too much
+ // overhead. Copying of overlapping regions is not supported.
+ // Dest register ends at the position after the last character written.
+ static void GenerateCopyCharacters(MacroAssembler* masm,
+ Register dest,
+ Register src,
+ Register count,
+ Register scratch,
+ bool ascii);
+
+ // Generate code for copying a large number of characters. This function
+ // is allowed to spend extra time setting up conditions to make copying
+ // faster. Copying of overlapping regions is not supported.
+ // Dest register ends at the position after the last character written.
+ static void GenerateCopyCharactersLong(MacroAssembler* masm,
+ Register dest,
+ Register src,
+ Register count,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ Register scratch4,
+ Register scratch5,
+ int flags);
+
+
+ // Probe the symbol table for a two character string. If the string is
+ // not found by probing a jump to the label not_found is performed. This jump
+ // does not guarantee that the string is not in the symbol table. If the
+ // string is found the code falls through with the string in register r0.
+ // Contents of both c1 and c2 registers are modified. At the exit c1 is
+ // guaranteed to contain halfword with low and high bytes equal to
+ // initial contents of c1 and c2 respectively.
+ static void GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
+ Register c1,
+ Register c2,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ Register scratch4,
+ Register scratch5,
+ Label* not_found);
+
+ // Generate string hash.
+ static void GenerateHashInit(MacroAssembler* masm,
+ Register hash,
+ Register character);
+
+ static void GenerateHashAddCharacter(MacroAssembler* masm,
+ Register hash,
+ Register character);
+
+ static void GenerateHashGetHash(MacroAssembler* masm,
+ Register hash);
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(StringHelper);
+};
+
+
// Flag that indicates how to generate code for the stub StringAddStub.
enum StringAddFlags {
NO_STRING_ADD_FLAGS = 0,
@@ -585,13 +649,6 @@ class RecordWriteStub: public CodeStub {
SaveFPRegsModeBits::encode(save_fp_regs_mode_);
}
- bool MustBeInStubCache() {
- // All stubs must be registered in the stub cache
- // otherwise IncrementalMarker would not be able to find
- // and patch it.
- return true;
- }
-
void Activate(Code* code) {
code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code);
}
diff --git a/src/3rdparty/v8/src/arm/codegen-arm.cc b/src/3rdparty/v8/src/arm/codegen-arm.cc
index 508d830..befd8f2 100644
--- a/src/3rdparty/v8/src/arm/codegen-arm.cc
+++ b/src/3rdparty/v8/src/arm/codegen-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -37,6 +37,22 @@ namespace internal {
#define __ ACCESS_MASM(masm)
+UnaryMathFunction CreateTranscendentalFunction(TranscendentalCache::Type type) {
+ switch (type) {
+ case TranscendentalCache::SIN: return &sin;
+ case TranscendentalCache::COS: return &cos;
+ case TranscendentalCache::TAN: return &tan;
+ case TranscendentalCache::LOG: return &log;
+ default: UNIMPLEMENTED();
+ }
+ return NULL;
+}
+
+
+UnaryMathFunction CreateSqrtFunction() {
+ return &sqrt;
+}
+
// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.
@@ -90,11 +106,16 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
// -- r3 : target map, scratch for subsequent call
// -- r4 : scratch (elements)
// -----------------------------------
- Label loop, entry, convert_hole, gc_required;
+ Label loop, entry, convert_hole, gc_required, only_change_map, done;
bool vfp3_supported = CpuFeatures::IsSupported(VFP3);
- __ push(lr);
+ // Check for empty arrays, which only require a map transition and no changes
+ // to the backing store.
__ ldr(r4, FieldMemOperand(r2, JSObject::kElementsOffset));
+ __ CompareRoot(r4, Heap::kEmptyFixedArrayRootIndex);
+ __ b(eq, &only_change_map);
+
+ __ push(lr);
__ ldr(r5, FieldMemOperand(r4, FixedArray::kLengthOffset));
// r4: source FixedArray
// r5: number of elements (smi-tagged)
@@ -104,10 +125,10 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
__ add(lr, lr, Operand(r5, LSL, 2));
__ AllocateInNewSpace(lr, r6, r7, r9, &gc_required, NO_ALLOCATION_FLAGS);
// r6: destination FixedDoubleArray, not tagged as heap object
+ // Set destination FixedDoubleArray's length and map.
__ LoadRoot(r9, Heap::kFixedDoubleArrayMapRootIndex);
- __ str(r9, MemOperand(r6, HeapObject::kMapOffset));
- // Set destination FixedDoubleArray's length.
__ str(r5, MemOperand(r6, FixedDoubleArray::kLengthOffset));
+ __ str(r9, MemOperand(r6, HeapObject::kMapOffset));
// Update receiver's map.
__ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
@@ -117,7 +138,7 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
r9,
kLRHasBeenSaved,
kDontSaveFPRegs,
- EMIT_REMEMBERED_SET,
+ OMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
// Replace receiver's backing store with newly created FixedDoubleArray.
__ add(r3, r6, Operand(kHeapObjectTag));
@@ -146,6 +167,18 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
__ b(&entry);
+ __ bind(&only_change_map);
+ __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
+ __ RecordWriteField(r2,
+ HeapObject::kMapOffset,
+ r3,
+ r9,
+ kLRHasBeenSaved,
+ kDontSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ __ b(&done);
+
// Call into runtime if GC is required.
__ bind(&gc_required);
__ pop(lr);
@@ -155,10 +188,9 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
__ bind(&loop);
__ ldr(r9, MemOperand(r3, 4, PostIndex));
// r9: current element
- __ JumpIfNotSmi(r9, &convert_hole);
+ __ UntagAndJumpIfNotSmi(r9, r9, &convert_hole);
// Normal smi, convert to double and store.
- __ SmiUntag(r9);
if (vfp3_supported) {
CpuFeatures::Scope scope(VFP3);
__ vmov(s0, r9);
@@ -180,6 +212,13 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
// Hole found, store the-hole NaN.
__ bind(&convert_hole);
+ if (FLAG_debug_code) {
+ // Restore a "smi-untagged" heap object.
+ __ SmiTag(r9);
+ __ orr(r9, r9, Operand(1));
+ __ CompareRoot(r9, Heap::kTheHoleValueRootIndex);
+ __ Assert(eq, "object found in smi-only array");
+ }
__ Strd(r4, r5, MemOperand(r7, 8, PostIndex));
__ bind(&entry);
@@ -188,6 +227,7 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
if (!vfp3_supported) __ Pop(r1, r0);
__ pop(lr);
+ __ bind(&done);
}
@@ -201,12 +241,16 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
// -- r3 : target map, scratch for subsequent call
// -- r4 : scratch (elements)
// -----------------------------------
- Label entry, loop, convert_hole, gc_required;
+ Label entry, loop, convert_hole, gc_required, only_change_map;
+
+ // Check for empty arrays, which only require a map transition and no changes
+ // to the backing store.
+ __ ldr(r4, FieldMemOperand(r2, JSObject::kElementsOffset));
+ __ CompareRoot(r4, Heap::kEmptyFixedArrayRootIndex);
+ __ b(eq, &only_change_map);
__ push(lr);
__ Push(r3, r2, r1, r0);
-
- __ ldr(r4, FieldMemOperand(r2, JSObject::kElementsOffset));
__ ldr(r5, FieldMemOperand(r4, FixedArray::kLengthOffset));
// r4: source FixedDoubleArray
// r5: number of elements (smi-tagged)
@@ -216,10 +260,10 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
__ add(r0, r0, Operand(r5, LSL, 1));
__ AllocateInNewSpace(r0, r6, r7, r9, &gc_required, NO_ALLOCATION_FLAGS);
// r6: destination FixedArray, not tagged as heap object
+ // Set destination FixedDoubleArray's length and map.
__ LoadRoot(r9, Heap::kFixedArrayMapRootIndex);
- __ str(r9, MemOperand(r6, HeapObject::kMapOffset));
- // Set destination FixedDoubleArray's length.
__ str(r5, MemOperand(r6, FixedDoubleArray::kLengthOffset));
+ __ str(r9, MemOperand(r6, HeapObject::kMapOffset));
// Prepare for conversion loop.
__ add(r4, r4, Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag + 4));
@@ -275,16 +319,6 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
__ b(lt, &loop);
__ Pop(r3, r2, r1, r0);
- // Update receiver's map.
- __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
- __ RecordWriteField(r2,
- HeapObject::kMapOffset,
- r3,
- r9,
- kLRHasBeenSaved,
- kDontSaveFPRegs,
- EMIT_REMEMBERED_SET,
- OMIT_SMI_CHECK);
// Replace receiver's backing store with newly created and filled FixedArray.
__ str(r6, FieldMemOperand(r2, JSObject::kElementsOffset));
__ RecordWriteField(r2,
@@ -296,6 +330,105 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
__ pop(lr);
+
+ __ bind(&only_change_map);
+ // Update receiver's map.
+ __ str(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
+ __ RecordWriteField(r2,
+ HeapObject::kMapOffset,
+ r3,
+ r9,
+ kLRHasNotBeenSaved,
+ kDontSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+}
+
+
+void StringCharLoadGenerator::Generate(MacroAssembler* masm,
+ Register string,
+ Register index,
+ Register result,
+ Label* call_runtime) {
+ // Fetch the instance type of the receiver into result register.
+ __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
+ __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
+
+ // We need special handling for indirect strings.
+ Label check_sequential;
+ __ tst(result, Operand(kIsIndirectStringMask));
+ __ b(eq, &check_sequential);
+
+ // Dispatch on the indirect string shape: slice or cons.
+ Label cons_string;
+ __ tst(result, Operand(kSlicedNotConsMask));
+ __ b(eq, &cons_string);
+
+ // Handle slices.
+ Label indirect_string_loaded;
+ __ ldr(result, FieldMemOperand(string, SlicedString::kOffsetOffset));
+ __ ldr(string, FieldMemOperand(string, SlicedString::kParentOffset));
+ __ add(index, index, Operand(result, ASR, kSmiTagSize));
+ __ jmp(&indirect_string_loaded);
+
+ // Handle cons strings.
+ // Check whether the right hand side is the empty string (i.e. if
+ // this is really a flat string in a cons string). If that is not
+ // the case we would rather go to the runtime system now to flatten
+ // the string.
+ __ bind(&cons_string);
+ __ ldr(result, FieldMemOperand(string, ConsString::kSecondOffset));
+ __ CompareRoot(result, Heap::kEmptyStringRootIndex);
+ __ b(ne, call_runtime);
+ // Get the first of the two strings and load its instance type.
+ __ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset));
+
+ __ bind(&indirect_string_loaded);
+ __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
+ __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
+
+ // Distinguish sequential and external strings. Only these two string
+ // representations can reach here (slices and flat cons strings have been
+ // reduced to the underlying sequential or external string).
+ Label external_string, check_encoding;
+ __ bind(&check_sequential);
+ STATIC_ASSERT(kSeqStringTag == 0);
+ __ tst(result, Operand(kStringRepresentationMask));
+ __ b(ne, &external_string);
+
+ // Prepare sequential strings
+ STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqAsciiString::kHeaderSize);
+ __ add(string,
+ string,
+ Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
+ __ jmp(&check_encoding);
+
+ // Handle external strings.
+ __ bind(&external_string);
+ if (FLAG_debug_code) {
+ // Assert that we do not have a cons or slice (indirect strings) here.
+ // Sequential strings have already been ruled out.
+ __ tst(result, Operand(kIsIndirectStringMask));
+ __ Assert(eq, "external string expected, but not found");
+ }
+ // Rule out short external strings.
+ STATIC_CHECK(kShortExternalStringTag != 0);
+ __ tst(result, Operand(kShortExternalStringMask));
+ __ b(ne, call_runtime);
+ __ ldr(string, FieldMemOperand(string, ExternalString::kResourceDataOffset));
+
+ Label ascii, done;
+ __ bind(&check_encoding);
+ STATIC_ASSERT(kTwoByteStringTag == 0);
+ __ tst(result, Operand(kStringEncodingMask));
+ __ b(ne, &ascii);
+ // Two-byte string.
+ __ ldrh(result, MemOperand(string, index, LSL, 1));
+ __ jmp(&done);
+ __ bind(&ascii);
+ // Ascii string.
+ __ ldrb(result, MemOperand(string, index));
+ __ bind(&done);
}
#undef __
diff --git a/src/3rdparty/v8/src/arm/codegen-arm.h b/src/3rdparty/v8/src/arm/codegen-arm.h
index f54231c..c340e6b 100644
--- a/src/3rdparty/v8/src/arm/codegen-arm.h
+++ b/src/3rdparty/v8/src/arm/codegen-arm.h
@@ -73,6 +73,21 @@ class CodeGenerator: public AstVisitor {
};
+class StringCharLoadGenerator : public AllStatic {
+ public:
+ // Generates the code for handling different string types and loading the
+ // indexed character into |result|. We expect |index| as untagged input and
+ // |result| as untagged output.
+ static void Generate(MacroAssembler* masm,
+ Register string,
+ Register index,
+ Register result,
+ Label* call_runtime);
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(StringCharLoadGenerator);
+};
+
} } // namespace v8::internal
#endif // V8_ARM_CODEGEN_ARM_H_
diff --git a/src/3rdparty/v8/src/arm/constants-arm.h b/src/3rdparty/v8/src/arm/constants-arm.h
index 823c6ff..e767001 100644
--- a/src/3rdparty/v8/src/arm/constants-arm.h
+++ b/src/3rdparty/v8/src/arm/constants-arm.h
@@ -87,22 +87,21 @@ namespace v8 {
namespace internal {
// Constant pool marker.
-static const int kConstantPoolMarkerMask = 0xffe00000;
-static const int kConstantPoolMarker = 0x0c000000;
-static const int kConstantPoolLengthMask = 0x001ffff;
+const int kConstantPoolMarkerMask = 0xffe00000;
+const int kConstantPoolMarker = 0x0c000000;
+const int kConstantPoolLengthMask = 0x001ffff;
// Number of registers in normal ARM mode.
-static const int kNumRegisters = 16;
+const int kNumRegisters = 16;
// VFP support.
-static const int kNumVFPSingleRegisters = 32;
-static const int kNumVFPDoubleRegisters = 16;
-static const int kNumVFPRegisters =
- kNumVFPSingleRegisters + kNumVFPDoubleRegisters;
+const int kNumVFPSingleRegisters = 32;
+const int kNumVFPDoubleRegisters = 16;
+const int kNumVFPRegisters = kNumVFPSingleRegisters + kNumVFPDoubleRegisters;
// PC is register 15.
-static const int kPCRegister = 15;
-static const int kNoRegister = -1;
+const int kPCRegister = 15;
+const int kNoRegister = -1;
// -----------------------------------------------------------------------------
// Conditions.
@@ -371,9 +370,9 @@ enum SoftwareInterruptCodes {
// stop
kStopCode = 1 << 23
};
-static const uint32_t kStopCodeMask = kStopCode - 1;
-static const uint32_t kMaxStopCode = kStopCode - 1;
-static const int32_t kDefaultStopCode = -1;
+const uint32_t kStopCodeMask = kStopCode - 1;
+const uint32_t kMaxStopCode = kStopCode - 1;
+const int32_t kDefaultStopCode = -1;
// Type of VFP register. Determines register encoding.
@@ -391,17 +390,17 @@ enum VFPConversionMode {
// This mask does not include the "inexact" or "input denormal" cumulative
// exceptions flags, because we usually don't want to check for it.
-static const uint32_t kVFPExceptionMask = 0xf;
-static const uint32_t kVFPInvalidOpExceptionBit = 1 << 0;
-static const uint32_t kVFPOverflowExceptionBit = 1 << 2;
-static const uint32_t kVFPUnderflowExceptionBit = 1 << 3;
-static const uint32_t kVFPInexactExceptionBit = 1 << 4;
-static const uint32_t kVFPFlushToZeroMask = 1 << 24;
+const uint32_t kVFPExceptionMask = 0xf;
+const uint32_t kVFPInvalidOpExceptionBit = 1 << 0;
+const uint32_t kVFPOverflowExceptionBit = 1 << 2;
+const uint32_t kVFPUnderflowExceptionBit = 1 << 3;
+const uint32_t kVFPInexactExceptionBit = 1 << 4;
+const uint32_t kVFPFlushToZeroMask = 1 << 24;
-static const uint32_t kVFPNConditionFlagBit = 1 << 31;
-static const uint32_t kVFPZConditionFlagBit = 1 << 30;
-static const uint32_t kVFPCConditionFlagBit = 1 << 29;
-static const uint32_t kVFPVConditionFlagBit = 1 << 28;
+const uint32_t kVFPNConditionFlagBit = 1 << 31;
+const uint32_t kVFPZConditionFlagBit = 1 << 30;
+const uint32_t kVFPCConditionFlagBit = 1 << 29;
+const uint32_t kVFPVConditionFlagBit = 1 << 28;
// VFP rounding modes. See ARM DDI 0406B Page A2-29.
@@ -418,7 +417,7 @@ enum VFPRoundingMode {
kRoundToZero = RZ
};
-static const uint32_t kVFPRoundingModeMask = 3 << 22;
+const uint32_t kVFPRoundingModeMask = 3 << 22;
enum CheckForInexactConversion {
kCheckForInexactConversion,
@@ -574,13 +573,13 @@ class Instruction {
// The naming of these accessor corresponds to figure A3-1.
//
// Two kind of accessors are declared:
- // - <Name>Field() will return the raw field, ie the field's bits at their
+ // - <Name>Field() will return the raw field, i.e. the field's bits at their
// original place in the instruction encoding.
- // eg. if instr is the 'addgt r0, r1, r2' instruction, encoded as 0xC0810002
- // ConditionField(instr) will return 0xC0000000.
+ // e.g. if instr is the 'addgt r0, r1, r2' instruction, encoded as
+ // 0xC0810002 ConditionField(instr) will return 0xC0000000.
// - <Name>Value() will return the field value, shifted back to bit 0.
- // eg. if instr is the 'addgt r0, r1, r2' instruction, encoded as 0xC0810002
- // ConditionField(instr) will return 0xC.
+ // e.g. if instr is the 'addgt r0, r1, r2' instruction, encoded as
+ // 0xC0810002 ConditionField(instr) will return 0xC.
// Generally applicable fields
diff --git a/src/3rdparty/v8/src/arm/cpu-arm.cc b/src/3rdparty/v8/src/arm/cpu-arm.cc
index 2724a64..7b08ed8 100644
--- a/src/3rdparty/v8/src/arm/cpu-arm.cc
+++ b/src/3rdparty/v8/src/arm/cpu-arm.cc
@@ -26,17 +26,12 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// CPU specific code for arm independent of OS goes here.
+#ifdef __arm__
+#include <sys/syscall.h> // for cache flushing.
+#endif
#include "v8.h"
-#if defined(__arm__)
- #if !defined(__QNXNTO__)
- #include <sys/syscall.h> // for cache flushing.
- #else
- #include <sys/mman.h> // for cache flushing.
- #endif
-#endif
-
#if defined(V8_TARGET_ARCH_ARM)
#include "cpu.h"
@@ -46,7 +41,7 @@
namespace v8 {
namespace internal {
-void CPU::Setup() {
+void CPU::SetUp() {
CpuFeatures::Probe();
}
@@ -69,10 +64,6 @@ void CPU::FlushICache(void* start, size_t size) {
// None of this code ends up in the snapshot so there are no issues
// around whether or not to generate the code when building snapshots.
Simulator::FlushICache(Isolate::Current()->simulator_i_cache(), start, size);
-#elif defined(__QNXNTO__)
- // The QNX kernel does not expose the symbol __ARM_NR_cacheflush so we
- // use the msync system call instead of the approach used on Linux
- msync(start, size, MS_SYNC|MS_INVALIDATE_ICACHE);
#else
// Ideally, we would call
// syscall(__ARM_NR_cacheflush, start,
diff --git a/src/3rdparty/v8/src/arm/debug-arm.cc b/src/3rdparty/v8/src/arm/debug-arm.cc
index b866f9c..96139a2 100644
--- a/src/3rdparty/v8/src/arm/debug-arm.cc
+++ b/src/3rdparty/v8/src/arm/debug-arm.cc
@@ -251,14 +251,6 @@ void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) {
}
-void Debug::GenerateConstructCallDebugBreak(MacroAssembler* masm) {
- // Calling convention for construct call (from builtins-arm.cc)
- // -- r0 : number of arguments (not smi)
- // -- r1 : constructor function
- Generate_DebugBreakCallHelper(masm, r1.bit(), r0.bit());
-}
-
-
void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) {
// In places other than IC call sites it is expected that r0 is TOS which
// is an object - this is not generally the case so this should be used with
@@ -267,11 +259,43 @@ void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) {
}
-void Debug::GenerateStubNoRegistersDebugBreak(MacroAssembler* masm) {
+void Debug::GenerateCallFunctionStubDebugBreak(MacroAssembler* masm) {
+ // Register state for CallFunctionStub (from code-stubs-arm.cc).
// ----------- S t a t e -------------
- // No registers used on entry.
+ // -- r1 : function
// -----------------------------------
- Generate_DebugBreakCallHelper(masm, 0, 0);
+ Generate_DebugBreakCallHelper(masm, r1.bit(), 0);
+}
+
+
+void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) {
+ // Register state for CallFunctionStub (from code-stubs-arm.cc).
+ // ----------- S t a t e -------------
+ // -- r1 : function
+ // -- r2 : cache cell for call target
+ // -----------------------------------
+ Generate_DebugBreakCallHelper(masm, r1.bit() | r2.bit(), 0);
+}
+
+
+void Debug::GenerateCallConstructStubDebugBreak(MacroAssembler* masm) {
+ // Calling convention for CallConstructStub (from code-stubs-arm.cc)
+ // ----------- S t a t e -------------
+ // -- r0 : number of arguments (not smi)
+ // -- r1 : constructor function
+ // -----------------------------------
+ Generate_DebugBreakCallHelper(masm, r1.bit(), r0.bit());
+}
+
+
+void Debug::GenerateCallConstructStubRecordDebugBreak(MacroAssembler* masm) {
+ // Calling convention for CallConstructStub (from code-stubs-arm.cc)
+ // ----------- S t a t e -------------
+ // -- r0 : number of arguments (not smi)
+ // -- r1 : constructor function
+ // -- r2 : cache cell for call target
+ // -----------------------------------
+ Generate_DebugBreakCallHelper(masm, r1.bit() | r2.bit(), r0.bit());
}
diff --git a/src/3rdparty/v8/src/arm/deoptimizer-arm.cc b/src/3rdparty/v8/src/arm/deoptimizer-arm.cc
index 8505c7d..7b2a3c4 100644
--- a/src/3rdparty/v8/src/arm/deoptimizer-arm.cc
+++ b/src/3rdparty/v8/src/arm/deoptimizer-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -44,12 +44,6 @@ int Deoptimizer::patch_size() {
}
-void Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code) {
- // Nothing to do. No new relocation information is written for lazy
- // deoptimization on ARM.
-}
-
-
void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
HandleScope scope;
AssertNoAllocation no_allocation;
@@ -58,58 +52,37 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
// Get the optimized code.
Code* code = function->code();
+ Address code_start_address = code->instruction_start();
// Invalidate the relocation information, as it will become invalid by the
// code patching below, and is not needed any more.
code->InvalidateRelocation();
- // For each return after a safepoint insert an absolute call to the
- // corresponding deoptimization entry.
- unsigned last_pc_offset = 0;
- SafepointTable table(function->code());
- for (unsigned i = 0; i < table.length(); i++) {
- unsigned pc_offset = table.GetPcOffset(i);
- SafepointEntry safepoint_entry = table.GetEntry(i);
- int deoptimization_index = safepoint_entry.deoptimization_index();
- int gap_code_size = safepoint_entry.gap_code_size();
- // Check that we did not shoot past next safepoint.
- CHECK(pc_offset >= last_pc_offset);
+ // For each LLazyBailout instruction insert a call to the corresponding
+ // deoptimization entry.
+ DeoptimizationInputData* deopt_data =
+ DeoptimizationInputData::cast(code->deoptimization_data());
#ifdef DEBUG
- // Destroy the code which is not supposed to be run again.
- int instructions = (pc_offset - last_pc_offset) / Assembler::kInstrSize;
- CodePatcher destroyer(code->instruction_start() + last_pc_offset,
- instructions);
- for (int x = 0; x < instructions; x++) {
- destroyer.masm()->bkpt(0);
- }
+ Address prev_call_address = NULL;
#endif
- last_pc_offset = pc_offset;
- if (deoptimization_index != Safepoint::kNoDeoptimizationIndex) {
- Address deoptimization_entry = Deoptimizer::GetDeoptimizationEntry(
- deoptimization_index, Deoptimizer::LAZY);
- last_pc_offset += gap_code_size;
- int call_size_in_bytes = MacroAssembler::CallSize(deoptimization_entry,
- RelocInfo::NONE);
- int call_size_in_words = call_size_in_bytes / Assembler::kInstrSize;
- ASSERT(call_size_in_bytes % Assembler::kInstrSize == 0);
- ASSERT(call_size_in_bytes <= patch_size());
- CodePatcher patcher(code->instruction_start() + last_pc_offset,
- call_size_in_words);
- patcher.masm()->Call(deoptimization_entry, RelocInfo::NONE);
- last_pc_offset += call_size_in_bytes;
- }
- }
-
+ for (int i = 0; i < deopt_data->DeoptCount(); i++) {
+ if (deopt_data->Pc(i)->value() == -1) continue;
+ Address call_address = code_start_address + deopt_data->Pc(i)->value();
+ Address deopt_entry = GetDeoptimizationEntry(i, LAZY);
+ int call_size_in_bytes = MacroAssembler::CallSize(deopt_entry,
+ RelocInfo::NONE);
+ int call_size_in_words = call_size_in_bytes / Assembler::kInstrSize;
+ ASSERT(call_size_in_bytes % Assembler::kInstrSize == 0);
+ ASSERT(call_size_in_bytes <= patch_size());
+ CodePatcher patcher(call_address, call_size_in_words);
+ patcher.masm()->Call(deopt_entry, RelocInfo::NONE);
+ ASSERT(prev_call_address == NULL ||
+ call_address >= prev_call_address + patch_size());
+ ASSERT(call_address + patch_size() <= code->instruction_end());
#ifdef DEBUG
- // Destroy the code which is not supposed to be run again.
- int instructions =
- (code->safepoint_table_offset() - last_pc_offset) / Assembler::kInstrSize;
- CodePatcher destroyer(code->instruction_start() + last_pc_offset,
- instructions);
- for (int x = 0; x < instructions; x++) {
- destroyer.masm()->bkpt(0);
- }
+ prev_call_address = call_address;
#endif
+ }
Isolate* isolate = code->GetIsolate();
@@ -131,15 +104,14 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
PrintF("[forced deoptimization: ");
function->PrintName();
PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function));
-#ifdef DEBUG
- if (FLAG_print_code) {
- code->PrintLn();
- }
-#endif
}
}
+static const int32_t kBranchBeforeStackCheck = 0x2a000001;
+static const int32_t kBranchBeforeInterrupt = 0x5a000004;
+
+
void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
Address pc_after,
Code* check_code,
@@ -150,10 +122,16 @@ void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
// 2a 00 00 01 bcs ok
// e5 9f c? ?? ldr ip, [pc, <stack guard address>]
// e1 2f ff 3c blx ip
- ASSERT(Memory::int32_at(pc_after - kInstrSize) ==
- (al | B24 | B21 | 15*B16 | 15*B12 | 15*B8 | BLX | ip.code()));
+ ASSERT(Memory::int32_at(pc_after - kInstrSize) == kBlxIp);
ASSERT(Assembler::IsLdrPcImmediateOffset(
Assembler::instr_at(pc_after - 2 * kInstrSize)));
+ if (FLAG_count_based_interrupts) {
+ ASSERT_EQ(kBranchBeforeInterrupt,
+ Memory::int32_at(pc_after - 3 * kInstrSize));
+ } else {
+ ASSERT_EQ(kBranchBeforeStackCheck,
+ Memory::int32_at(pc_after - 3 * kInstrSize));
+ }
// We patch the code to the following form:
// e1 5d 00 0c cmp sp, <limit>
@@ -187,13 +165,21 @@ void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
Code* check_code,
Code* replacement_code) {
const int kInstrSize = Assembler::kInstrSize;
- ASSERT(Memory::uint32_at(pc_after - kInstrSize) == 0xe12fff3c);
- ASSERT(Memory::uint8_at(pc_after - kInstrSize - 1) == 0xe5);
- ASSERT(Memory::uint8_at(pc_after - kInstrSize - 2) == 0x9f);
+ ASSERT(Memory::int32_at(pc_after - kInstrSize) == kBlxIp);
+ ASSERT(Assembler::IsLdrPcImmediateOffset(
+ Assembler::instr_at(pc_after - 2 * kInstrSize)));
// Replace NOP with conditional jump.
CodePatcher patcher(pc_after - 3 * kInstrSize, 1);
- patcher.masm()->b(+4, cs);
+ if (FLAG_count_based_interrupts) {
+ patcher.masm()->b(+16, pl);
+ ASSERT_EQ(kBranchBeforeInterrupt,
+ Memory::int32_at(pc_after - 3 * kInstrSize));
+ } else {
+ patcher.masm()->b(+4, cs);
+ ASSERT_EQ(kBranchBeforeStackCheck,
+ Memory::int32_at(pc_after - 3 * kInstrSize));
+ }
// Replace the stack check address in the constant pool
// with the entry address of the replacement code.
@@ -243,12 +229,13 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
ASSERT(Translation::BEGIN == opcode);
USE(opcode);
int count = iterator.Next();
+ iterator.Skip(1); // Drop JS frame count.
ASSERT(count == 1);
USE(count);
opcode = static_cast<Translation::Opcode>(iterator.Next());
USE(opcode);
- ASSERT(Translation::FRAME == opcode);
+ ASSERT(Translation::JS_FRAME == opcode);
unsigned node_id = iterator.Next();
USE(node_id);
ASSERT(node_id == ast_id);
@@ -284,9 +271,7 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
output_ = new FrameDescription*[1];
output_[0] = new(output_frame_size) FrameDescription(
output_frame_size, function_);
-#ifdef DEBUG
- output_[0]->SetKind(Code::OPTIMIZED_FUNCTION);
-#endif
+ output_[0]->SetFrameType(StackFrame::JAVA_SCRIPT);
// Clear the incoming parameters in the optimized frame to avoid
// confusing the garbage collector.
@@ -351,7 +336,7 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
output_[0] = input_;
output_[0]->SetPc(reinterpret_cast<uint32_t>(from_));
} else {
- // Setup the frame pointer and the context pointer.
+ // Set up the frame pointer and the context pointer.
output_[0]->SetRegister(fp.code(), input_->GetRegister(fp.code()));
output_[0]->SetRegister(cp.code(), input_->GetRegister(cp.code()));
@@ -374,15 +359,220 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
}
+void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
+ int frame_index) {
+ JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ unsigned height = iterator->Next();
+ unsigned height_in_bytes = height * kPointerSize;
+ if (FLAG_trace_deopt) {
+ PrintF(" translating arguments adaptor => height=%d\n", height_in_bytes);
+ }
+
+ unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
+ unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+ // Allocate and store the output frame description.
+ FrameDescription* output_frame =
+ new(output_frame_size) FrameDescription(output_frame_size, function);
+ output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);
+
+ // Arguments adaptor can not be topmost or bottommost.
+ ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+ ASSERT(output_[frame_index] == NULL);
+ output_[frame_index] = output_frame;
+
+ // The top address of the frame is computed from the previous
+ // frame's top and this frame's size.
+ uint32_t top_address;
+ top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+ output_frame->SetTop(top_address);
+
+ // Compute the incoming parameter translation.
+ int parameter_count = height;
+ unsigned output_offset = output_frame_size;
+ for (int i = 0; i < parameter_count; ++i) {
+ output_offset -= kPointerSize;
+ DoTranslateCommand(iterator, frame_index, output_offset);
+ }
+
+ // Read caller's PC from the previous frame.
+ output_offset -= kPointerSize;
+ intptr_t callers_pc = output_[frame_index - 1]->GetPc();
+ output_frame->SetFrameSlot(output_offset, callers_pc);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
+ top_address + output_offset, output_offset, callers_pc);
+ }
+
+ // Read caller's FP from the previous frame, and set this frame's FP.
+ output_offset -= kPointerSize;
+ intptr_t value = output_[frame_index - 1]->GetFp();
+ output_frame->SetFrameSlot(output_offset, value);
+ intptr_t fp_value = top_address + output_offset;
+ output_frame->SetFp(fp_value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
+ fp_value, output_offset, value);
+ }
+
+ // A marker value is used in place of the context.
+ output_offset -= kPointerSize;
+ intptr_t context = reinterpret_cast<intptr_t>(
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ output_frame->SetFrameSlot(output_offset, context);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context (adaptor sentinel)\n",
+ top_address + output_offset, output_offset, context);
+ }
+
+ // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(function);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // Number of incoming arguments.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
+ top_address + output_offset, output_offset, value, height - 1);
+ }
+
+ ASSERT(0 == output_offset);
+
+ Builtins* builtins = isolate_->builtins();
+ Code* adaptor_trampoline =
+ builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
+ uint32_t pc = reinterpret_cast<uint32_t>(
+ adaptor_trampoline->instruction_start() +
+ isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
+ output_frame->SetPc(pc);
+}
+
+
+void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
+ int frame_index) {
+ JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ unsigned height = iterator->Next();
+ unsigned height_in_bytes = height * kPointerSize;
+ if (FLAG_trace_deopt) {
+ PrintF(" translating construct stub => height=%d\n", height_in_bytes);
+ }
+
+ unsigned fixed_frame_size = 7 * kPointerSize;
+ unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+ // Allocate and store the output frame description.
+ FrameDescription* output_frame =
+ new(output_frame_size) FrameDescription(output_frame_size, function);
+ output_frame->SetFrameType(StackFrame::CONSTRUCT);
+
+ // Construct stub can not be topmost or bottommost.
+ ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+ ASSERT(output_[frame_index] == NULL);
+ output_[frame_index] = output_frame;
+
+ // The top address of the frame is computed from the previous
+ // frame's top and this frame's size.
+ uint32_t top_address;
+ top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+ output_frame->SetTop(top_address);
+
+ // Compute the incoming parameter translation.
+ int parameter_count = height;
+ unsigned output_offset = output_frame_size;
+ for (int i = 0; i < parameter_count; ++i) {
+ output_offset -= kPointerSize;
+ DoTranslateCommand(iterator, frame_index, output_offset);
+ }
+
+ // Read caller's PC from the previous frame.
+ output_offset -= kPointerSize;
+ intptr_t callers_pc = output_[frame_index - 1]->GetPc();
+ output_frame->SetFrameSlot(output_offset, callers_pc);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
+ top_address + output_offset, output_offset, callers_pc);
+ }
+
+ // Read caller's FP from the previous frame, and set this frame's FP.
+ output_offset -= kPointerSize;
+ intptr_t value = output_[frame_index - 1]->GetFp();
+ output_frame->SetFrameSlot(output_offset, value);
+ intptr_t fp_value = top_address + output_offset;
+ output_frame->SetFp(fp_value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
+ fp_value, output_offset, value);
+ }
+
+ // The context can be gotten from the previous frame.
+ output_offset -= kPointerSize;
+ value = output_[frame_index - 1]->GetContext();
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // A marker value is used in place of the function.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function (construct sentinel)\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // Number of incoming arguments.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
+ top_address + output_offset, output_offset, value, height - 1);
+ }
+
+ // Constructor function being invoked by the stub.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(function);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; constructor function\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // The newly allocated object was passed as receiver in the artificial
+ // constructor stub environment created by HEnvironment::CopyForInlining().
+ output_offset -= kPointerSize;
+ value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; allocated receiver\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ ASSERT(0 == output_offset);
+
+ Builtins* builtins = isolate_->builtins();
+ Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
+ uint32_t pc = reinterpret_cast<uint32_t>(
+ construct_stub->instruction_start() +
+ isolate_->heap()->construct_stub_deopt_pc_offset()->value());
+ output_frame->SetPc(pc);
+}
+
+
// This code is very similar to ia32 code, but relies on register names (fp, sp)
// and how the frame is laid out.
-void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
- int frame_index) {
+void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
+ int frame_index) {
// Read the ast node id, function, and frame height for this output frame.
- Translation::Opcode opcode =
- static_cast<Translation::Opcode>(iterator->Next());
- USE(opcode);
- ASSERT(Translation::FRAME == opcode);
int node_id = iterator->Next();
JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
unsigned height = iterator->Next();
@@ -402,9 +592,7 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
// Allocate and store the output frame description.
FrameDescription* output_frame =
new(output_frame_size) FrameDescription(output_frame_size, function);
-#ifdef DEBUG
- output_frame->SetKind(Code::FUNCTION);
-#endif
+ output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);
bool is_bottommost = (0 == frame_index);
bool is_topmost = (output_count_ - 1 == frame_index);
@@ -492,9 +680,8 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
value = reinterpret_cast<intptr_t>(function->context());
}
output_frame->SetFrameSlot(output_offset, value);
- if (is_topmost) {
- output_frame->SetRegister(cp.code(), value);
- }
+ output_frame->SetContext(value);
+ if (is_topmost) output_frame->SetRegister(cp.code(), value);
if (FLAG_trace_deopt) {
PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
top_address + output_offset, output_offset, value);
@@ -755,10 +942,7 @@ void Deoptimizer::EntryGenerator::Generate() {
__ pop(ip); // remove sp
__ pop(ip); // remove lr
- // Set up the roots register.
- ExternalReference roots_array_start =
- ExternalReference::roots_array_start(isolate);
- __ mov(r10, Operand(roots_array_start));
+ __ InitializeRootRegister();
__ pop(ip); // remove pc
__ pop(r7); // get continuation, leave pc on stack
diff --git a/src/3rdparty/v8/src/arm/disasm-arm.cc b/src/3rdparty/v8/src/arm/disasm-arm.cc
index 603b3cf..96a7d3c 100644
--- a/src/3rdparty/v8/src/arm/disasm-arm.cc
+++ b/src/3rdparty/v8/src/arm/disasm-arm.cc
@@ -473,7 +473,7 @@ int Decoder::FormatOption(Instruction* instr, const char* format) {
return 1;
}
case 'i': { // 'i: immediate value from adjacent bits.
- // Expects tokens in the form imm%02d@%02d, ie. imm05@07, imm10@16
+ // Expects tokens in the form imm%02d@%02d, i.e. imm05@07, imm10@16
int width = (format[3] - '0') * 10 + (format[4] - '0');
int lsb = (format[6] - '0') * 10 + (format[7] - '0');
@@ -662,6 +662,15 @@ void Decoder::Format(Instruction* instr, const char* format) {
}
+// The disassembler may end up decoding data inlined in the code. We do not want
+// it to crash if the data does not ressemble any known instruction.
+#define VERIFY(condition) \
+if(!(condition)) { \
+ Unknown(instr); \
+ return; \
+}
+
+
// For currently unimplemented decodings the disassembler calls Unknown(instr)
// which will just print "unknown" of the instruction bits.
void Decoder::Unknown(Instruction* instr) {
@@ -947,13 +956,13 @@ void Decoder::DecodeType2(Instruction* instr) {
void Decoder::DecodeType3(Instruction* instr) {
switch (instr->PUField()) {
case da_x: {
- ASSERT(!instr->HasW());
+ VERIFY(!instr->HasW());
Format(instr, "'memop'cond'b 'rd, ['rn], -'shift_rm");
break;
}
case ia_x: {
if (instr->HasW()) {
- ASSERT(instr->Bits(5, 4) == 0x1);
+ VERIFY(instr->Bits(5, 4) == 0x1);
if (instr->Bit(22) == 0x1) {
Format(instr, "usat 'rd, #'imm05@16, 'rm'shift_sat");
} else {
@@ -1074,8 +1083,8 @@ int Decoder::DecodeType7(Instruction* instr) {
// vmsr
// Dd = vsqrt(Dm)
void Decoder::DecodeTypeVFP(Instruction* instr) {
- ASSERT((instr->TypeValue() == 7) && (instr->Bit(24) == 0x0) );
- ASSERT(instr->Bits(11, 9) == 0x5);
+ VERIFY((instr->TypeValue() == 7) && (instr->Bit(24) == 0x0) );
+ VERIFY(instr->Bits(11, 9) == 0x5);
if (instr->Bit(4) == 0) {
if (instr->Opc1Value() == 0x7) {
@@ -1166,7 +1175,7 @@ void Decoder::DecodeTypeVFP(Instruction* instr) {
void Decoder::DecodeVMOVBetweenCoreAndSinglePrecisionRegisters(
Instruction* instr) {
- ASSERT((instr->Bit(4) == 1) && (instr->VCValue() == 0x0) &&
+ VERIFY((instr->Bit(4) == 1) && (instr->VCValue() == 0x0) &&
(instr->VAValue() == 0x0));
bool to_arm_register = (instr->VLValue() == 0x1);
@@ -1180,8 +1189,8 @@ void Decoder::DecodeVMOVBetweenCoreAndSinglePrecisionRegisters(
void Decoder::DecodeVCMP(Instruction* instr) {
- ASSERT((instr->Bit(4) == 0) && (instr->Opc1Value() == 0x7));
- ASSERT(((instr->Opc2Value() == 0x4) || (instr->Opc2Value() == 0x5)) &&
+ VERIFY((instr->Bit(4) == 0) && (instr->Opc1Value() == 0x7));
+ VERIFY(((instr->Opc2Value() == 0x4) || (instr->Opc2Value() == 0x5)) &&
(instr->Opc3Value() & 0x1));
// Comparison.
@@ -1203,8 +1212,8 @@ void Decoder::DecodeVCMP(Instruction* instr) {
void Decoder::DecodeVCVTBetweenDoubleAndSingle(Instruction* instr) {
- ASSERT((instr->Bit(4) == 0) && (instr->Opc1Value() == 0x7));
- ASSERT((instr->Opc2Value() == 0x7) && (instr->Opc3Value() == 0x3));
+ VERIFY((instr->Bit(4) == 0) && (instr->Opc1Value() == 0x7));
+ VERIFY((instr->Opc2Value() == 0x7) && (instr->Opc3Value() == 0x3));
bool double_to_single = (instr->SzValue() == 1);
@@ -1217,8 +1226,8 @@ void Decoder::DecodeVCVTBetweenDoubleAndSingle(Instruction* instr) {
void Decoder::DecodeVCVTBetweenFloatingPointAndInteger(Instruction* instr) {
- ASSERT((instr->Bit(4) == 0) && (instr->Opc1Value() == 0x7));
- ASSERT(((instr->Opc2Value() == 0x8) && (instr->Opc3Value() & 0x1)) ||
+ VERIFY((instr->Bit(4) == 0) && (instr->Opc1Value() == 0x7));
+ VERIFY(((instr->Opc2Value() == 0x8) && (instr->Opc3Value() & 0x1)) ||
(((instr->Opc2Value() >> 1) == 0x6) && (instr->Opc3Value() & 0x1)));
bool to_integer = (instr->Bit(18) == 1);
@@ -1265,7 +1274,7 @@ void Decoder::DecodeVCVTBetweenFloatingPointAndInteger(Instruction* instr) {
// Ddst = MEM(Rbase + 4*offset).
// MEM(Rbase + 4*offset) = Dsrc.
void Decoder::DecodeType6CoprocessorIns(Instruction* instr) {
- ASSERT(instr->TypeValue() == 6);
+ VERIFY(instr->TypeValue() == 6);
if (instr->CoprocessorValue() == 0xA) {
switch (instr->OpcodeValue()) {
@@ -1347,6 +1356,7 @@ void Decoder::DecodeType6CoprocessorIns(Instruction* instr) {
}
}
+#undef VERIFIY
bool Decoder::IsConstantPoolAt(byte* instr_ptr) {
int instruction_bits = *(reinterpret_cast<int*>(instr_ptr));
diff --git a/src/3rdparty/v8/src/arm/frames-arm.h b/src/3rdparty/v8/src/arm/frames-arm.h
index c66ceee..a10acd0 100644
--- a/src/3rdparty/v8/src/arm/frames-arm.h
+++ b/src/3rdparty/v8/src/arm/frames-arm.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -35,22 +35,22 @@ namespace internal {
// The ARM ABI does not specify the usage of register r9, which may be reserved
// as the static base or thread register on some platforms, in which case we
// leave it alone. Adjust the value of kR9Available accordingly:
-static const int kR9Available = 1; // 1 if available to us, 0 if reserved
+const int kR9Available = 1; // 1 if available to us, 0 if reserved
// Register list in load/store instructions
// Note that the bit values must match those used in actual instruction encoding
-static const int kNumRegs = 16;
+const int kNumRegs = 16;
// Caller-saved/arguments registers
-static const RegList kJSCallerSaved =
+const RegList kJSCallerSaved =
1 << 0 | // r0 a1
1 << 1 | // r1 a2
1 << 2 | // r2 a3
1 << 3; // r3 a4
-static const int kNumJSCallerSaved = 4;
+const int kNumJSCallerSaved = 4;
typedef Object* JSCallerSavedBuffer[kNumJSCallerSaved];
@@ -60,7 +60,7 @@ int JSCallerSavedCode(int n);
// Callee-saved registers preserved when switching from C to JavaScript
-static const RegList kCalleeSaved =
+const RegList kCalleeSaved =
1 << 4 | // r4 v1
1 << 5 | // r5 v2
1 << 6 | // r6 v3
@@ -72,7 +72,7 @@ static const RegList kCalleeSaved =
// When calling into C++ (only for C++ calls that can't cause a GC).
// The call code will take care of lr, fp, etc.
-static const RegList kCallerSaved =
+const RegList kCallerSaved =
1 << 0 | // r0
1 << 1 | // r1
1 << 2 | // r2
@@ -80,36 +80,35 @@ static const RegList kCallerSaved =
1 << 9; // r9
-static const int kNumCalleeSaved = 7 + kR9Available;
+const int kNumCalleeSaved = 7 + kR9Available;
// Double registers d8 to d15 are callee-saved.
-static const int kNumDoubleCalleeSaved = 8;
+const int kNumDoubleCalleeSaved = 8;
// Number of registers for which space is reserved in safepoints. Must be a
// multiple of 8.
// TODO(regis): Only 8 registers may actually be sufficient. Revisit.
-static const int kNumSafepointRegisters = 16;
+const int kNumSafepointRegisters = 16;
// Define the list of registers actually saved at safepoints.
// Note that the number of saved registers may be smaller than the reserved
// space, i.e. kNumSafepointSavedRegisters <= kNumSafepointRegisters.
-static const RegList kSafepointSavedRegisters = kJSCallerSaved | kCalleeSaved;
-static const int kNumSafepointSavedRegisters =
- kNumJSCallerSaved + kNumCalleeSaved;
+const RegList kSafepointSavedRegisters = kJSCallerSaved | kCalleeSaved;
+const int kNumSafepointSavedRegisters = kNumJSCallerSaved + kNumCalleeSaved;
// ----------------------------------------------------
class StackHandlerConstants : public AllStatic {
public:
- static const int kNextOffset = 0 * kPointerSize;
- static const int kStateOffset = 1 * kPointerSize;
- static const int kContextOffset = 2 * kPointerSize;
- static const int kFPOffset = 3 * kPointerSize;
- static const int kPCOffset = 4 * kPointerSize;
+ static const int kNextOffset = 0 * kPointerSize;
+ static const int kCodeOffset = 1 * kPointerSize;
+ static const int kStateOffset = 2 * kPointerSize;
+ static const int kContextOffset = 3 * kPointerSize;
+ static const int kFPOffset = 4 * kPointerSize;
- static const int kSize = kPCOffset + kPointerSize;
+ static const int kSize = kFPOffset + kPointerSize;
};
@@ -137,6 +136,9 @@ class ExitFrameConstants : public AllStatic {
class StandardFrameConstants : public AllStatic {
public:
+ // Fixed part of the frame consists of return address, caller fp,
+ // context and function.
+ static const int kFixedFrameSize = 4 * kPointerSize;
static const int kExpressionsOffset = -3 * kPointerSize;
static const int kMarkerOffset = -2 * kPointerSize;
static const int kContextOffset = -1 * kPointerSize;
@@ -162,6 +164,8 @@ class JavaScriptFrameConstants : public AllStatic {
class ArgumentsAdaptorFrameConstants : public AllStatic {
public:
static const int kLengthOffset = StandardFrameConstants::kExpressionsOffset;
+ static const int kFrameSize =
+ StandardFrameConstants::kFixedFrameSize + kPointerSize;
};
diff --git a/src/3rdparty/v8/src/arm/full-codegen-arm.cc b/src/3rdparty/v8/src/arm/full-codegen-arm.cc
index bf3c15f..db95f78 100644
--- a/src/3rdparty/v8/src/arm/full-codegen-arm.cc
+++ b/src/3rdparty/v8/src/arm/full-codegen-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -34,6 +34,7 @@
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
+#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"
@@ -115,7 +116,7 @@ class JumpPatchSite BASE_EMBEDDED {
// function.
//
// The live registers are:
-// o r1: the JS function object being called (ie, ourselves)
+// o r1: the JS function object being called (i.e., ourselves)
// o cp: our context
// o fp: our caller's frame pointer
// o sp: stack pointer
@@ -123,10 +124,12 @@ class JumpPatchSite BASE_EMBEDDED {
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
-void FullCodeGenerator::Generate(CompilationInfo* info) {
- ASSERT(info_ == NULL);
- info_ = info;
- scope_ = info->scope();
+void FullCodeGenerator::Generate() {
+ CompilationInfo* info = info_;
+ handler_table_ =
+ isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
+ profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
+ Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
SetFunctionPosition(function());
Comment cmnt(masm_, "[ function compiled by full code generator");
@@ -141,7 +144,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// with undefined when called as functions (without an explicit
// receiver object). r5 is zero for method calls and non-zero for
// function calls.
- if (info->is_strict_mode() || info->is_native()) {
+ if (!info->is_classic_mode() || info->is_native()) {
Label ok;
__ cmp(r5, Operand(0));
__ b(eq, &ok);
@@ -177,13 +180,12 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// Possibly allocate a local context.
int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
- if (heap_slots > 0 ||
- (scope()->is_qml_mode() && scope()->is_global_scope())) {
+ if (heap_slots > 0) {
Comment cmnt(masm_, "[ Allocate local context");
// Argument to NewContext is the function, which is in r1.
__ push(r1);
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub((heap_slots < 0)?0:heap_slots);
+ FastNewContextStub stub(heap_slots);
__ CallStub(&stub);
} else {
__ CallRuntime(Runtime::kNewFunctionContext, 1);
@@ -235,7 +237,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// The stub will rewrite receiever and parameter count if the previous
// stack frame was an arguments adapter frame.
ArgumentsAccessStub::Type type;
- if (is_strict_mode()) {
+ if (!is_classic_mode()) {
type = ArgumentsAccessStub::NEW_STRICT;
} else if (function()->has_duplicate_parameters()) {
type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
@@ -264,11 +266,11 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
- int ignored = 0;
VariableProxy* proxy = scope()->function();
ASSERT(proxy->var()->mode() == CONST ||
proxy->var()->mode() == CONST_HARMONY);
- EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
+ ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
+ EmitDeclaration(proxy, proxy->var()->mode(), NULL);
}
VisitDeclarations(scope()->declarations());
}
@@ -309,19 +311,68 @@ void FullCodeGenerator::ClearAccumulator() {
}
-void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
+void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
+ __ mov(r2, Operand(profiling_counter_));
+ __ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+ __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
+ __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+}
+
+
+void FullCodeGenerator::EmitProfilingCounterReset() {
+ int reset_value = FLAG_interrupt_budget;
+ if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
+ // Self-optimization is a one-off thing: if it fails, don't try again.
+ reset_value = Smi::kMaxValue;
+ }
+ if (isolate()->IsDebuggerActive()) {
+ // Detect debug break requests as soon as possible.
+ reset_value = 10;
+ }
+ __ mov(r2, Operand(profiling_counter_));
+ __ mov(r3, Operand(Smi::FromInt(reset_value)));
+ __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
+}
+
+
+static const int kMaxBackEdgeWeight = 127;
+static const int kBackEdgeDistanceDivisor = 142;
+
+
+void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
+ Label* back_edge_target) {
Comment cmnt(masm_, "[ Stack check");
Label ok;
- __ LoadRoot(ip, Heap::kStackLimitRootIndex);
- __ cmp(sp, Operand(ip));
- __ b(hs, &ok);
- StackCheckStub stub;
- __ CallStub(&stub);
+
+ if (FLAG_count_based_interrupts) {
+ int weight = 1;
+ if (FLAG_weighted_back_edges) {
+ ASSERT(back_edge_target->is_bound());
+ int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
+ weight = Min(kMaxBackEdgeWeight,
+ Max(1, distance / kBackEdgeDistanceDivisor));
+ }
+ EmitProfilingCounterDecrement(weight);
+ __ b(pl, &ok);
+ InterruptStub stub;
+ __ CallStub(&stub);
+ } else {
+ __ LoadRoot(ip, Heap::kStackLimitRootIndex);
+ __ cmp(sp, Operand(ip));
+ __ b(hs, &ok);
+ StackCheckStub stub;
+ __ CallStub(&stub);
+ }
+
// Record a mapping of this PC offset to the OSR id. This is used to find
// the AST id from the unoptimized code in order to use it as a key into
// the deoptimization input data found in the optimized code.
RecordStackCheck(stmt->OsrEntryId());
+ if (FLAG_count_based_interrupts) {
+ EmitProfilingCounterReset();
+ }
+
__ bind(&ok);
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
// Record a mapping of the OSR id to this PC. This is used if the OSR
@@ -343,6 +394,32 @@ void FullCodeGenerator::EmitReturnSequence() {
__ push(r0);
__ CallRuntime(Runtime::kTraceExit, 1);
}
+ if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
+ // Pretend that the exit is a backwards jump to the entry.
+ int weight = 1;
+ if (info_->ShouldSelfOptimize()) {
+ weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+ } else if (FLAG_weighted_back_edges) {
+ int distance = masm_->pc_offset();
+ weight = Min(kMaxBackEdgeWeight,
+ Max(1, distance / kBackEdgeDistanceDivisor));
+ }
+ EmitProfilingCounterDecrement(weight);
+ Label ok;
+ __ b(pl, &ok);
+ __ push(r0);
+ if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
+ __ ldr(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+ __ push(r2);
+ __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
+ } else {
+ InterruptStub stub;
+ __ CallStub(&stub);
+ }
+ __ pop(r0);
+ EmitProfilingCounterReset();
+ __ bind(&ok);
+ }
#ifdef DEBUG
// Add a label for checking the size of the code used for returning.
@@ -705,17 +782,16 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
VariableMode mode,
- FunctionLiteral* function,
- int* global_count) {
+ FunctionLiteral* function) {
// If it was not possible to allocate the variable at compile time, we
// need to "declare" it at runtime to make sure it actually exists in the
// local context.
Variable* variable = proxy->var();
- bool binding_needs_init =
- mode == CONST || mode == CONST_HARMONY || mode == LET;
+ bool binding_needs_init = (function == NULL) &&
+ (mode == CONST || mode == CONST_HARMONY || mode == LET);
switch (variable->location()) {
case Variable::UNALLOCATED:
- ++(*global_count);
+ ++global_count_;
break;
case Variable::PARAMETER:
@@ -800,9 +876,6 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
}
-void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
-
-
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals.
// The context is the first argument.
@@ -864,7 +937,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
- __ Call(ic, RelocInfo::CODE_TARGET, clause->CompareId());
+ CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
patch_site.EmitPatchInfo();
__ cmp(r0, Operand(0));
@@ -917,6 +990,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ cmp(r0, null_value);
__ b(eq, &exit);
+ PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
+
// Convert the object to a JS object.
Label convert, done_convert;
__ JumpIfSmi(r0, &convert);
@@ -938,48 +1013,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// the JSObject::IsSimpleEnum cache validity checks. If we cannot
// guarantee cache validity, call the runtime system to check cache
// validity or get the property names in a fixed array.
- Label next;
- // Preload a couple of values used in the loop.
- Register empty_fixed_array_value = r6;
- __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
- Register empty_descriptor_array_value = r7;
- __ LoadRoot(empty_descriptor_array_value,
- Heap::kEmptyDescriptorArrayRootIndex);
- __ mov(r1, r0);
- __ bind(&next);
-
- // Check that there are no elements. Register r1 contains the
- // current JS object we've reached through the prototype chain.
- __ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
- __ cmp(r2, empty_fixed_array_value);
- __ b(ne, &call_runtime);
-
- // Check that instance descriptors are not empty so that we can
- // check for an enum cache. Leave the map in r2 for the subsequent
- // prototype load.
- __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
- __ ldr(r3, FieldMemOperand(r2, Map::kInstanceDescriptorsOrBitField3Offset));
- __ JumpIfSmi(r3, &call_runtime);
-
- // Check that there is an enum cache in the non-empty instance
- // descriptors (r3). This is the case if the next enumeration
- // index field does not contain a smi.
- __ ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumerationIndexOffset));
- __ JumpIfSmi(r3, &call_runtime);
-
- // For all objects but the receiver, check that the cache is empty.
- Label check_prototype;
- __ cmp(r1, r0);
- __ b(eq, &check_prototype);
- __ ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumCacheBridgeCacheOffset));
- __ cmp(r3, empty_fixed_array_value);
- __ b(ne, &call_runtime);
-
- // Load the prototype from the map and loop if non-null.
- __ bind(&check_prototype);
- __ ldr(r1, FieldMemOperand(r2, Map::kPrototypeOffset));
- __ cmp(r1, null_value);
- __ b(ne, &next);
+ __ CheckEnumCache(null_value, &call_runtime);
// The enum cache is valid. Load the map of the object being
// iterated over and use the cache for the iteration.
@@ -1008,7 +1042,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
__ ldr(r2, FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));
- // Setup the four remaining stack slots.
+ // Set up the four remaining stack slots.
__ push(r0); // Map.
__ ldr(r1, FieldMemOperand(r2, FixedArray::kLengthOffset));
__ mov(r0, Operand(Smi::FromInt(0)));
@@ -1019,6 +1053,16 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// We got a fixed array in register r0. Iterate through that.
Label non_proxy;
__ bind(&fixed_array);
+
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(
+ Handle<Object>(
+ Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
+ RecordTypeFeedbackCell(stmt->PrepareId(), cell);
+ __ LoadHeapObject(r1, cell);
+ __ mov(r2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
+ __ str(r2, FieldMemOperand(r1, JSGlobalPropertyCell::kValueOffset));
+
__ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check
__ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
@@ -1032,6 +1076,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ Push(r1, r0); // Fixed array length (as smi) and initial index.
// Generate code for doing the condition check.
+ PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
__ bind(&loop);
// Load the current count to r0, load the length to r1.
__ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
@@ -1075,7 +1120,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ mov(result_register(), r3);
// Perform the assignment as if via '='.
{ EffectContext context(this);
- EmitAssignment(stmt->each(), stmt->AssignmentId());
+ EmitAssignment(stmt->each());
}
// Generate code for the body of the loop.
@@ -1088,7 +1133,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ add(r0, r0, Operand(Smi::FromInt(1)));
__ push(r0);
- EmitStackCheck(stmt);
+ EmitStackCheck(stmt, &loop);
__ b(&loop);
// Remove the pointers stored on the stack.
@@ -1096,6 +1141,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ Drop(5);
// Exit and decrement the loop depth.
+ PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
__ bind(&exit);
decrement_loop_depth();
}
@@ -1114,7 +1160,7 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
- FastNewClosureStub stub(info->strict_mode_flag());
+ FastNewClosureStub stub(info->language_mode());
__ mov(r0, Operand(info));
__ push(r0);
__ CallStub(&stub);
@@ -1183,13 +1229,13 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
__ bind(&fast);
}
- __ ldr(r0, var->is_qml_global()?QmlGlobalObjectOperand():GlobalObjectOperand());
+ __ ldr(r0, GlobalObjectOperand());
__ mov(r2, Operand(var->name()));
RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
? RelocInfo::CODE_TARGET
: RelocInfo::CODE_TARGET_CONTEXT;
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ Call(ic, mode);
+ CallIC(ic, mode);
}
@@ -1270,10 +1316,10 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
Comment cmnt(masm_, "Global variable");
// Use inline caching. Variable name is passed in r2 and the global
// object (receiver) in r0.
- __ ldr(r0, var->is_qml_global()?QmlGlobalObjectOperand():GlobalObjectOperand());
+ __ ldr(r0, GlobalObjectOperand());
__ mov(r2, Operand(var->name()));
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
context()->Plug(r0);
break;
}
@@ -1284,28 +1330,64 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
Comment cmnt(masm_, var->IsContextSlot()
? "Context variable"
: "Stack variable");
- if (!var->binding_needs_init()) {
- context()->Plug(var);
- } else {
- // Let and const need a read barrier.
- GetVar(r0, var);
- __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
- if (var->mode() == LET || var->mode() == CONST_HARMONY) {
- // Throw a reference error when using an uninitialized let/const
- // binding in harmony mode.
- Label done;
- __ b(ne, &done);
- __ mov(r0, Operand(var->name()));
- __ push(r0);
- __ CallRuntime(Runtime::kThrowReferenceError, 1);
- __ bind(&done);
+ if (var->binding_needs_init()) {
+ // var->scope() may be NULL when the proxy is located in eval code and
+ // refers to a potential outside binding. Currently those bindings are
+ // always looked up dynamically, i.e. in that case
+ // var->location() == LOOKUP.
+ // always holds.
+ ASSERT(var->scope() != NULL);
+
+ // Check if the binding really needs an initialization check. The check
+ // can be skipped in the following situation: we have a LET or CONST
+ // binding in harmony mode, both the Variable and the VariableProxy have
+ // the same declaration scope (i.e. they are both in global code, in the
+ // same function or in the same eval code) and the VariableProxy is in
+ // the source physically located after the initializer of the variable.
+ //
+ // We cannot skip any initialization checks for CONST in non-harmony
+ // mode because const variables may be declared but never initialized:
+ // if (false) { const x; }; var y = x;
+ //
+ // The condition on the declaration scopes is a conservative check for
+ // nested functions that access a binding and are called before the
+ // binding is initialized:
+ // function() { f(); let x = 1; function f() { x = 2; } }
+ //
+ bool skip_init_check;
+ if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
+ skip_init_check = false;
} else {
- // Uninitalized const bindings outside of harmony mode are unholed.
- ASSERT(var->mode() == CONST);
- __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
+ // Check that we always have valid source position.
+ ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
+ ASSERT(proxy->position() != RelocInfo::kNoPosition);
+ skip_init_check = var->mode() != CONST &&
+ var->initializer_position() < proxy->position();
+ }
+
+ if (!skip_init_check) {
+ // Let and const need a read barrier.
+ GetVar(r0, var);
+ __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
+ if (var->mode() == LET || var->mode() == CONST_HARMONY) {
+ // Throw a reference error when using an uninitialized let/const
+ // binding in harmony mode.
+ Label done;
+ __ b(ne, &done);
+ __ mov(r0, Operand(var->name()));
+ __ push(r0);
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ __ bind(&done);
+ } else {
+ // Uninitalized const bindings outside of harmony mode are unholed.
+ ASSERT(var->mode() == CONST);
+ __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
+ }
+ context()->Plug(r0);
+ break;
}
- context()->Plug(r0);
}
+ context()->Plug(var);
break;
}
@@ -1377,12 +1459,23 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
}
+void FullCodeGenerator::EmitAccessor(Expression* expression) {
+ if (expression == NULL) {
+ __ LoadRoot(r1, Heap::kNullValueRootIndex);
+ __ push(r1);
+ } else {
+ VisitForStackValue(expression);
+ }
+}
+
+
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Comment cmnt(masm_, "[ ObjectLiteral");
+ Handle<FixedArray> constant_properties = expr->constant_properties();
__ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
__ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
- __ mov(r1, Operand(expr->constant_properties()));
+ __ mov(r1, Operand(constant_properties));
int flags = expr->fast_elements()
? ObjectLiteral::kFastElements
: ObjectLiteral::kNoFlags;
@@ -1391,10 +1484,15 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
: ObjectLiteral::kNoFlags;
__ mov(r0, Operand(Smi::FromInt(flags)));
__ Push(r3, r2, r1, r0);
+ int properties_count = constant_properties->length() / 2;
if (expr->depth() > 1) {
__ CallRuntime(Runtime::kCreateObjectLiteral, 4);
- } else {
+ } else if (flags != ObjectLiteral::kFastElements ||
+ properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
__ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
+ } else {
+ FastCloneShallowObjectStub stub(properties_count);
+ __ CallStub(&stub);
}
// If result_saved is true the result is on top of the stack. If
@@ -1406,6 +1504,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// marked expressions, no store code is emitted.
expr->CalculateEmitStore();
+ AccessorTable accessor_table(isolate()->zone());
for (int i = 0; i < expr->properties()->length(); i++) {
ObjectLiteral::Property* property = expr->properties()->at(i);
if (property->IsCompileTimeValue()) continue;
@@ -1428,10 +1527,10 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
VisitForAccumulatorValue(value);
__ mov(r2, Operand(key->handle()));
__ ldr(r1, MemOperand(sp));
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, key->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, key->id());
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
@@ -1454,21 +1553,29 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
}
break;
case ObjectLiteral::Property::GETTER:
+ accessor_table.lookup(key)->second->getter = value;
+ break;
case ObjectLiteral::Property::SETTER:
- // Duplicate receiver on stack.
- __ ldr(r0, MemOperand(sp));
- __ push(r0);
- VisitForStackValue(key);
- __ mov(r1, Operand(property->kind() == ObjectLiteral::Property::SETTER ?
- Smi::FromInt(1) :
- Smi::FromInt(0)));
- __ push(r1);
- VisitForStackValue(value);
- __ CallRuntime(Runtime::kDefineAccessor, 4);
+ accessor_table.lookup(key)->second->setter = value;
break;
}
}
+ // Emit code to define accessors, using only a single call to the runtime for
+ // each pair of corresponding getters and setters.
+ for (AccessorTable::Iterator it = accessor_table.begin();
+ it != accessor_table.end();
+ ++it) {
+ __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
+ __ push(r0);
+ VisitForStackValue(it->first);
+ EmitAccessor(it->second->getter);
+ EmitAccessor(it->second->setter);
+ __ mov(r0, Operand(Smi::FromInt(NONE)));
+ __ push(r0);
+ __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
+ }
+
if (expr->has_function()) {
ASSERT(result_saved);
__ ldr(r0, MemOperand(sp));
@@ -1493,6 +1600,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
ASSERT_EQ(2, constant_elements->length());
ElementsKind constant_elements_kind =
static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
+ bool has_fast_elements = constant_elements_kind == FAST_ELEMENTS;
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
@@ -1501,7 +1609,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
__ mov(r1, Operand(constant_elements));
__ Push(r3, r2, r1);
- if (constant_elements_values->map() ==
+ if (has_fast_elements && constant_elements_values->map() ==
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
@@ -1516,10 +1624,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
ASSERT(constant_elements_kind == FAST_ELEMENTS ||
constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
FLAG_smi_only_arrays);
- FastCloneShallowArrayStub::Mode mode =
- constant_elements_kind == FAST_DOUBLE_ELEMENTS
- ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
- : FastCloneShallowArrayStub::CLONE_ELEMENTS;
+ FastCloneShallowArrayStub::Mode mode = has_fast_elements
+ ? FastCloneShallowArrayStub::CLONE_ELEMENTS
+ : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
FastCloneShallowArrayStub stub(mode, length);
__ CallStub(&stub);
}
@@ -1543,56 +1650,23 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
}
VisitForAccumulatorValue(subexpr);
- __ ldr(r6, MemOperand(sp)); // Copy of array literal.
- __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
- __ ldr(r2, FieldMemOperand(r6, JSObject::kMapOffset));
- int offset = FixedArray::kHeaderSize + (i * kPointerSize);
-
- Label element_done;
- Label double_elements;
- Label smi_element;
- Label slow_elements;
- Label fast_elements;
- __ CheckFastElements(r2, r3, &double_elements);
-
- // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
- __ JumpIfSmi(result_register(), &smi_element);
- __ CheckFastSmiOnlyElements(r2, r3, &fast_elements);
-
- // Store into the array literal requires a elements transition. Call into
- // the runtime.
- __ bind(&slow_elements);
- __ push(r6); // Copy of array literal.
- __ mov(r1, Operand(Smi::FromInt(i)));
- __ mov(r2, Operand(Smi::FromInt(NONE))); // PropertyAttributes
- __ mov(r3, Operand(Smi::FromInt(strict_mode_flag()))); // Strict mode.
- __ Push(r1, result_register(), r2, r3);
- __ CallRuntime(Runtime::kSetProperty, 5);
- __ b(&element_done);
-
- // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
- __ bind(&double_elements);
- __ mov(r3, Operand(Smi::FromInt(i)));
- __ StoreNumberToDoubleElements(result_register(), r3, r6, r1, r4, r5, r9,
- r7, &slow_elements);
- __ b(&element_done);
-
- // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
- __ bind(&fast_elements);
- __ str(result_register(), FieldMemOperand(r1, offset));
- // Update the write barrier for the array store.
- __ RecordWriteField(
- r1, offset, result_register(), r2, kLRHasBeenSaved, kDontSaveFPRegs,
- EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
- __ b(&element_done);
-
- // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
- // FAST_ELEMENTS, and value is Smi.
- __ bind(&smi_element);
- __ str(result_register(), FieldMemOperand(r1, offset));
- // Fall through
-
- __ bind(&element_done);
+ if (constant_elements_kind == FAST_ELEMENTS) {
+ int offset = FixedArray::kHeaderSize + (i * kPointerSize);
+ __ ldr(r6, MemOperand(sp)); // Copy of array literal.
+ __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
+ __ str(result_register(), FieldMemOperand(r1, offset));
+ // Update the write barrier for the array store.
+ __ RecordWriteField(r1, offset, result_register(), r2,
+ kLRHasBeenSaved, kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
+ } else {
+ __ ldr(r1, MemOperand(sp)); // Copy of array literal.
+ __ ldr(r2, FieldMemOperand(r1, JSObject::kMapOffset));
+ __ mov(r3, Operand(Smi::FromInt(i)));
+ __ mov(r4, Operand(Smi::FromInt(expr->literal_index())));
+ StoreArrayLiteralElementStub stub;
+ __ CallStub(&stub);
+ }
PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
}
@@ -1724,7 +1798,7 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
__ mov(r2, Operand(key->handle()));
// Call load IC. It has arguments receiver and property name r0 and r2.
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}
@@ -1732,7 +1806,7 @@ void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
// Call keyed load IC. It has arguments key and receiver in r0 and r1.
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}
@@ -1759,7 +1833,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ bind(&stub_call);
BinaryOpStub stub(op, mode);
- __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
__ jmp(&done);
@@ -1810,7 +1884,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ mov(ip, Operand(scratch1, ASR, 31));
__ cmp(ip, Operand(scratch2));
__ b(ne, &stub_call);
- __ tst(scratch1, Operand(scratch1));
+ __ cmp(scratch1, Operand(0));
__ mov(right, Operand(scratch1), LeaveCC, ne);
__ b(ne, &done);
__ add(scratch2, right, Operand(left), SetCC);
@@ -1842,13 +1916,13 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
__ pop(r1);
BinaryOpStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
- __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
context()->Plug(r0);
}
-void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
+void FullCodeGenerator::EmitAssignment(Expression* expr) {
// Invalid left-hand sides are rewritten to have a 'throw
// ReferenceError' on the left-hand side.
if (!expr->IsValidLeftHandSide()) {
@@ -1880,10 +1954,10 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
__ mov(r1, r0);
__ pop(r0); // Restore value.
__ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ Call(ic);
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic);
break;
}
case KEYED_PROPERTY: {
@@ -1893,14 +1967,13 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
__ mov(r1, r0);
__ pop(r2);
__ pop(r0); // Restore value.
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
- : isolate()->builtins()->KeyedStoreIC_Initialize();
- __ Call(ic);
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize()
+ : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
+ CallIC(ic);
break;
}
}
- PrepareForBailoutForId(bailout_ast_id, TOS_REG);
context()->Plug(r0);
}
@@ -1910,11 +1983,11 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
if (var->IsUnallocated()) {
// Global var, const, or let.
__ mov(r2, Operand(var->name()));
- __ ldr(r1, var->is_qml_global()?QmlGlobalObjectOperand():GlobalObjectOperand());
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ __ ldr(r1, GlobalObjectOperand());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (op == Token::INIT_CONST) {
// Const initializers need a write barrier.
@@ -1944,7 +2017,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
if (var->IsLookupSlot()) {
__ push(r0); // Value.
__ mov(r1, Operand(var->name()));
- __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
+ __ mov(r0, Operand(Smi::FromInt(language_mode())));
__ Push(cp, r1, r0); // Context, name, strict mode.
__ CallRuntime(Runtime::kStoreContextSlot, 4);
} else {
@@ -1992,7 +2065,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
ASSERT(var->IsLookupSlot());
__ push(r0); // Value.
__ mov(r1, Operand(var->name()));
- __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
+ __ mov(r0, Operand(Smi::FromInt(language_mode())));
__ Push(cp, r1, r0); // Context, name, strict mode.
__ CallRuntime(Runtime::kStoreContextSlot, 4);
}
@@ -2029,10 +2102,10 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
__ pop(r1);
}
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
@@ -2075,10 +2148,10 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
__ pop(r2);
}
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
- : isolate()->builtins()->KeyedStoreIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize()
+ : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
@@ -2112,6 +2185,14 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
}
}
+
+void FullCodeGenerator::CallIC(Handle<Code> code,
+ RelocInfo::Mode rmode,
+ unsigned ast_id) {
+ ic_total_count_++;
+ __ Call(code, rmode, ast_id);
+}
+
void FullCodeGenerator::EmitCallWithIC(Call* expr,
Handle<Object> name,
RelocInfo::Mode mode) {
@@ -2129,7 +2210,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
// Call the IC initialization code.
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- __ Call(ic, mode, expr->id());
+ CallIC(ic, mode, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2162,7 +2243,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
Handle<Code> ic =
isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
__ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
- __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2182,6 +2263,7 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
// Record source position for debugger.
SetSourcePosition(expr->position());
CallFunctionStub stub(arg_count, flags);
+ __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
// Restore context register.
@@ -2199,20 +2281,19 @@ void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
}
__ push(r1);
- // Push the receiver of the enclosing function and do runtime call.
+ // Push the receiver of the enclosing function.
int receiver_offset = 2 + info_->scope()->num_parameters();
__ ldr(r1, MemOperand(fp, receiver_offset * kPointerSize));
__ push(r1);
- // Push the strict mode flag. In harmony mode every eval call
- // is a strict mode eval call.
- StrictModeFlag strict_mode =
- FLAG_harmony_scoping ? kStrictMode : strict_mode_flag();
- __ mov(r1, Operand(Smi::FromInt(strict_mode)));
+ // Push the language mode.
+ __ mov(r1, Operand(Smi::FromInt(language_mode())));
__ push(r1);
- // Push the qml mode flag.
- __ mov(r1, Operand(Smi::FromInt(is_qml_mode())));
+
+ // Push the start position of the scope the calls resides in.
+ __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
__ push(r1);
+ // Do the runtime call.
__ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
}
@@ -2262,6 +2343,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// Record source position for debugger.
SetSourcePosition(expr->position());
CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
+ __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
// Restore context register.
@@ -2269,7 +2351,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
context()->DropAndPlug(1, r0);
} else if (proxy != NULL && proxy->var()->IsUnallocated()) {
// Push global object as receiver for the call IC.
- __ ldr(r0, proxy->var()->is_qml_global()?QmlGlobalObjectOperand():GlobalObjectOperand());
+ __ ldr(r0, GlobalObjectOperand());
__ push(r0);
EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
} else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
@@ -2368,9 +2450,23 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ mov(r0, Operand(arg_count));
__ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
- Handle<Code> construct_builtin =
- isolate()->builtins()->JSConstructCall();
- __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
+ // Record call targets in unoptimized code, but not in the snapshot.
+ CallFunctionFlags flags;
+ if (!Serializer::enabled()) {
+ flags = RECORD_CALL_TARGET;
+ Handle<Object> uninitialized =
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
+ RecordTypeFeedbackCell(expr->id(), cell);
+ __ mov(r2, Operand(cell));
+ } else {
+ flags = NO_CALL_FUNCTION_FLAGS;
+ }
+
+ CallConstructStub stub(flags);
+ __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+ PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(r0);
}
@@ -2921,14 +3017,62 @@ void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
}
+void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT(args->length() == 2);
+ ASSERT_NE(NULL, args->at(1)->AsLiteral());
+ Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
+
+ VisitForAccumulatorValue(args->at(0)); // Load the object.
+
+ Label runtime, done;
+ Register object = r0;
+ Register result = r0;
+ Register scratch0 = r9;
+ Register scratch1 = r1;
+
+#ifdef DEBUG
+ __ AbortIfSmi(object);
+ __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
+ __ Assert(eq, "Trying to get date field from non-date.");
+#endif
+
+ if (index->value() == 0) {
+ __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
+ } else {
+ if (index->value() < JSDate::kFirstUncachedField) {
+ ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
+ __ mov(scratch1, Operand(stamp));
+ __ ldr(scratch1, MemOperand(scratch1));
+ __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
+ __ cmp(scratch1, scratch0);
+ __ b(ne, &runtime);
+ __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
+ kPointerSize * index->value()));
+ __ jmp(&done);
+ }
+ __ bind(&runtime);
+ __ PrepareCallCFunction(2, scratch1);
+ __ mov(r1, Operand(index));
+ __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
+ __ bind(&done);
+ }
+ context()->Plug(r0);
+}
+
+
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
// Load the arguments on the stack and call the runtime function.
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- MathPowStub stub;
- __ CallStub(&stub);
+ if (CpuFeatures::IsSupported(VFP3)) {
+ MathPowStub stub(MathPowStub::ON_STACK);
+ __ CallStub(&stub);
+ } else {
+ __ CallRuntime(Runtime::kMath_pow, 2);
+ }
context()->Plug(r0);
}
@@ -2999,7 +3143,6 @@ void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
Register object = r1;
Register index = r0;
- Register scratch = r2;
Register result = r3;
__ pop(object);
@@ -3009,7 +3152,6 @@ void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
Label done;
StringCharCodeAtGenerator generator(object,
index,
- scratch,
result,
&need_conversion,
&need_conversion,
@@ -3046,8 +3188,7 @@ void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
Register object = r1;
Register index = r0;
- Register scratch1 = r2;
- Register scratch2 = r3;
+ Register scratch = r3;
Register result = r0;
__ pop(object);
@@ -3057,8 +3198,7 @@ void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
Label done;
StringCharAtGenerator generator(object,
index,
- scratch1,
- scratch2,
+ scratch,
result,
&need_conversion,
&need_conversion,
@@ -3135,6 +3275,18 @@ void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
}
+void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
+ // Load the argument on the stack and call the stub.
+ TranscendentalCacheStub stub(TranscendentalCache::TAN,
+ TranscendentalCacheStub::TAGGED);
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT(args->length() == 1);
+ VisitForStackValue(args->at(0));
+ __ CallStub(&stub);
+ context()->Plug(r0);
+}
+
+
void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
// Load the argument on the stack and call the stub.
TranscendentalCacheStub stub(TranscendentalCache::LOG,
@@ -3167,12 +3319,24 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
}
VisitForAccumulatorValue(args->last()); // Function.
+ // Check for proxy.
+ Label proxy, done;
+ __ CompareObjectType(r0, r1, r1, JS_FUNCTION_PROXY_TYPE);
+ __ b(eq, &proxy);
+
// InvokeFunction requires the function in r1. Move it in there.
__ mov(r1, result_register());
ParameterCount count(arg_count);
__ InvokeFunction(r1, count, CALL_FUNCTION,
NullCallWrapper(), CALL_AS_METHOD);
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ __ jmp(&done);
+
+ __ bind(&proxy);
+ __ push(r0);
+ __ CallRuntime(Runtime::kCall, args->length());
+ __ bind(&done);
+
context()->Plug(r0);
}
@@ -3489,7 +3653,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
__ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
__ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
__ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
- __ add(string_length, string_length, Operand(scratch1));
+ __ add(string_length, string_length, Operand(scratch1), SetCC);
__ b(vs, &bailout);
__ cmp(element, elements_end);
__ b(lt, &loop);
@@ -3526,7 +3690,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
__ b(ne, &bailout);
__ tst(scratch2, Operand(0x80000000));
__ b(ne, &bailout);
- __ add(string_length, string_length, Operand(scratch2));
+ __ add(string_length, string_length, Operand(scratch2), SetCC);
__ b(vs, &bailout);
__ SmiUntag(string_length);
@@ -3583,7 +3747,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// One-character separator case
__ bind(&one_char_separator);
- // Replace separator with its ascii character value.
+ // Replace separator with its ASCII character value.
__ ldrb(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
// Jump into the loop after the code that copies the separator, so the first
// element is not preceded by a separator
@@ -3594,7 +3758,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// result_pos: the position to which we are currently copying characters.
// element: Current array element.
// elements_end: Array end.
- // separator: Single separator ascii char (in lower byte).
+ // separator: Single separator ASCII char (in lower byte).
// Copy the separator character to the result.
__ strb(separator, MemOperand(result_pos, 1, PostIndex));
@@ -3676,7 +3840,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- __ Call(ic, mode, expr->id());
+ CallIC(ic, mode, expr->id());
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
} else {
@@ -3697,7 +3861,9 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
if (property != NULL) {
VisitForStackValue(property->obj());
VisitForStackValue(property->key());
- __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
+ StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
+ ? kNonStrictMode : kStrictMode;
+ __ mov(r1, Operand(Smi::FromInt(strict_mode_flag)));
__ push(r1);
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(r0);
@@ -3705,9 +3871,9 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
Variable* var = proxy->var();
// Delete of an unqualified identifier is disallowed in strict mode
// but "delete this" is allowed.
- ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
+ ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
if (var->IsUnallocated()) {
- __ ldr(r2, var->is_qml_global() ? QmlGlobalObjectOperand() : GlobalObjectOperand());
+ __ ldr(r2, GlobalObjectOperand());
__ mov(r1, Operand(var->name()));
__ mov(r0, Operand(Smi::FromInt(kNonStrictMode)));
__ Push(r2, r1, r0);
@@ -3829,7 +3995,7 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
// accumulator register r0.
VisitForAccumulatorValue(expr->expression());
SetSourcePosition(expr->position());
- __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
context()->Plug(r0);
}
@@ -3940,7 +4106,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
SetSourcePosition(expr->position());
BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
- __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
patch_site.EmitPatchInfo();
__ bind(&done);
@@ -3969,10 +4135,10 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
case NAMED_PROPERTY: {
__ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
__ pop(r1);
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -3986,10 +4152,10 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
case KEYED_PROPERTY: {
__ pop(r1); // Key.
__ pop(r2); // Receiver.
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
- : isolate()->builtins()->KeyedStoreIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize()
+ : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -4010,12 +4176,12 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
VariableProxy* proxy = expr->AsVariableProxy();
if (proxy != NULL && proxy->var()->IsUnallocated()) {
Comment cmnt(masm_, "Global variable");
- __ ldr(r0, proxy->var()->is_qml_global() ? QmlGlobalObjectOperand() : GlobalObjectOperand());
+ __ ldr(r0, GlobalObjectOperand());
__ mov(r2, Operand(proxy->name()));
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
// Use a regular load, not a contextual load, to avoid a reference
// error.
- __ Call(ic);
+ CallIC(ic);
PrepareForBailout(expr, TOS_REG);
context()->Plug(r0);
} else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
@@ -4198,7 +4364,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
- __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
__ cmp(r0, Operand(0));
diff --git a/src/3rdparty/v8/src/arm/ic-arm.cc b/src/3rdparty/v8/src/arm/ic-arm.cc
index 18d4a9f..e843657 100644
--- a/src/3rdparty/v8/src/arm/ic-arm.cc
+++ b/src/3rdparty/v8/src/arm/ic-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -399,7 +399,7 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
NORMAL,
argc);
Isolate::Current()->stub_cache()->GenerateProbe(
- masm, flags, r1, r2, r3, r4, r5);
+ masm, flags, r1, r2, r3, r4, r5, r6);
// If the stub cache probing failed, the receiver might be a value.
// For value objects, we use the map of the prototype objects for
@@ -438,7 +438,7 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
// Probe the stub cache for the value object.
__ bind(&probe);
Isolate::Current()->stub_cache()->GenerateProbe(
- masm, flags, r1, r2, r3, r4, r5);
+ masm, flags, r1, r2, r3, r4, r5, r6);
__ bind(&miss);
}
@@ -706,7 +706,7 @@ void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
Code::Flags flags =
Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
Isolate::Current()->stub_cache()->GenerateProbe(
- masm, flags, r0, r2, r3, r4, r5);
+ masm, flags, r0, r2, r3, r4, r5, r6);
// Cache miss: Jump to runtime.
GenerateMiss(masm);
@@ -1031,15 +1031,34 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ mov(r3, Operand(r2, ASR, KeyedLookupCache::kMapHashShift));
__ ldr(r4, FieldMemOperand(r0, String::kHashFieldOffset));
__ eor(r3, r3, Operand(r4, ASR, String::kHashShift));
- __ And(r3, r3, Operand(KeyedLookupCache::kCapacityMask));
+ int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
+ __ And(r3, r3, Operand(mask));
// Load the key (consisting of map and symbol) from the cache and
// check for match.
+ Label load_in_object_property;
+ static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
+ Label hit_on_nth_entry[kEntriesPerBucket];
ExternalReference cache_keys =
ExternalReference::keyed_lookup_cache_keys(isolate);
+
__ mov(r4, Operand(cache_keys));
__ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1));
- __ ldr(r5, MemOperand(r4, kPointerSize, PostIndex)); // Move r4 to symbol.
+
+ for (int i = 0; i < kEntriesPerBucket - 1; i++) {
+ Label try_next_entry;
+ // Load map and move r4 to next entry.
+ __ ldr(r5, MemOperand(r4, kPointerSize * 2, PostIndex));
+ __ cmp(r2, r5);
+ __ b(ne, &try_next_entry);
+ __ ldr(r5, MemOperand(r4, -kPointerSize)); // Load symbol
+ __ cmp(r0, r5);
+ __ b(eq, &hit_on_nth_entry[i]);
+ __ bind(&try_next_entry);
+ }
+
+ // Last entry: Load map and move r4 to symbol.
+ __ ldr(r5, MemOperand(r4, kPointerSize, PostIndex));
__ cmp(r2, r5);
__ b(ne, &slow);
__ ldr(r5, MemOperand(r4));
@@ -1053,13 +1072,25 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// r3 : lookup cache index
ExternalReference cache_field_offsets =
ExternalReference::keyed_lookup_cache_field_offsets(isolate);
- __ mov(r4, Operand(cache_field_offsets));
- __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
- __ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset));
- __ sub(r5, r5, r6, SetCC);
- __ b(ge, &property_array_property);
+
+ // Hit on nth entry.
+ for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
+ __ bind(&hit_on_nth_entry[i]);
+ __ mov(r4, Operand(cache_field_offsets));
+ if (i != 0) {
+ __ add(r3, r3, Operand(i));
+ }
+ __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
+ __ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset));
+ __ sub(r5, r5, r6, SetCC);
+ __ b(ge, &property_array_property);
+ if (i != 0) {
+ __ jmp(&load_in_object_property);
+ }
+ }
// Load in-object property.
+ __ bind(&load_in_object_property);
__ ldrb(r6, FieldMemOperand(r2, Map::kInstanceSizeOffset));
__ add(r6, r6, r5); // Index from start of object.
__ sub(r1, r1, Operand(kHeapObjectTag)); // Remove the heap tag.
@@ -1109,14 +1140,12 @@ void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
Register receiver = r1;
Register index = r0;
- Register scratch1 = r2;
- Register scratch2 = r3;
+ Register scratch = r3;
Register result = r0;
StringCharAtGenerator char_at_generator(receiver,
index,
- scratch1,
- scratch2,
+ scratch,
result,
&miss, // When not a string.
&miss, // When not a number.
@@ -1283,14 +1312,16 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
Label slow, array, extra, check_if_double_array;
Label fast_object_with_map_check, fast_object_without_map_check;
Label fast_double_with_map_check, fast_double_without_map_check;
+ Label transition_smi_elements, finish_object_store, non_double_value;
+ Label transition_double_elements;
// Register usage.
Register value = r0;
Register key = r1;
Register receiver = r2;
- Register elements = r3; // Elements array of the receiver.
+ Register receiver_map = r3;
Register elements_map = r6;
- Register receiver_map = r7;
+ Register elements = r7; // Elements array of the receiver.
// r4 and r5 are used as general scratch registers.
// Check that the key is a smi.
@@ -1388,9 +1419,11 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ Ret();
__ bind(&non_smi_value);
- // Escape to slow case when writing non-smi into smi-only array.
- __ CheckFastObjectElements(receiver_map, scratch_value, &slow);
+ // Escape to elements kind transition case.
+ __ CheckFastObjectElements(receiver_map, scratch_value,
+ &transition_smi_elements);
// Fast elements array, store the value to the elements backing store.
+ __ bind(&finish_object_store);
__ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ add(address, address, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
__ str(value, MemOperand(address));
@@ -1416,12 +1449,56 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
key,
receiver,
elements,
+ r3,
r4,
r5,
r6,
- r7,
- &slow);
+ &transition_double_elements);
__ Ret();
+
+ __ bind(&transition_smi_elements);
+ // Transition the array appropriately depending on the value type.
+ __ ldr(r4, FieldMemOperand(value, HeapObject::kMapOffset));
+ __ CompareRoot(r4, Heap::kHeapNumberMapRootIndex);
+ __ b(ne, &non_double_value);
+
+ // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
+ // FAST_DOUBLE_ELEMENTS and complete the store.
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_DOUBLE_ELEMENTS,
+ receiver_map,
+ r4,
+ &slow);
+ ASSERT(receiver_map.is(r3)); // Transition code expects map in r3
+ ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
+ __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ jmp(&fast_double_without_map_check);
+
+ __ bind(&non_double_value);
+ // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ receiver_map,
+ r4,
+ &slow);
+ ASSERT(receiver_map.is(r3)); // Transition code expects map in r3
+ ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
+ __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ jmp(&finish_object_store);
+
+ __ bind(&transition_double_elements);
+ // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
+ // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
+ // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
+ __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
+ FAST_ELEMENTS,
+ receiver_map,
+ r4,
+ &slow);
+ ASSERT(receiver_map.is(r3)); // Transition code expects map in r3
+ ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
+ __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ jmp(&finish_object_store);
}
@@ -1439,7 +1516,7 @@ void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC, strict_mode);
Isolate::Current()->stub_cache()->GenerateProbe(
- masm, flags, r1, r2, r3, r4, r5);
+ masm, flags, r1, r2, r3, r4, r5, r6);
// Cache miss: Jump to runtime.
GenerateMiss(masm);
@@ -1471,11 +1548,10 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
// -- lr : return address
// -----------------------------------
//
- // This accepts as a receiver anything JSObject::SetElementsLength accepts
- // (currently anything except for external and pixel arrays which means
- // anything with elements of FixedArray type.), but currently is restricted
- // to JSArray.
- // Value must be a number, but only smis are accepted as the most common case.
+ // This accepts as a receiver anything JSArray::SetElementsLength accepts
+ // (currently anything except for external arrays which means anything with
+ // elements of FixedArray type). Value must be a number, but only smis are
+ // accepted as the most common case.
Label miss;
@@ -1497,6 +1573,13 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
__ CompareObjectType(scratch, scratch, scratch, FIXED_ARRAY_TYPE);
__ b(ne, &miss);
+ // Check that the array has fast properties, otherwise the length
+ // property might have been redefined.
+ __ ldr(scratch, FieldMemOperand(receiver, JSArray::kPropertiesOffset));
+ __ ldr(scratch, FieldMemOperand(scratch, FixedArray::kMapOffset));
+ __ CompareRoot(scratch, Heap::kHashTableMapRootIndex);
+ __ b(eq, &miss);
+
// Check that value is a smi.
__ JumpIfNotSmi(value, &miss);
@@ -1589,6 +1672,9 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
rewritten = stub.GetCode();
} else {
ICCompareStub stub(op_, state);
+ if (state == KNOWN_OBJECTS) {
+ stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
+ }
rewritten = stub.GetCode();
}
set_target(*rewritten);
diff --git a/src/3rdparty/v8/src/arm/lithium-arm.cc b/src/3rdparty/v8/src/arm/lithium-arm.cc
index b1cd8d8..4c081e2 100644
--- a/src/3rdparty/v8/src/arm/lithium-arm.cc
+++ b/src/3rdparty/v8/src/arm/lithium-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -228,6 +228,13 @@ void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
}
+void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
+ stream->Add("if is_string(");
+ InputAt(0)->PrintTo(stream);
+ stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
+}
+
+
void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_smi(");
InputAt(0)->PrintTo(stream);
@@ -242,6 +249,14 @@ void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
}
+void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
+ stream->Add("if string_compare(");
+ InputAt(0)->PrintTo(stream);
+ InputAt(1)->PrintTo(stream);
+ stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
+}
+
+
void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if has_instance_type(");
InputAt(0)->PrintTo(stream);
@@ -425,7 +440,7 @@ LOperand* LChunk::GetNextSpillSlot(bool is_double) {
void LChunk::MarkEmptyBlocks() {
- HPhase phase("Mark empty blocks", this);
+ HPhase phase("L_Mark empty blocks", this);
for (int i = 0; i < graph()->blocks()->length(); ++i) {
HBasicBlock* block = graph()->blocks()->at(i);
int first = block->first_instruction_index();
@@ -461,7 +476,7 @@ void LChunk::MarkEmptyBlocks() {
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
- LInstructionGap* gap = new LInstructionGap(block);
+ LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
int index = -1;
if (instr->IsControl()) {
instructions_.Add(gap);
@@ -536,8 +551,8 @@ Representation LChunk::LookupLiteralRepresentation(
LChunk* LChunkBuilder::Build() {
ASSERT(is_unused());
- chunk_ = new LChunk(info(), graph());
- HPhase phase("Building chunk", chunk_);
+ chunk_ = new(zone()) LChunk(info(), graph());
+ HPhase phase("L_Building chunk", chunk_);
status_ = BUILDING;
const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
for (int i = 0; i < blocks->length(); i++) {
@@ -566,20 +581,15 @@ void LChunkBuilder::Abort(const char* format, ...) {
}
-LRegister* LChunkBuilder::ToOperand(Register reg) {
- return LRegister::Create(Register::ToAllocationIndex(reg));
-}
-
-
LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
- return new LUnallocated(LUnallocated::FIXED_REGISTER,
- Register::ToAllocationIndex(reg));
+ return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
+ Register::ToAllocationIndex(reg));
}
LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
- return new LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
- DoubleRegister::ToAllocationIndex(reg));
+ return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
+ DoubleRegister::ToAllocationIndex(reg));
}
@@ -594,30 +604,30 @@ LOperand* LChunkBuilder::UseFixedDouble(HValue* value, DoubleRegister reg) {
LOperand* LChunkBuilder::UseRegister(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}
LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
return Use(value,
- new LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
- LUnallocated::USED_AT_START));
+ new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
+ LUnallocated::USED_AT_START));
}
LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::WRITABLE_REGISTER));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
}
LOperand* LChunkBuilder::Use(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::NONE));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
}
LOperand* LChunkBuilder::UseAtStart(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::NONE,
- LUnallocated::USED_AT_START));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
+ LUnallocated::USED_AT_START));
}
@@ -652,7 +662,7 @@ LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
LOperand* LChunkBuilder::UseAny(HValue* value) {
return value->IsConstant()
? chunk_->DefineConstantOperand(HConstant::cast(value))
- : Use(value, new LUnallocated(LUnallocated::ANY));
+ : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
}
@@ -661,7 +671,7 @@ LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
HInstruction* instr = HInstruction::cast(value);
VisitInstruction(instr);
}
- allocator_->RecordUse(value, operand);
+ operand->set_virtual_register(value->id());
return operand;
}
@@ -669,36 +679,33 @@ LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
template<int I, int T>
LInstruction* LChunkBuilder::Define(LTemplateInstruction<1, I, T>* instr,
LUnallocated* result) {
- allocator_->RecordDefinition(current_instruction_, result);
+ result->set_virtual_register(current_instruction_->id());
instr->set_result(result);
return instr;
}
template<int I, int T>
-LInstruction* LChunkBuilder::Define(LTemplateInstruction<1, I, T>* instr) {
- return Define(instr, new LUnallocated(LUnallocated::NONE));
-}
-
-
-template<int I, int T>
LInstruction* LChunkBuilder::DefineAsRegister(
LTemplateInstruction<1, I, T>* instr) {
- return Define(instr, new LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
+ return Define(instr,
+ new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}
template<int I, int T>
LInstruction* LChunkBuilder::DefineAsSpilled(
LTemplateInstruction<1, I, T>* instr, int index) {
- return Define(instr, new LUnallocated(LUnallocated::FIXED_SLOT, index));
+ return Define(instr,
+ new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
}
template<int I, int T>
LInstruction* LChunkBuilder::DefineSameAsFirst(
LTemplateInstruction<1, I, T>* instr) {
- return Define(instr, new LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
+ return Define(instr,
+ new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
}
@@ -780,44 +787,46 @@ LInstruction* LChunkBuilder::MarkAsSaveDoubles(LInstruction* instr) {
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
ASSERT(!instr->HasPointerMap());
- instr->set_pointer_map(new LPointerMap(position_));
+ instr->set_pointer_map(new(zone()) LPointerMap(position_));
return instr;
}
LUnallocated* LChunkBuilder::TempRegister() {
- LUnallocated* operand = new LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
- allocator_->RecordTemporary(operand);
+ LUnallocated* operand =
+ new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
+ operand->set_virtual_register(allocator_->GetVirtualRegister());
+ if (!allocator_->AllocationOk()) Abort("Not enough virtual registers.");
return operand;
}
LOperand* LChunkBuilder::FixedTemp(Register reg) {
LUnallocated* operand = ToUnallocated(reg);
- allocator_->RecordTemporary(operand);
+ ASSERT(operand->HasFixedPolicy());
return operand;
}
LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
LUnallocated* operand = ToUnallocated(reg);
- allocator_->RecordTemporary(operand);
+ ASSERT(operand->HasFixedPolicy());
return operand;
}
LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
- return new LLabel(instr->block());
+ return new(zone()) LLabel(instr->block());
}
LInstruction* LChunkBuilder::DoSoftDeoptimize(HSoftDeoptimize* instr) {
- return AssignEnvironment(new LDeoptimize);
+ return AssignEnvironment(new(zone()) LDeoptimize);
}
LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
- return AssignEnvironment(new LDeoptimize);
+ return AssignEnvironment(new(zone()) LDeoptimize);
}
@@ -829,7 +838,7 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op,
LOperand* left = UseFixed(instr->left(), r1);
LOperand* right = UseFixed(instr->right(), r0);
- LArithmeticT* result = new LArithmeticT(op, left, right);
+ LArithmeticT* result = new(zone()) LArithmeticT(op, left, right);
return MarkAsCall(DefineFixed(result, r0), instr);
}
@@ -863,7 +872,7 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op,
}
LInstruction* result =
- DefineAsRegister(new LShiftI(op, left, right, does_deopt));
+ DefineAsRegister(new(zone()) LShiftI(op, left, right, does_deopt));
return does_deopt ? AssignEnvironment(result) : result;
}
@@ -876,7 +885,7 @@ LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
ASSERT(op != Token::MOD);
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- LArithmeticD* result = new LArithmeticD(op, left, right);
+ LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
return DefineAsRegister(result);
}
@@ -894,7 +903,8 @@ LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
ASSERT(right->representation().IsTagged());
LOperand* left_operand = UseFixed(left, r1);
LOperand* right_operand = UseFixed(right, r0);
- LArithmeticT* result = new LArithmeticT(op, left_operand, right_operand);
+ LArithmeticT* result =
+ new(zone()) LArithmeticT(op, left_operand, right_operand);
return MarkAsCall(DefineFixed(result, r0), instr);
}
@@ -990,14 +1000,18 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
LEnvironment* outer =
CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
int ast_id = hydrogen_env->ast_id();
- ASSERT(ast_id != AstNode::kNoNumber);
+ ASSERT(ast_id != AstNode::kNoNumber ||
+ hydrogen_env->frame_type() != JS_FUNCTION);
int value_count = hydrogen_env->length();
- LEnvironment* result = new LEnvironment(hydrogen_env->closure(),
- ast_id,
- hydrogen_env->parameter_count(),
- argument_count_,
- value_count,
- outer);
+ LEnvironment* result = new(zone()) LEnvironment(
+ hydrogen_env->closure(),
+ hydrogen_env->frame_type(),
+ ast_id,
+ hydrogen_env->parameter_count(),
+ argument_count_,
+ value_count,
+ outer);
+ int argument_index = *argument_index_accumulator;
for (int i = 0; i < value_count; ++i) {
if (hydrogen_env->is_special_index(i)) continue;
@@ -1006,31 +1020,44 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
if (value->IsArgumentsObject()) {
op = NULL;
} else if (value->IsPushArgument()) {
- op = new LArgument((*argument_index_accumulator)++);
+ op = new(zone()) LArgument(argument_index++);
} else {
op = UseAny(value);
}
result->AddValue(op, value->representation());
}
+ if (hydrogen_env->frame_type() == JS_FUNCTION) {
+ *argument_index_accumulator = argument_index;
+ }
+
return result;
}
LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
- return new LGoto(instr->FirstSuccessor()->block_id());
+ return new(zone()) LGoto(instr->FirstSuccessor()->block_id());
}
LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
- HValue* v = instr->value();
- if (v->EmitAtUses()) {
- HBasicBlock* successor = HConstant::cast(v)->ToBoolean()
+ HValue* value = instr->value();
+ if (value->EmitAtUses()) {
+ HBasicBlock* successor = HConstant::cast(value)->ToBoolean()
? instr->FirstSuccessor()
: instr->SecondSuccessor();
- return new LGoto(successor->block_id());
+ return new(zone()) LGoto(successor->block_id());
}
- return AssignEnvironment(new LBranch(UseRegister(v)));
+
+ LBranch* result = new(zone()) LBranch(UseRegister(value));
+ // Tagged values that are not known smis or booleans require a
+ // deoptimization environment.
+ Representation rep = value->representation();
+ HType type = value->type();
+ if (rep.IsTagged() && !type.IsSmi() && !type.IsBoolean()) {
+ return AssignEnvironment(result);
+ }
+ return result;
}
@@ -1039,23 +1066,24 @@ LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
ASSERT(instr->value()->representation().IsTagged());
LOperand* value = UseRegisterAtStart(instr->value());
LOperand* temp = TempRegister();
- return new LCmpMapAndBranch(value, temp);
+ return new(zone()) LCmpMapAndBranch(value, temp);
}
-LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
- return DefineAsRegister(new LArgumentsLength(UseRegister(length->value())));
+LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* instr) {
+ LOperand* value = UseRegister(instr->value());
+ return DefineAsRegister(new(zone()) LArgumentsLength(value));
}
LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
- return DefineAsRegister(new LArgumentsElements);
+ return DefineAsRegister(new(zone()) LArgumentsElements);
}
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LInstanceOf* result =
- new LInstanceOf(UseFixed(instr->left(), r0),
+ new(zone()) LInstanceOf(UseFixed(instr->left(), r0),
UseFixed(instr->right(), r1));
return MarkAsCall(DefineFixed(result, r0), instr);
}
@@ -1064,17 +1092,26 @@ LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
HInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* result =
- new LInstanceOfKnownGlobal(UseFixed(instr->left(), r0), FixedTemp(r4));
+ new(zone()) LInstanceOfKnownGlobal(UseFixed(instr->left(), r0),
+ FixedTemp(r4));
return MarkAsCall(DefineFixed(result, r0), instr);
}
+LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
+ LOperand* receiver = UseRegisterAtStart(instr->receiver());
+ LOperand* function = UseRegisterAtStart(instr->function());
+ LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
+ return AssignEnvironment(DefineSameAsFirst(result));
+}
+
+
LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
LOperand* function = UseFixed(instr->function(), r1);
LOperand* receiver = UseFixed(instr->receiver(), r0);
LOperand* length = UseFixed(instr->length(), r2);
LOperand* elements = UseFixed(instr->elements(), r3);
- LApplyArguments* result = new LApplyArguments(function,
+ LApplyArguments* result = new(zone()) LApplyArguments(function,
receiver,
length,
elements);
@@ -1085,63 +1122,75 @@ LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
++argument_count_;
LOperand* argument = Use(instr->argument());
- return new LPushArgument(argument);
+ return new(zone()) LPushArgument(argument);
}
LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
- return instr->HasNoUses() ? NULL : DefineAsRegister(new LThisFunction);
+ return instr->HasNoUses()
+ ? NULL
+ : DefineAsRegister(new(zone()) LThisFunction);
}
LInstruction* LChunkBuilder::DoContext(HContext* instr) {
- return instr->HasNoUses() ? NULL : DefineAsRegister(new LContext);
+ return instr->HasNoUses() ? NULL : DefineAsRegister(new(zone()) LContext);
}
LInstruction* LChunkBuilder::DoOuterContext(HOuterContext* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LOuterContext(context));
+ return DefineAsRegister(new(zone()) LOuterContext(context));
+}
+
+
+LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
+ return MarkAsCall(new(zone()) LDeclareGlobals, instr);
}
LInstruction* LChunkBuilder::DoGlobalObject(HGlobalObject* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LGlobalObject(context, instr->qml_global()));
+ return DefineAsRegister(new(zone()) LGlobalObject(context));
}
LInstruction* LChunkBuilder::DoGlobalReceiver(HGlobalReceiver* instr) {
LOperand* global_object = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LGlobalReceiver(global_object));
+ return DefineAsRegister(new(zone()) LGlobalReceiver(global_object));
}
LInstruction* LChunkBuilder::DoCallConstantFunction(
HCallConstantFunction* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallConstantFunction, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallConstantFunction, r0), instr);
}
LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
LOperand* function = UseFixed(instr->function(), r1);
argument_count_ -= instr->argument_count();
- LInvokeFunction* result = new LInvokeFunction(function);
+ LInvokeFunction* result = new(zone()) LInvokeFunction(function);
return MarkAsCall(DefineFixed(result, r0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
}
LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
BuiltinFunctionId op = instr->op();
- if (op == kMathLog || op == kMathSin || op == kMathCos) {
+ if (op == kMathLog || op == kMathSin || op == kMathCos || op == kMathTan) {
LOperand* input = UseFixedDouble(instr->value(), d2);
- LUnaryMathOperation* result = new LUnaryMathOperation(input, NULL);
+ LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, NULL);
return MarkAsCall(DefineFixedDouble(result, d2), instr);
+ } else if (op == kMathPowHalf) {
+ LOperand* input = UseFixedDouble(instr->value(), d2);
+ LOperand* temp = FixedTemp(d3);
+ LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, temp);
+ return DefineFixedDouble(result, d2);
} else {
LOperand* input = UseRegisterAtStart(instr->value());
LOperand* temp = (op == kMathFloor) ? TempRegister() : NULL;
- LUnaryMathOperation* result = new LUnaryMathOperation(input, temp);
+ LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, temp);
switch (op) {
case kMathAbs:
return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
@@ -1151,8 +1200,6 @@ LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
return DefineAsRegister(result);
case kMathRound:
return AssignEnvironment(DefineAsRegister(result));
- case kMathPowHalf:
- return DefineAsRegister(result);
default:
UNREACHABLE();
return NULL;
@@ -1165,45 +1212,47 @@ LInstruction* LChunkBuilder::DoCallKeyed(HCallKeyed* instr) {
ASSERT(instr->key()->representation().IsTagged());
argument_count_ -= instr->argument_count();
LOperand* key = UseFixed(instr->key(), r2);
- return MarkAsCall(DefineFixed(new LCallKeyed(key), r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallKeyed(key), r0), instr);
}
LInstruction* LChunkBuilder::DoCallNamed(HCallNamed* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallNamed, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallNamed, r0), instr);
}
LInstruction* LChunkBuilder::DoCallGlobal(HCallGlobal* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallGlobal(instr->qml_global()), r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallGlobal, r0), instr);
}
LInstruction* LChunkBuilder::DoCallKnownGlobal(HCallKnownGlobal* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallKnownGlobal, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallKnownGlobal, r0), instr);
}
LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
LOperand* constructor = UseFixed(instr->constructor(), r1);
argument_count_ -= instr->argument_count();
- LCallNew* result = new LCallNew(constructor);
+ LCallNew* result = new(zone()) LCallNew(constructor);
return MarkAsCall(DefineFixed(result, r0), instr);
}
LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
+ LOperand* function = UseFixed(instr->function(), r1);
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallFunction, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallFunction(function), r0),
+ instr);
}
LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallRuntime, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallRuntime, r0), instr);
}
@@ -1229,7 +1278,7 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
- return DefineAsRegister(new LBitI(left, right));
+ return DefineAsRegister(new(zone()) LBitI(left, right));
} else {
ASSERT(instr->representation().IsTagged());
ASSERT(instr->left()->representation().IsTagged());
@@ -1237,7 +1286,7 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LOperand* left = UseFixed(instr->left(), r1);
LOperand* right = UseFixed(instr->right(), r0);
- LArithmeticT* result = new LArithmeticT(instr->op(), left, right);
+ LArithmeticT* result = new(zone()) LArithmeticT(instr->op(), left, right);
return MarkAsCall(DefineFixed(result, r0), instr);
}
}
@@ -1246,7 +1295,8 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
ASSERT(instr->value()->representation().IsInteger32());
ASSERT(instr->representation().IsInteger32());
- return DefineAsRegister(new LBitNotI(UseRegisterAtStart(instr->value())));
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new(zone()) LBitNotI(value));
}
@@ -1262,7 +1312,7 @@ LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
LOperand* dividend = UseFixed(instr->left(), r0);
LOperand* divisor = UseFixed(instr->right(), r1);
return AssignEnvironment(AssignPointerMap(
- DefineFixed(new LDivI(dividend, divisor), r0)));
+ DefineFixed(new(zone()) LDivI(dividend, divisor), r0)));
} else {
return DoArithmeticT(Token::DIV, instr);
}
@@ -1278,15 +1328,15 @@ LInstruction* LChunkBuilder::DoMod(HMod* instr) {
if (instr->HasPowerOf2Divisor()) {
ASSERT(!instr->CheckFlag(HValue::kCanBeDivByZero));
LOperand* value = UseRegisterAtStart(instr->left());
- mod = new LModI(value, UseOrConstant(instr->right()));
+ mod = new(zone()) LModI(value, UseOrConstant(instr->right()));
} else {
LOperand* dividend = UseRegister(instr->left());
LOperand* divisor = UseRegister(instr->right());
- mod = new LModI(dividend,
- divisor,
- TempRegister(),
- FixedTemp(d10),
- FixedTemp(d11));
+ mod = new(zone()) LModI(dividend,
+ divisor,
+ TempRegister(),
+ FixedTemp(d10),
+ FixedTemp(d11));
}
if (instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
@@ -1304,7 +1354,7 @@ LInstruction* LChunkBuilder::DoMod(HMod* instr) {
// TODO(fschneider): Allow any register as input registers.
LOperand* left = UseFixedDouble(instr->left(), d1);
LOperand* right = UseFixedDouble(instr->right(), d2);
- LArithmeticD* result = new LArithmeticD(Token::MOD, left, right);
+ LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right);
return MarkAsCall(DefineFixedDouble(result, d1), instr);
}
}
@@ -1325,7 +1375,12 @@ LInstruction* LChunkBuilder::DoMul(HMul* instr) {
} else {
left = UseRegisterAtStart(instr->LeastConstantOperand());
}
- return AssignEnvironment(DefineAsRegister(new LMulI(left, right, temp)));
+ LMulI* mul = new(zone()) LMulI(left, right, temp);
+ if (instr->CheckFlag(HValue::kCanOverflow) ||
+ instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ AssignEnvironment(mul);
+ }
+ return DefineAsRegister(mul);
} else if (instr->representation().IsDouble()) {
return DoArithmeticD(Token::MUL, instr);
@@ -1342,7 +1397,7 @@ LInstruction* LChunkBuilder::DoSub(HSub* instr) {
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseOrConstantAtStart(instr->right());
- LSubI* sub = new LSubI(left, right);
+ LSubI* sub = new(zone()) LSubI(left, right);
LInstruction* result = DefineAsRegister(sub);
if (instr->CheckFlag(HValue::kCanOverflow)) {
result = AssignEnvironment(result);
@@ -1362,7 +1417,7 @@ LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
- LAddI* add = new LAddI(left, right);
+ LAddI* add = new(zone()) LAddI(left, right);
LInstruction* result = DefineAsRegister(add);
if (instr->CheckFlag(HValue::kCanOverflow)) {
result = AssignEnvironment(result);
@@ -1386,20 +1441,29 @@ LInstruction* LChunkBuilder::DoPower(HPower* instr) {
LOperand* left = UseFixedDouble(instr->left(), d1);
LOperand* right = exponent_type.IsDouble() ?
UseFixedDouble(instr->right(), d2) :
- UseFixed(instr->right(), r0);
- LPower* result = new LPower(left, right);
+ UseFixed(instr->right(), r2);
+ LPower* result = new(zone()) LPower(left, right);
return MarkAsCall(DefineFixedDouble(result, d3),
instr,
CAN_DEOPTIMIZE_EAGERLY);
}
+LInstruction* LChunkBuilder::DoRandom(HRandom* instr) {
+ ASSERT(instr->representation().IsDouble());
+ ASSERT(instr->global_object()->representation().IsTagged());
+ LOperand* global_object = UseFixed(instr->global_object(), r0);
+ LRandom* result = new(zone()) LRandom(global_object);
+ return MarkAsCall(DefineFixedDouble(result, d7), instr);
+}
+
+
LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
ASSERT(instr->left()->representation().IsTagged());
ASSERT(instr->right()->representation().IsTagged());
LOperand* left = UseFixed(instr->left(), r1);
LOperand* right = UseFixed(instr->right(), r0);
- LCmpT* result = new LCmpT(left, right);
+ LCmpT* result = new(zone()) LCmpT(left, right);
return MarkAsCall(DefineFixed(result, r0), instr);
}
@@ -1412,14 +1476,14 @@ LInstruction* LChunkBuilder::DoCompareIDAndBranch(
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterOrConstantAtStart(instr->left());
LOperand* right = UseRegisterOrConstantAtStart(instr->right());
- return new LCmpIDAndBranch(left, right);
+ return new(zone()) LCmpIDAndBranch(left, right);
} else {
ASSERT(r.IsDouble());
ASSERT(instr->left()->representation().IsDouble());
ASSERT(instr->right()->representation().IsDouble());
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- return new LCmpIDAndBranch(left, right);
+ return new(zone()) LCmpIDAndBranch(left, right);
}
}
@@ -1428,47 +1492,70 @@ LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
HCompareObjectEqAndBranch* instr) {
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- return new LCmpObjectEqAndBranch(left, right);
+ return new(zone()) LCmpObjectEqAndBranch(left, right);
}
LInstruction* LChunkBuilder::DoCompareConstantEqAndBranch(
HCompareConstantEqAndBranch* instr) {
- return new LCmpConstantEqAndBranch(UseRegisterAtStart(instr->value()));
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return new(zone()) LCmpConstantEqAndBranch(value);
}
LInstruction* LChunkBuilder::DoIsNilAndBranch(HIsNilAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LIsNilAndBranch(UseRegisterAtStart(instr->value()));
+ return new(zone()) LIsNilAndBranch(UseRegisterAtStart(instr->value()));
}
LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
+ LOperand* value = UseRegisterAtStart(instr->value());
+ LOperand* temp = TempRegister();
+ return new(zone()) LIsObjectAndBranch(value, temp);
+}
+
+
+LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
+ ASSERT(instr->value()->representation().IsTagged());
+ LOperand* value = UseRegisterAtStart(instr->value());
LOperand* temp = TempRegister();
- return new LIsObjectAndBranch(UseRegisterAtStart(instr->value()), temp);
+ return new(zone()) LIsStringAndBranch(value, temp);
}
LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LIsSmiAndBranch(Use(instr->value()));
+ return new(zone()) LIsSmiAndBranch(Use(instr->value()));
}
LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
HIsUndetectableAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LIsUndetectableAndBranch(UseRegisterAtStart(instr->value()),
- TempRegister());
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return new(zone()) LIsUndetectableAndBranch(value, TempRegister());
+}
+
+
+LInstruction* LChunkBuilder::DoStringCompareAndBranch(
+ HStringCompareAndBranch* instr) {
+ ASSERT(instr->left()->representation().IsTagged());
+ ASSERT(instr->right()->representation().IsTagged());
+ LOperand* left = UseFixed(instr->left(), r1);
+ LOperand* right = UseFixed(instr->right(), r0);
+ LStringCompareAndBranch* result =
+ new(zone()) LStringCompareAndBranch(left, right);
+ return MarkAsCall(result, instr);
}
LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
HHasInstanceTypeAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LHasInstanceTypeAndBranch(UseRegisterAtStart(instr->value()));
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return new(zone()) LHasInstanceTypeAndBranch(value);
}
@@ -1477,14 +1564,14 @@ LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
ASSERT(instr->value()->representation().IsTagged());
LOperand* value = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LGetCachedArrayIndex(value));
+ return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
}
LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
HHasCachedArrayIndexAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LHasCachedArrayIndexAndBranch(
+ return new(zone()) LHasCachedArrayIndexAndBranch(
UseRegisterAtStart(instr->value()));
}
@@ -1492,40 +1579,48 @@ LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
HClassOfTestAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LClassOfTestAndBranch(UseTempRegister(instr->value()),
- TempRegister());
+ LOperand* value = UseRegister(instr->value());
+ return new(zone()) LClassOfTestAndBranch(value, TempRegister());
}
LInstruction* LChunkBuilder::DoJSArrayLength(HJSArrayLength* instr) {
LOperand* array = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LJSArrayLength(array));
+ return DefineAsRegister(new(zone()) LJSArrayLength(array));
}
LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
HFixedArrayBaseLength* instr) {
LOperand* array = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LFixedArrayBaseLength(array));
+ return DefineAsRegister(new(zone()) LFixedArrayBaseLength(array));
}
LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
LOperand* object = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LElementsKind(object));
+ return DefineAsRegister(new(zone()) LElementsKind(object));
}
LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
LOperand* object = UseRegister(instr->value());
- LValueOf* result = new LValueOf(object, TempRegister());
- return AssignEnvironment(DefineAsRegister(result));
+ LValueOf* result = new(zone()) LValueOf(object, TempRegister());
+ return DefineAsRegister(result);
+}
+
+
+LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
+ LOperand* object = UseFixed(instr->value(), r0);
+ LDateField* result = new LDateField(object, FixedTemp(r1), instr->index());
+ return MarkAsCall(DefineFixed(result, r0), instr);
}
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
- return AssignEnvironment(new LBoundsCheck(UseRegisterAtStart(instr->index()),
- UseRegister(instr->length())));
+ LOperand* value = UseRegisterAtStart(instr->index());
+ LOperand* length = UseRegister(instr->length());
+ return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
}
@@ -1538,7 +1633,7 @@ LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
LOperand* value = UseFixed(instr->value(), r0);
- return MarkAsCall(new LThrow(value), instr);
+ return MarkAsCall(new(zone()) LThrow(value), instr);
}
@@ -1561,22 +1656,25 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
if (from.IsTagged()) {
if (to.IsDouble()) {
LOperand* value = UseRegister(instr->value());
- LNumberUntagD* res = new LNumberUntagD(value);
+ LNumberUntagD* res = new(zone()) LNumberUntagD(value);
return AssignEnvironment(DefineAsRegister(res));
} else {
ASSERT(to.IsInteger32());
- LOperand* value = UseRegister(instr->value());
+ LOperand* value = UseRegisterAtStart(instr->value());
bool needs_check = !instr->value()->type().IsSmi();
LInstruction* res = NULL;
if (!needs_check) {
- res = DefineSameAsFirst(new LSmiUntag(value, needs_check));
+ res = DefineAsRegister(new(zone()) LSmiUntag(value, needs_check));
} else {
LOperand* temp1 = TempRegister();
LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister()
: NULL;
LOperand* temp3 = instr->CanTruncateToInt32() ? FixedTemp(d11)
: NULL;
- res = DefineSameAsFirst(new LTaggedToI(value, temp1, temp2, temp3));
+ res = DefineSameAsFirst(new(zone()) LTaggedToI(value,
+ temp1,
+ temp2,
+ temp3));
res = AssignEnvironment(res);
}
return res;
@@ -1590,32 +1688,31 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
// Make sure that the temp and result_temp registers are
// different.
LUnallocated* result_temp = TempRegister();
- LNumberTagD* result = new LNumberTagD(value, temp1, temp2);
+ LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
Define(result, result_temp);
return AssignPointerMap(result);
} else {
ASSERT(to.IsInteger32());
LOperand* value = UseRegister(instr->value());
- LDoubleToI* res =
- new LDoubleToI(value,
- TempRegister(),
- instr->CanTruncateToInt32() ? TempRegister() : NULL);
+ LOperand* temp1 = TempRegister();
+ LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister() : NULL;
+ LDoubleToI* res = new(zone()) LDoubleToI(value, temp1, temp2);
return AssignEnvironment(DefineAsRegister(res));
}
} else if (from.IsInteger32()) {
if (to.IsTagged()) {
HValue* val = instr->value();
- LOperand* value = UseRegister(val);
+ LOperand* value = UseRegisterAtStart(val);
if (val->HasRange() && val->range()->IsInSmiRange()) {
- return DefineSameAsFirst(new LSmiTag(value));
+ return DefineAsRegister(new(zone()) LSmiTag(value));
} else {
- LNumberTagI* result = new LNumberTagI(value);
- return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
+ LNumberTagI* result = new(zone()) LNumberTagI(value);
+ return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}
} else {
ASSERT(to.IsDouble());
LOperand* value = Use(instr->value());
- return DefineAsRegister(new LInteger32ToDouble(value));
+ return DefineAsRegister(new(zone()) LInteger32ToDouble(value));
}
}
UNREACHABLE();
@@ -1625,13 +1722,13 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
LInstruction* LChunkBuilder::DoCheckNonSmi(HCheckNonSmi* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- return AssignEnvironment(new LCheckNonSmi(value));
+ return AssignEnvironment(new(zone()) LCheckNonSmi(value));
}
LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- LInstruction* result = new LCheckInstanceType(value);
+ LInstruction* result = new(zone()) LCheckInstanceType(value);
return AssignEnvironment(result);
}
@@ -1639,26 +1736,26 @@ LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
LOperand* temp1 = TempRegister();
LOperand* temp2 = TempRegister();
- LInstruction* result = new LCheckPrototypeMaps(temp1, temp2);
+ LInstruction* result = new(zone()) LCheckPrototypeMaps(temp1, temp2);
return AssignEnvironment(result);
}
LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- return AssignEnvironment(new LCheckSmi(value));
+ return AssignEnvironment(new(zone()) LCheckSmi(value));
}
LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- return AssignEnvironment(new LCheckFunction(value));
+ return AssignEnvironment(new(zone()) LCheckFunction(value));
}
-LInstruction* LChunkBuilder::DoCheckMap(HCheckMap* instr) {
+LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- LInstruction* result = new LCheckMap(value);
+ LInstruction* result = new(zone()) LCheckMaps(value);
return AssignEnvironment(result);
}
@@ -1668,57 +1765,32 @@ LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
Representation input_rep = value->representation();
LOperand* reg = UseRegister(value);
if (input_rep.IsDouble()) {
- return DefineAsRegister(new LClampDToUint8(reg, FixedTemp(d11)));
+ return DefineAsRegister(new(zone()) LClampDToUint8(reg, FixedTemp(d11)));
} else if (input_rep.IsInteger32()) {
- return DefineAsRegister(new LClampIToUint8(reg));
+ return DefineAsRegister(new(zone()) LClampIToUint8(reg));
} else {
ASSERT(input_rep.IsTagged());
// Register allocator doesn't (yet) support allocation of double
// temps. Reserve d1 explicitly.
- LClampTToUint8* result = new LClampTToUint8(reg, FixedTemp(d11));
+ LClampTToUint8* result = new(zone()) LClampTToUint8(reg, FixedTemp(d11));
return AssignEnvironment(DefineAsRegister(result));
}
}
-LInstruction* LChunkBuilder::DoToInt32(HToInt32* instr) {
- HValue* value = instr->value();
- Representation input_rep = value->representation();
- LOperand* reg = UseRegister(value);
- if (input_rep.IsDouble()) {
- LOperand* temp1 = TempRegister();
- LOperand* temp2 = TempRegister();
- LDoubleToI* res = new LDoubleToI(reg, temp1, temp2);
- return AssignEnvironment(DefineAsRegister(res));
- } else if (input_rep.IsInteger32()) {
- // Canonicalization should already have removed the hydrogen instruction in
- // this case, since it is a noop.
- UNREACHABLE();
- return NULL;
- } else {
- ASSERT(input_rep.IsTagged());
- LOperand* temp1 = TempRegister();
- LOperand* temp2 = TempRegister();
- LOperand* temp3 = FixedTemp(d11);
- LTaggedToI* res = new LTaggedToI(reg, temp1, temp2, temp3);
- return AssignEnvironment(DefineSameAsFirst(res));
- }
-}
-
-
LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
- return new LReturn(UseFixed(instr->value(), r0));
+ return new(zone()) LReturn(UseFixed(instr->value(), r0));
}
LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
Representation r = instr->representation();
if (r.IsInteger32()) {
- return DefineAsRegister(new LConstantI);
+ return DefineAsRegister(new(zone()) LConstantI);
} else if (r.IsDouble()) {
- return DefineAsRegister(new LConstantD);
+ return DefineAsRegister(new(zone()) LConstantD);
} else if (r.IsTagged()) {
- return DefineAsRegister(new LConstantT);
+ return DefineAsRegister(new(zone()) LConstantT);
} else {
UNREACHABLE();
return NULL;
@@ -1727,7 +1799,7 @@ LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
- LLoadGlobalCell* result = new LLoadGlobalCell;
+ LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
return instr->RequiresHoleCheck()
? AssignEnvironment(DefineAsRegister(result))
: DefineAsRegister(result);
@@ -1736,17 +1808,18 @@ LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
LOperand* global_object = UseFixed(instr->global_object(), r0);
- LLoadGlobalGeneric* result = new LLoadGlobalGeneric(global_object);
+ LLoadGlobalGeneric* result = new(zone()) LLoadGlobalGeneric(global_object);
return MarkAsCall(DefineFixed(result, r0), instr);
}
LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
- LOperand* temp = TempRegister();
- LOperand* value = UseTempRegister(instr->value());
- LInstruction* result = new LStoreGlobalCell(value, temp);
- if (instr->RequiresHoleCheck()) result = AssignEnvironment(result);
- return result;
+ LOperand* value = UseRegister(instr->value());
+ // Use a temp to check the value in the cell in the case where we perform
+ // a hole check.
+ return instr->RequiresHoleCheck()
+ ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
+ : new(zone()) LStoreGlobalCell(value, NULL);
}
@@ -1754,14 +1827,16 @@ LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
LOperand* global_object = UseFixed(instr->global_object(), r1);
LOperand* value = UseFixed(instr->value(), r0);
LStoreGlobalGeneric* result =
- new LStoreGlobalGeneric(global_object, value);
+ new(zone()) LStoreGlobalGeneric(global_object, value);
return MarkAsCall(result, instr);
}
LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LLoadContextSlot(context));
+ LInstruction* result =
+ DefineAsRegister(new(zone()) LLoadContextSlot(context));
+ return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
@@ -1775,13 +1850,14 @@ LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
context = UseRegister(instr->context());
value = UseRegister(instr->value());
}
- return new LStoreContextSlot(context, value);
+ LInstruction* result = new(zone()) LStoreContextSlot(context, value);
+ return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
return DefineAsRegister(
- new LLoadNamedField(UseRegisterAtStart(instr->object())));
+ new(zone()) LLoadNamedField(UseRegisterAtStart(instr->object())));
}
@@ -1790,11 +1866,13 @@ LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
ASSERT(instr->representation().IsTagged());
if (instr->need_generic()) {
LOperand* obj = UseFixed(instr->object(), r0);
- LLoadNamedFieldPolymorphic* result = new LLoadNamedFieldPolymorphic(obj);
+ LLoadNamedFieldPolymorphic* result =
+ new(zone()) LLoadNamedFieldPolymorphic(obj);
return MarkAsCall(DefineFixed(result, r0), instr);
} else {
LOperand* obj = UseRegisterAtStart(instr->object());
- LLoadNamedFieldPolymorphic* result = new LLoadNamedFieldPolymorphic(obj);
+ LLoadNamedFieldPolymorphic* result =
+ new(zone()) LLoadNamedFieldPolymorphic(obj);
return AssignEnvironment(DefineAsRegister(result));
}
}
@@ -1802,7 +1880,7 @@ LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
LOperand* object = UseFixed(instr->object(), r0);
- LInstruction* result = DefineFixed(new LLoadNamedGeneric(object), r0);
+ LInstruction* result = DefineFixed(new(zone()) LLoadNamedGeneric(object), r0);
return MarkAsCall(result, instr);
}
@@ -1810,20 +1888,20 @@ LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
HLoadFunctionPrototype* instr) {
return AssignEnvironment(DefineAsRegister(
- new LLoadFunctionPrototype(UseRegister(instr->function()))));
+ new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
}
LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
LOperand* input = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LLoadElements(input));
+ return DefineAsRegister(new(zone()) LLoadElements(input));
}
LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
HLoadExternalArrayPointer* instr) {
LOperand* input = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LLoadExternalArrayPointer(input));
+ return DefineAsRegister(new(zone()) LLoadExternalArrayPointer(input));
}
@@ -1833,8 +1911,9 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
ASSERT(instr->key()->representation().IsInteger32());
LOperand* obj = UseRegisterAtStart(instr->object());
LOperand* key = UseRegisterAtStart(instr->key());
- LLoadKeyedFastElement* result = new LLoadKeyedFastElement(obj, key);
- return AssignEnvironment(DefineAsRegister(result));
+ LLoadKeyedFastElement* result = new(zone()) LLoadKeyedFastElement(obj, key);
+ if (instr->RequiresHoleCheck()) AssignEnvironment(result);
+ return DefineAsRegister(result);
}
@@ -1845,7 +1924,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
LOperand* elements = UseTempRegister(instr->elements());
LOperand* key = UseRegisterOrConstantAtStart(instr->key());
LLoadKeyedFastDoubleElement* result =
- new LLoadKeyedFastDoubleElement(elements, key);
+ new(zone()) LLoadKeyedFastDoubleElement(elements, key);
return AssignEnvironment(DefineAsRegister(result));
}
@@ -1853,19 +1932,18 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
HLoadKeyedSpecializedArrayElement* instr) {
ElementsKind elements_kind = instr->elements_kind();
- Representation representation(instr->representation());
ASSERT(
- (representation.IsInteger32() &&
+ (instr->representation().IsInteger32() &&
(elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
(elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
- (representation.IsDouble() &&
+ (instr->representation().IsDouble() &&
((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
(elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
ASSERT(instr->key()->representation().IsInteger32());
LOperand* external_pointer = UseRegister(instr->external_pointer());
LOperand* key = UseRegisterOrConstant(instr->key());
LLoadKeyedSpecializedArrayElement* result =
- new LLoadKeyedSpecializedArrayElement(external_pointer, key);
+ new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
LInstruction* load_instr = DefineAsRegister(result);
// An unsigned int array load might overflow and cause a deopt, make sure it
// has an environment.
@@ -1879,7 +1957,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
LOperand* key = UseFixed(instr->key(), r0);
LInstruction* result =
- DefineFixed(new LLoadKeyedGeneric(object, key), r0);
+ DefineFixed(new(zone()) LLoadKeyedGeneric(object, key), r0);
return MarkAsCall(result, instr);
}
@@ -1898,8 +1976,7 @@ LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
LOperand* key = needs_write_barrier
? UseTempRegister(instr->key())
: UseRegisterOrConstantAtStart(instr->key());
-
- return AssignEnvironment(new LStoreKeyedFastElement(obj, key, val));
+ return new(zone()) LStoreKeyedFastElement(obj, key, val);
}
@@ -1913,19 +1990,18 @@ LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
LOperand* val = UseTempRegister(instr->value());
LOperand* key = UseRegisterOrConstantAtStart(instr->key());
- return new LStoreKeyedFastDoubleElement(elements, key, val);
+ return new(zone()) LStoreKeyedFastDoubleElement(elements, key, val);
}
LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
HStoreKeyedSpecializedArrayElement* instr) {
- Representation representation(instr->value()->representation());
ElementsKind elements_kind = instr->elements_kind();
ASSERT(
- (representation.IsInteger32() &&
+ (instr->value()->representation().IsInteger32() &&
(elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
(elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
- (representation.IsDouble() &&
+ (instr->value()->representation().IsDouble() &&
((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
(elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
ASSERT(instr->external_pointer()->representation().IsExternal());
@@ -1940,9 +2016,9 @@ LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
: UseRegister(instr->value());
LOperand* key = UseRegisterOrConstant(instr->key());
- return new LStoreKeyedSpecializedArrayElement(external_pointer,
- key,
- val);
+ return new(zone()) LStoreKeyedSpecializedArrayElement(external_pointer,
+ key,
+ val);
}
@@ -1955,7 +2031,7 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
ASSERT(instr->key()->representation().IsTagged());
ASSERT(instr->value()->representation().IsTagged());
- return MarkAsCall(new LStoreKeyedGeneric(obj, key, val), instr);
+ return MarkAsCall(new(zone()) LStoreKeyedGeneric(obj, key, val), instr);
}
@@ -1966,14 +2042,16 @@ LInstruction* LChunkBuilder::DoTransitionElementsKind(
LOperand* object = UseRegister(instr->object());
LOperand* new_map_reg = TempRegister();
LTransitionElementsKind* result =
- new LTransitionElementsKind(object, new_map_reg, NULL);
+ new(zone()) LTransitionElementsKind(object, new_map_reg, NULL);
return DefineSameAsFirst(result);
} else {
LOperand* object = UseFixed(instr->object(), r0);
LOperand* fixed_object_reg = FixedTemp(r2);
LOperand* new_map_reg = FixedTemp(r3);
LTransitionElementsKind* result =
- new LTransitionElementsKind(object, new_map_reg, fixed_object_reg);
+ new(zone()) LTransitionElementsKind(object,
+ new_map_reg,
+ fixed_object_reg);
return MarkAsCall(DefineFixed(result, r0), instr);
}
}
@@ -1990,7 +2068,7 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
? UseTempRegister(instr->value())
: UseRegister(instr->value());
- return new LStoreNamedField(obj, val);
+ return new(zone()) LStoreNamedField(obj, val);
}
@@ -1998,7 +2076,7 @@ LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
LOperand* obj = UseFixed(instr->object(), r1);
LOperand* val = UseFixed(instr->value(), r0);
- LInstruction* result = new LStoreNamedGeneric(obj, val);
+ LInstruction* result = new(zone()) LStoreNamedGeneric(obj, val);
return MarkAsCall(result, instr);
}
@@ -2006,55 +2084,67 @@ LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- return MarkAsCall(DefineFixed(new LStringAdd(left, right), r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LStringAdd(left, right), r0),
+ instr);
}
LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
LOperand* string = UseTempRegister(instr->string());
LOperand* index = UseTempRegister(instr->index());
- LStringCharCodeAt* result = new LStringCharCodeAt(string, index);
+ LStringCharCodeAt* result = new(zone()) LStringCharCodeAt(string, index);
return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}
LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
LOperand* char_code = UseRegister(instr->value());
- LStringCharFromCode* result = new LStringCharFromCode(char_code);
+ LStringCharFromCode* result = new(zone()) LStringCharFromCode(char_code);
return AssignPointerMap(DefineAsRegister(result));
}
LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
LOperand* string = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LStringLength(string));
+ return DefineAsRegister(new(zone()) LStringLength(string));
+}
+
+
+LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
+ LAllocateObject* result = new LAllocateObject(TempRegister(), TempRegister());
+ return AssignPointerMap(DefineAsRegister(result));
+}
+
+
+LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
+ return MarkAsCall(DefineFixed(new(zone()) LFastLiteral, r0), instr);
}
LInstruction* LChunkBuilder::DoArrayLiteral(HArrayLiteral* instr) {
- return MarkAsCall(DefineFixed(new LArrayLiteral, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LArrayLiteral, r0), instr);
}
LInstruction* LChunkBuilder::DoObjectLiteral(HObjectLiteral* instr) {
- return MarkAsCall(DefineFixed(new LObjectLiteral, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LObjectLiteral, r0), instr);
}
LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
- return MarkAsCall(DefineFixed(new LRegExpLiteral, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LRegExpLiteral, r0), instr);
}
LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
- return MarkAsCall(DefineFixed(new LFunctionLiteral, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LFunctionLiteral, r0), instr);
}
LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
LOperand* object = UseFixed(instr->object(), r0);
LOperand* key = UseFixed(instr->key(), r1);
- LDeleteProperty* result = new LDeleteProperty(object, key);
+ LDeleteProperty* result = new(zone()) LDeleteProperty(object, key);
return MarkAsCall(DefineFixed(result, r0), instr);
}
@@ -2062,13 +2152,13 @@ LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
allocator_->MarkAsOsrEntry();
current_block_->last_environment()->set_ast_id(instr->ast_id());
- return AssignEnvironment(new LOsrEntry);
+ return AssignEnvironment(new(zone()) LOsrEntry);
}
LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
int spill_index = chunk()->GetParameterStackSlot(instr->index());
- return DefineAsSpilled(new LParameter, spill_index);
+ return DefineAsSpilled(new(zone()) LParameter, spill_index);
}
@@ -2078,13 +2168,13 @@ LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
Abort("Too many spill slots needed for OSR");
spill_index = 0;
}
- return DefineAsSpilled(new LUnknownOSRValue, spill_index);
+ return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
}
LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallStub, r0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallStub, r0), instr);
}
@@ -2101,32 +2191,33 @@ LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
LOperand* arguments = UseRegister(instr->arguments());
LOperand* length = UseTempRegister(instr->length());
LOperand* index = UseRegister(instr->index());
- LAccessArgumentsAt* result = new LAccessArgumentsAt(arguments, length, index);
+ LAccessArgumentsAt* result =
+ new(zone()) LAccessArgumentsAt(arguments, length, index);
return AssignEnvironment(DefineAsRegister(result));
}
LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
LOperand* object = UseFixed(instr->value(), r0);
- LToFastProperties* result = new LToFastProperties(object);
+ LToFastProperties* result = new(zone()) LToFastProperties(object);
return MarkAsCall(DefineFixed(result, r0), instr);
}
LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
- LTypeof* result = new LTypeof(UseFixed(instr->value(), r0));
+ LTypeof* result = new(zone()) LTypeof(UseFixed(instr->value(), r0));
return MarkAsCall(DefineFixed(result, r0), instr);
}
LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
- return new LTypeofIsAndBranch(UseTempRegister(instr->value()));
+ return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
}
LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
HIsConstructCallAndBranch* instr) {
- return new LIsConstructCallAndBranch(TempRegister());
+ return new(zone()) LIsConstructCallAndBranch(TempRegister());
}
@@ -2149,7 +2240,7 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
// If there is an instruction pending deoptimization environment create a
// lazy bailout instruction to capture the environment.
if (pending_deoptimization_ast_id_ == instr->ast_id()) {
- LInstruction* result = new LLazyBailout;
+ LInstruction* result = new(zone()) LLazyBailout;
result = AssignEnvironment(result);
instruction_pending_deoptimization_environment_->
set_deoptimization_environment(result->environment());
@@ -2163,10 +2254,10 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
if (instr->is_function_entry()) {
- return MarkAsCall(new LStackCheck, instr);
+ return MarkAsCall(new(zone()) LStackCheck, instr);
} else {
ASSERT(instr->is_backwards_branch());
- return AssignEnvironment(AssignPointerMap(new LStackCheck));
+ return AssignEnvironment(AssignPointerMap(new(zone()) LStackCheck));
}
}
@@ -2175,9 +2266,14 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
HEnvironment* outer = current_block_->last_environment();
HConstant* undefined = graph()->GetConstantUndefined();
HEnvironment* inner = outer->CopyForInlining(instr->closure(),
+ instr->arguments_count(),
instr->function(),
undefined,
- instr->call_kind());
+ instr->call_kind(),
+ instr->is_construct());
+ if (instr->arguments() != NULL) {
+ inner->Bind(instr->arguments(), graph()->GetArgumentsObject());
+ }
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
return NULL;
@@ -2185,7 +2281,8 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
- HEnvironment* outer = current_block_->last_environment()->outer();
+ HEnvironment* outer = current_block_->last_environment()->
+ DiscardInlined(false);
current_block_->UpdateEnvironment(outer);
return NULL;
}
@@ -2194,9 +2291,37 @@ LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
LInstruction* LChunkBuilder::DoIn(HIn* instr) {
LOperand* key = UseRegisterAtStart(instr->key());
LOperand* object = UseRegisterAtStart(instr->object());
- LIn* result = new LIn(key, object);
+ LIn* result = new(zone()) LIn(key, object);
return MarkAsCall(DefineFixed(result, r0), instr);
}
+LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
+ LOperand* object = UseFixed(instr->enumerable(), r0);
+ LForInPrepareMap* result = new(zone()) LForInPrepareMap(object);
+ return MarkAsCall(DefineFixed(result, r0), instr, CAN_DEOPTIMIZE_EAGERLY);
+}
+
+
+LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
+ LOperand* map = UseRegister(instr->map());
+ return AssignEnvironment(DefineAsRegister(
+ new(zone()) LForInCacheArray(map)));
+}
+
+
+LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
+ LOperand* value = UseRegisterAtStart(instr->value());
+ LOperand* map = UseRegisterAtStart(instr->map());
+ return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
+}
+
+
+LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
+ LOperand* object = UseRegister(instr->object());
+ LOperand* index = UseRegister(instr->index());
+ return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index));
+}
+
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/arm/lithium-arm.h b/src/3rdparty/v8/src/arm/lithium-arm.h
index 8f4e97d..a919a1d 100644
--- a/src/3rdparty/v8/src/arm/lithium-arm.h
+++ b/src/3rdparty/v8/src/arm/lithium-arm.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -49,6 +49,7 @@ class LCodeGen;
#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V) \
V(AccessArgumentsAt) \
V(AddI) \
+ V(AllocateObject) \
V(ApplyArguments) \
V(ArgumentsElements) \
V(ArgumentsLength) \
@@ -71,7 +72,7 @@ class LCodeGen;
V(CheckFunction) \
V(CheckInstanceType) \
V(CheckNonSmi) \
- V(CheckMap) \
+ V(CheckMaps) \
V(CheckPrototypeMaps) \
V(CheckSmi) \
V(ClampDToUint8) \
@@ -87,11 +88,13 @@ class LCodeGen;
V(ConstantI) \
V(ConstantT) \
V(Context) \
+ V(DeclareGlobals) \
V(DeleteProperty) \
V(Deoptimize) \
V(DivI) \
V(DoubleToI) \
V(ElementsKind) \
+ V(FastLiteral) \
V(FixedArrayBaseLength) \
V(FunctionLiteral) \
V(GetCachedArrayIndex) \
@@ -109,8 +112,10 @@ class LCodeGen;
V(IsConstructCallAndBranch) \
V(IsNilAndBranch) \
V(IsObjectAndBranch) \
+ V(IsStringAndBranch) \
V(IsSmiAndBranch) \
V(IsUndetectableAndBranch) \
+ V(StringCompareAndBranch) \
V(JSArrayLength) \
V(Label) \
V(LazyBailout) \
@@ -138,6 +143,7 @@ class LCodeGen;
V(Parameter) \
V(Power) \
V(PushArgument) \
+ V(Random) \
V(RegExpLiteral) \
V(Return) \
V(ShiftI) \
@@ -167,7 +173,13 @@ class LCodeGen;
V(TypeofIsAndBranch) \
V(UnaryMathOperation) \
V(UnknownOSRValue) \
- V(ValueOf)
+ V(ValueOf) \
+ V(ForInPrepareMap) \
+ V(ForInCacheArray) \
+ V(CheckMapValue) \
+ V(LoadFieldByIndex) \
+ V(DateField) \
+ V(WrapReceiver)
#define DECLARE_CONCRETE_INSTRUCTION(type, mnemonic) \
@@ -457,6 +469,20 @@ class LControlInstruction: public LTemplateInstruction<0, I, T> {
};
+class LWrapReceiver: public LTemplateInstruction<1, 2, 0> {
+ public:
+ LWrapReceiver(LOperand* receiver, LOperand* function) {
+ inputs_[0] = receiver;
+ inputs_[1] = function;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(WrapReceiver, "wrap-receiver")
+
+ LOperand* receiver() { return inputs_[0]; }
+ LOperand* function() { return inputs_[1]; }
+};
+
+
class LApplyArguments: public LTemplateInstruction<1, 4, 0> {
public:
LApplyArguments(LOperand* function,
@@ -658,6 +684,20 @@ class LIsObjectAndBranch: public LControlInstruction<1, 1> {
};
+class LIsStringAndBranch: public LControlInstruction<1, 1> {
+ public:
+ LIsStringAndBranch(LOperand* value, LOperand* temp) {
+ inputs_[0] = value;
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(IsStringAndBranch, "is-string-and-branch")
+ DECLARE_HYDROGEN_ACCESSOR(IsStringAndBranch)
+
+ virtual void PrintDataTo(StringStream* stream);
+};
+
+
class LIsSmiAndBranch: public LControlInstruction<1, 0> {
public:
explicit LIsSmiAndBranch(LOperand* value) {
@@ -686,6 +726,23 @@ class LIsUndetectableAndBranch: public LControlInstruction<1, 1> {
};
+class LStringCompareAndBranch: public LControlInstruction<2, 0> {
+ public:
+ LStringCompareAndBranch(LOperand* left, LOperand* right) {
+ inputs_[0] = left;
+ inputs_[1] = right;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(StringCompareAndBranch,
+ "string-compare-and-branch")
+ DECLARE_HYDROGEN_ACCESSOR(StringCompareAndBranch)
+
+ Token::Value op() const { return hydrogen()->token(); }
+
+ virtual void PrintDataTo(StringStream* stream);
+};
+
+
class LHasInstanceTypeAndBranch: public LControlInstruction<1, 0> {
public:
explicit LHasInstanceTypeAndBranch(LOperand* value) {
@@ -948,6 +1005,41 @@ class LValueOf: public LTemplateInstruction<1, 1, 1> {
};
+class LDateField: public LTemplateInstruction<1, 1, 1> {
+ public:
+ LDateField(LOperand* date, LOperand* temp, Smi* index) : index_(index) {
+ inputs_[0] = date;
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(ValueOf, "date-field")
+ DECLARE_HYDROGEN_ACCESSOR(ValueOf)
+ Smi* index() const { return index_; }
+
+ private:
+ Smi* index_;
+};
+
+
+class LSetDateField: public LTemplateInstruction<1, 2, 1> {
+ public:
+ LSetDateField(LOperand* date, LOperand* value, LOperand* temp, int index)
+ : index_(index) {
+ inputs_[0] = date;
+ inputs_[1] = value;
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(DateField, "date-set-field")
+ DECLARE_HYDROGEN_ACCESSOR(DateField)
+
+ int index() const { return index_; }
+
+ private:
+ int index_;
+};
+
+
class LThrow: public LTemplateInstruction<0, 1, 0> {
public:
explicit LThrow(LOperand* value) {
@@ -992,6 +1084,17 @@ class LPower: public LTemplateInstruction<1, 2, 0> {
};
+class LRandom: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LRandom(LOperand* global_object) {
+ inputs_[0] = global_object;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(Random, "random")
+ DECLARE_HYDROGEN_ACCESSOR(Random)
+};
+
+
class LArithmeticD: public LTemplateInstruction<1, 2, 0> {
public:
LArithmeticD(Token::Value op, LOperand* left, LOperand* right)
@@ -1208,6 +1311,8 @@ class LStoreGlobalCell: public LTemplateInstruction<0, 1, 1> {
DECLARE_CONCRETE_INSTRUCTION(StoreGlobalCell, "store-global-cell")
DECLARE_HYDROGEN_ACCESSOR(StoreGlobalCell)
+
+ LOperand* value() { return inputs_[0]; }
};
@@ -1225,7 +1330,7 @@ class LStoreGlobalGeneric: public LTemplateInstruction<0, 2, 0> {
LOperand* global_object() { return InputAt(0); }
Handle<Object> name() const { return hydrogen()->name(); }
LOperand* value() { return InputAt(1); }
- bool strict_mode() { return hydrogen()->strict_mode(); }
+ StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
@@ -1298,19 +1403,22 @@ class LOuterContext: public LTemplateInstruction<1, 1, 0> {
};
+class LDeclareGlobals: public LTemplateInstruction<0, 0, 0> {
+ public:
+ DECLARE_CONCRETE_INSTRUCTION(DeclareGlobals, "declare-globals")
+ DECLARE_HYDROGEN_ACCESSOR(DeclareGlobals)
+};
+
+
class LGlobalObject: public LTemplateInstruction<1, 1, 0> {
public:
- explicit LGlobalObject(LOperand* context, bool qml_global) {
+ explicit LGlobalObject(LOperand* context) {
inputs_[0] = context;
- qml_global_ = qml_global;
}
DECLARE_CONCRETE_INSTRUCTION(GlobalObject, "global-object")
LOperand* context() { return InputAt(0); }
- bool qml_global() { return qml_global_; }
- private:
- bool qml_global_;
};
@@ -1383,12 +1491,17 @@ class LCallNamed: public LTemplateInstruction<1, 0, 0> {
};
-class LCallFunction: public LTemplateInstruction<1, 0, 0> {
+class LCallFunction: public LTemplateInstruction<1, 1, 0> {
public:
+ explicit LCallFunction(LOperand* function) {
+ inputs_[0] = function;
+ }
+
DECLARE_CONCRETE_INSTRUCTION(CallFunction, "call-function")
DECLARE_HYDROGEN_ACCESSOR(CallFunction)
- int arity() const { return hydrogen()->argument_count() - 2; }
+ LOperand* function() { return inputs_[0]; }
+ int arity() const { return hydrogen()->argument_count() - 1; }
};
@@ -1397,16 +1510,10 @@ class LCallGlobal: public LTemplateInstruction<1, 0, 0> {
DECLARE_CONCRETE_INSTRUCTION(CallGlobal, "call-global")
DECLARE_HYDROGEN_ACCESSOR(CallGlobal)
- explicit LCallGlobal(bool qml_global) : qml_global_(qml_global) {}
-
virtual void PrintDataTo(StringStream* stream);
Handle<String> name() const {return hydrogen()->name(); }
int arity() const { return hydrogen()->argument_count() - 1; }
-
- bool qml_global() { return qml_global_; }
- private:
- bool qml_global_;
};
@@ -1590,7 +1697,6 @@ class LStoreNamedGeneric: public LTemplateInstruction<0, 2, 0> {
LOperand* value() { return inputs_[1]; }
Handle<Object> name() const { return hydrogen()->name(); }
StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
- bool strict_mode() { return strict_mode_flag() == kStrictMode; }
};
@@ -1652,7 +1758,7 @@ class LStoreKeyedGeneric: public LTemplateInstruction<0, 3, 0> {
LOperand* object() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
LOperand* value() { return inputs_[2]; }
- bool strict_mode() { return hydrogen()->strict_mode(); }
+ StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
class LStoreKeyedSpecializedArrayElement: public LTemplateInstruction<0, 3, 0> {
@@ -1765,6 +1871,8 @@ class LCheckFunction: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return InputAt(0); }
+
DECLARE_CONCRETE_INSTRUCTION(CheckFunction, "check-function")
DECLARE_HYDROGEN_ACCESSOR(CheckFunction)
};
@@ -1781,14 +1889,14 @@ class LCheckInstanceType: public LTemplateInstruction<0, 1, 0> {
};
-class LCheckMap: public LTemplateInstruction<0, 1, 0> {
+class LCheckMaps: public LTemplateInstruction<0, 1, 0> {
public:
- explicit LCheckMap(LOperand* value) {
+ explicit LCheckMaps(LOperand* value) {
inputs_[0] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(CheckMap, "check-map")
- DECLARE_HYDROGEN_ACCESSOR(CheckMap)
+ DECLARE_CONCRETE_INSTRUCTION(CheckMaps, "check-maps")
+ DECLARE_HYDROGEN_ACCESSOR(CheckMaps)
};
@@ -1865,6 +1973,25 @@ class LClampTToUint8: public LTemplateInstruction<1, 1, 1> {
};
+class LAllocateObject: public LTemplateInstruction<1, 0, 2> {
+ public:
+ LAllocateObject(LOperand* temp1, LOperand* temp2) {
+ temps_[0] = temp1;
+ temps_[1] = temp2;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(AllocateObject, "allocate-object")
+ DECLARE_HYDROGEN_ACCESSOR(AllocateObject)
+};
+
+
+class LFastLiteral: public LTemplateInstruction<1, 0, 0> {
+ public:
+ DECLARE_CONCRETE_INSTRUCTION(FastLiteral, "fast-literal")
+ DECLARE_HYDROGEN_ACCESSOR(FastLiteral)
+};
+
+
class LArrayLiteral: public LTemplateInstruction<1, 0, 0> {
public:
DECLARE_CONCRETE_INSTRUCTION(ArrayLiteral, "array-literal")
@@ -2005,6 +2132,62 @@ class LIn: public LTemplateInstruction<1, 2, 0> {
};
+class LForInPrepareMap: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LForInPrepareMap(LOperand* object) {
+ inputs_[0] = object;
+ }
+
+ LOperand* object() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(ForInPrepareMap, "for-in-prepare-map")
+};
+
+
+class LForInCacheArray: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LForInCacheArray(LOperand* map) {
+ inputs_[0] = map;
+ }
+
+ LOperand* map() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(ForInCacheArray, "for-in-cache-array")
+
+ int idx() {
+ return HForInCacheArray::cast(this->hydrogen_value())->idx();
+ }
+};
+
+
+class LCheckMapValue: public LTemplateInstruction<0, 2, 0> {
+ public:
+ LCheckMapValue(LOperand* value, LOperand* map) {
+ inputs_[0] = value;
+ inputs_[1] = map;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+ LOperand* map() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(CheckMapValue, "check-map-value")
+};
+
+
+class LLoadFieldByIndex: public LTemplateInstruction<1, 2, 0> {
+ public:
+ LLoadFieldByIndex(LOperand* object, LOperand* index) {
+ inputs_[0] = object;
+ inputs_[1] = index;
+ }
+
+ LOperand* object() { return inputs_[0]; }
+ LOperand* index() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadFieldByIndex, "load-field-by-index")
+};
+
+
class LChunkBuilder;
class LChunk: public ZoneObject {
public:
@@ -2072,6 +2255,7 @@ class LChunkBuilder BASE_EMBEDDED {
: chunk_(NULL),
info_(info),
graph_(graph),
+ zone_(graph->isolate()->zone()),
status_(UNUSED),
current_instruction_(NULL),
current_block_(NULL),
@@ -2101,6 +2285,7 @@ class LChunkBuilder BASE_EMBEDDED {
LChunk* chunk() const { return chunk_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
+ Zone* zone() const { return zone_; }
bool is_unused() const { return status_ == UNUSED; }
bool is_building() const { return status_ == BUILDING; }
@@ -2110,7 +2295,6 @@ class LChunkBuilder BASE_EMBEDDED {
void Abort(const char* format, ...);
// Methods for getting operands for Use / Define / Temp.
- LRegister* ToOperand(Register reg);
LUnallocated* ToUnallocated(Register reg);
LUnallocated* ToUnallocated(DoubleRegister reg);
@@ -2161,8 +2345,6 @@ class LChunkBuilder BASE_EMBEDDED {
LInstruction* Define(LTemplateInstruction<1, I, T>* instr,
LUnallocated* result);
template<int I, int T>
- LInstruction* Define(LTemplateInstruction<1, I, T>* instr);
- template<int I, int T>
LInstruction* DefineAsRegister(LTemplateInstruction<1, I, T>* instr);
template<int I, int T>
LInstruction* DefineAsSpilled(LTemplateInstruction<1, I, T>* instr,
@@ -2208,6 +2390,7 @@ class LChunkBuilder BASE_EMBEDDED {
LChunk* chunk_;
CompilationInfo* info_;
HGraph* const graph_;
+ Zone* zone_;
Status status_;
HInstruction* current_instruction_;
HBasicBlock* current_block_;
diff --git a/src/3rdparty/v8/src/arm/lithium-codegen-arm.cc b/src/3rdparty/v8/src/arm/lithium-codegen-arm.cc
index 2a80ff9..7c37e8e 100644
--- a/src/3rdparty/v8/src/arm/lithium-codegen-arm.cc
+++ b/src/3rdparty/v8/src/arm/lithium-codegen-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -40,44 +40,29 @@ class SafepointGenerator : public CallWrapper {
public:
SafepointGenerator(LCodeGen* codegen,
LPointerMap* pointers,
- int deoptimization_index)
+ Safepoint::DeoptMode mode)
: codegen_(codegen),
pointers_(pointers),
- deoptimization_index_(deoptimization_index) { }
+ deopt_mode_(mode) { }
virtual ~SafepointGenerator() { }
- virtual void BeforeCall(int call_size) const {
- ASSERT(call_size >= 0);
- // Ensure that we have enough space after the previous safepoint position
- // for the generated code there.
- int call_end = codegen_->masm()->pc_offset() + call_size;
- int prev_jump_end =
- codegen_->LastSafepointEnd() + Deoptimizer::patch_size();
- if (call_end < prev_jump_end) {
- int padding_size = prev_jump_end - call_end;
- ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
- while (padding_size > 0) {
- codegen_->masm()->nop();
- padding_size -= Assembler::kInstrSize;
- }
- }
- }
+ virtual void BeforeCall(int call_size) const { }
virtual void AfterCall() const {
- codegen_->RecordSafepoint(pointers_, deoptimization_index_);
+ codegen_->RecordSafepoint(pointers_, deopt_mode_);
}
private:
LCodeGen* codegen_;
LPointerMap* pointers_;
- int deoptimization_index_;
+ Safepoint::DeoptMode deopt_mode_;
};
#define __ masm()->
bool LCodeGen::GenerateCode() {
- HPhase phase("Code generation", chunk());
+ HPhase phase("Z_Code generation", chunk());
ASSERT(is_unused());
status_ = GENERATING;
CpuFeatures::Scope scope1(VFP3);
@@ -103,7 +88,6 @@ void LCodeGen::FinishCode(Handle<Code> code) {
code->set_stack_slots(GetStackSlotCount());
code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
PopulateDeoptimizationData(code);
- Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
}
@@ -159,7 +143,7 @@ bool LCodeGen::GeneratePrologue() {
// with undefined when called as functions (without an explicit
// receiver object). r5 is zero for method calls and non-zero for
// function calls.
- if (info_->is_strict_mode() || info_->is_native()) {
+ if (!info_->is_classic_mode() || info_->is_native()) {
Label ok;
__ cmp(r5, Operand(0));
__ b(eq, &ok);
@@ -190,18 +174,17 @@ bool LCodeGen::GeneratePrologue() {
// Possibly allocate a local context.
int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
- if (heap_slots > 0 ||
- (scope()->is_qml_mode() && scope()->is_global_scope())) {
+ if (heap_slots > 0) {
Comment(";;; Allocate local context");
// Argument to NewContext is the function, which is in r1.
__ push(r1);
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub((heap_slots < 0)?0:heap_slots);
+ FastNewContextStub stub(heap_slots);
__ CallStub(&stub);
} else {
__ CallRuntime(Runtime::kNewFunctionContext, 1);
}
- RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
+ RecordSafepoint(Safepoint::kNoLazyDeopt);
// Context is returned in both r0 and cp. It replaces the context
// passed to us. It's saved in the stack and kept live in cp.
__ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -250,19 +233,11 @@ bool LCodeGen::GenerateBody() {
instr->CompileToNative(this);
}
}
+ EnsureSpaceForLazyDeopt();
return !is_aborted();
}
-LInstruction* LCodeGen::GetNextInstruction() {
- if (current_instruction_ < instructions_->length() - 1) {
- return instructions_->at(current_instruction_ + 1);
- } else {
- return NULL;
- }
-}
-
-
bool LCodeGen::GenerateDeferredCode() {
ASSERT(is_generating());
if (deferred_.length() > 0) {
@@ -275,13 +250,6 @@ bool LCodeGen::GenerateDeferredCode() {
code->Generate();
__ jmp(code->exit());
}
-
- // Pad code to ensure that the last piece of deferred code have
- // room for lazy bailout.
- while ((masm()->pc_offset() - LastSafepointEnd())
- < Deoptimizer::patch_size()) {
- __ nop();
- }
}
// Force constant pool emission at the end of the deferred code to make
@@ -294,7 +262,7 @@ bool LCodeGen::GenerateDeferredCode() {
bool LCodeGen::GenerateDeoptJumpTable() {
// Check that the jump table is accessible from everywhere in the function
- // code, ie that offsets to the table can be encoded in the 24bit signed
+ // code, i.e. that offsets to the table can be encoded in the 24bit signed
// immediate of a branch instruction.
// To simplify we consider the code size from the first instruction to the
// end of the jump table. We also don't consider the pc load delta.
@@ -353,7 +321,22 @@ Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
if (op->IsRegister()) {
return ToRegister(op->index());
} else if (op->IsConstantOperand()) {
- __ mov(scratch, ToOperand(op));
+ LConstantOperand* const_op = LConstantOperand::cast(op);
+ Handle<Object> literal = chunk_->LookupLiteral(const_op);
+ Representation r = chunk_->LookupLiteralRepresentation(const_op);
+ if (r.IsInteger32()) {
+ ASSERT(literal->IsNumber());
+ __ mov(scratch, Operand(static_cast<int32_t>(literal->Number())));
+ } else if (r.IsDouble()) {
+ Abort("EmitLoadRegister: Unsupported double immediate.");
+ } else {
+ ASSERT(r.IsTagged());
+ if (literal->IsSmi()) {
+ __ mov(scratch, Operand(literal));
+ } else {
+ __ LoadHeapObject(scratch, Handle<HeapObject>::cast(literal));
+ }
+ }
return scratch;
} else if (op->IsStackSlot() || op->IsArgument()) {
__ ldr(scratch, ToMemOperand(op));
@@ -402,6 +385,18 @@ DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
}
+Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
+ Handle<Object> literal = chunk_->LookupLiteral(op);
+ ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
+ return literal;
+}
+
+
+bool LCodeGen::IsInteger32(LConstantOperand* op) const {
+ return chunk_->LookupLiteralRepresentation(op).IsInteger32();
+}
+
+
int LCodeGen::ToInteger32(LConstantOperand* op) const {
Handle<Object> value = chunk_->LookupLiteral(op);
ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
@@ -484,7 +479,19 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
WriteTranslation(environment->outer(), translation);
int closure_id = DefineDeoptimizationLiteral(environment->closure());
- translation->BeginFrame(environment->ast_id(), closure_id, height);
+ switch (environment->frame_type()) {
+ case JS_FUNCTION:
+ translation->BeginJSFrame(environment->ast_id(), closure_id, height);
+ break;
+ case JS_CONSTRUCT:
+ translation->BeginConstructStubFrame(closure_id, translation_size);
+ break;
+ case ARGUMENTS_ADAPTOR:
+ translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
+ break;
+ default:
+ UNREACHABLE();
+ }
for (int i = 0; i < translation_size; ++i) {
LOperand* value = environment->values()->at(i);
// spilled_registers_ and spilled_double_registers_ are either
@@ -567,7 +574,7 @@ void LCodeGen::CallCodeGeneric(Handle<Code> code,
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
__ Call(code, mode);
- RegisterLazyDeoptimization(instr, safepoint_mode);
+ RecordSafepointWithLazyDeopt(instr, safepoint_mode);
// Signal that we don't inline smi code before these stubs in the
// optimizing code generator.
@@ -587,7 +594,7 @@ void LCodeGen::CallRuntime(const Runtime::Function* function,
RecordPosition(pointers->position());
__ CallRuntime(function, num_arguments);
- RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
+ RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
}
@@ -596,37 +603,12 @@ void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
LInstruction* instr) {
__ CallRuntimeSaveDoubles(id);
RecordSafepointWithRegisters(
- instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
-}
-
-
-void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
- SafepointMode safepoint_mode) {
- // Create the environment to bailout to. If the call has side effects
- // execution has to continue after the call otherwise execution can continue
- // from a previous bailout point repeating the call.
- LEnvironment* deoptimization_environment;
- if (instr->HasDeoptimizationEnvironment()) {
- deoptimization_environment = instr->deoptimization_environment();
- } else {
- deoptimization_environment = instr->environment();
- }
-
- RegisterEnvironmentForDeoptimization(deoptimization_environment);
- if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
- RecordSafepoint(instr->pointer_map(),
- deoptimization_environment->deoptimization_index());
- } else {
- ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
- RecordSafepointWithRegisters(
- instr->pointer_map(),
- 0,
- deoptimization_environment->deoptimization_index());
- }
+ instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
}
-void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
+void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
+ Safepoint::DeoptMode mode) {
if (!environment->HasBeenRegistered()) {
// Physical stack frame layout:
// -x ............. -4 0 ..................................... y
@@ -642,24 +624,30 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
// |>------------ translation_size ------------<|
int frame_count = 0;
+ int jsframe_count = 0;
for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
++frame_count;
+ if (e->frame_type() == JS_FUNCTION) {
+ ++jsframe_count;
+ }
}
- Translation translation(&translations_, frame_count);
+ Translation translation(&translations_, frame_count, jsframe_count);
WriteTranslation(environment, &translation);
int deoptimization_index = deoptimizations_.length();
- environment->Register(deoptimization_index, translation.index());
+ int pc_offset = masm()->pc_offset();
+ environment->Register(deoptimization_index,
+ translation.index(),
+ (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
deoptimizations_.Add(environment);
}
}
void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
- RegisterEnvironmentForDeoptimization(environment);
+ RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
ASSERT(environment->HasBeenRegistered());
int id = environment->deoptimization_index();
Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
- ASSERT(entry != NULL);
if (entry == NULL) {
Abort("bailout was not prepared");
return;
@@ -692,7 +680,6 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
int length = deoptimizations_.length();
if (length == 0) return;
- ASSERT(FLAG_deopt);
Handle<DeoptimizationInputData> data =
factory()->NewDeoptimizationInputData(length, TENURED);
@@ -717,6 +704,7 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
data->SetArgumentsStackHeight(i,
Smi::FromInt(env->arguments_stack_height()));
+ data->SetPc(i, Smi::FromInt(env->pc_offset()));
}
code->set_deoptimization_data(*data);
}
@@ -748,16 +736,28 @@ void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
}
+void LCodeGen::RecordSafepointWithLazyDeopt(
+ LInstruction* instr, SafepointMode safepoint_mode) {
+ if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
+ RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
+ } else {
+ ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
+ RecordSafepointWithRegisters(
+ instr->pointer_map(), 0, Safepoint::kLazyDeopt);
+ }
+}
+
+
void LCodeGen::RecordSafepoint(
LPointerMap* pointers,
Safepoint::Kind kind,
int arguments,
- int deoptimization_index) {
+ Safepoint::DeoptMode deopt_mode) {
ASSERT(expected_safepoint_kind_ == kind);
const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
- kind, arguments, deoptimization_index);
+ kind, arguments, deopt_mode);
for (int i = 0; i < operands->length(); i++) {
LOperand* pointer = operands->at(i);
if (pointer->IsStackSlot()) {
@@ -774,31 +774,31 @@ void LCodeGen::RecordSafepoint(
void LCodeGen::RecordSafepoint(LPointerMap* pointers,
- int deoptimization_index) {
- RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
+ Safepoint::DeoptMode deopt_mode) {
+ RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);
}
-void LCodeGen::RecordSafepoint(int deoptimization_index) {
+void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
LPointerMap empty_pointers(RelocInfo::kNoPosition);
- RecordSafepoint(&empty_pointers, deoptimization_index);
+ RecordSafepoint(&empty_pointers, deopt_mode);
}
void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
int arguments,
- int deoptimization_index) {
- RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
- deoptimization_index);
+ Safepoint::DeoptMode deopt_mode) {
+ RecordSafepoint(
+ pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
}
void LCodeGen::RecordSafepointWithRegistersAndDoubles(
LPointerMap* pointers,
int arguments,
- int deoptimization_index) {
- RecordSafepoint(pointers, Safepoint::kWithRegistersAndDoubles, arguments,
- deoptimization_index);
+ Safepoint::DeoptMode deopt_mode) {
+ RecordSafepoint(
+ pointers, Safepoint::kWithRegistersAndDoubles, arguments, deopt_mode);
}
@@ -833,12 +833,6 @@ void LCodeGen::DoGap(LGap* gap) {
LParallelMove* move = gap->GetParallelMove(inner_pos);
if (move != NULL) DoParallelMove(move);
}
-
- LInstruction* next = GetNextInstruction();
- if (next != NULL && next->IsLazyBailout()) {
- int pc = masm()->pc_offset();
- safepoints_.SetPcAfterGap(pc);
- }
}
@@ -1146,7 +1140,7 @@ void LCodeGen::DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr,
__ CallStub(&stub);
RecordSafepointWithRegistersAndDoubles(instr->pointer_map(),
0,
- Safepoint::kNoDeoptimizationIndex);
+ Safepoint::kNoLazyDeopt);
// Overwrite the stored value of r0 with the result of the stub.
__ StoreToSafepointRegistersAndDoublesSlot(r0, r0);
}
@@ -1384,8 +1378,13 @@ void LCodeGen::DoConstantD(LConstantD* instr) {
void LCodeGen::DoConstantT(LConstantT* instr) {
- ASSERT(instr->result()->IsRegister());
- __ mov(ToRegister(instr->result()), Operand(instr->value()));
+ Handle<Object> value = instr->value();
+ if (value->IsSmi()) {
+ __ mov(ToRegister(instr->result()), Operand(value));
+ } else {
+ __ LoadHeapObject(ToRegister(instr->result()),
+ Handle<HeapObject>::cast(value));
+ }
}
@@ -1438,6 +1437,46 @@ void LCodeGen::DoValueOf(LValueOf* instr) {
}
+void LCodeGen::DoDateField(LDateField* instr) {
+ Register object = ToRegister(instr->InputAt(0));
+ Register result = ToRegister(instr->result());
+ Register scratch = ToRegister(instr->TempAt(0));
+ Smi* index = instr->index();
+ Label runtime, done;
+ ASSERT(object.is(result));
+ ASSERT(object.is(r0));
+ ASSERT(!scratch.is(scratch0()));
+ ASSERT(!scratch.is(object));
+
+#ifdef DEBUG
+ __ AbortIfSmi(object);
+ __ CompareObjectType(object, scratch, scratch, JS_DATE_TYPE);
+ __ Assert(eq, "Trying to get date field from non-date.");
+#endif
+
+ if (index->value() == 0) {
+ __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
+ } else {
+ if (index->value() < JSDate::kFirstUncachedField) {
+ ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
+ __ mov(scratch, Operand(stamp));
+ __ ldr(scratch, MemOperand(scratch));
+ __ ldr(scratch0(), FieldMemOperand(object, JSDate::kCacheStampOffset));
+ __ cmp(scratch, scratch0());
+ __ b(ne, &runtime);
+ __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
+ kPointerSize * index->value()));
+ __ jmp(&done);
+ }
+ __ bind(&runtime);
+ __ PrepareCallCFunction(2, scratch);
+ __ mov(r1, Operand(index));
+ __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
+ __ bind(&done);
+ }
+}
+
+
void LCodeGen::DoBitNotI(LBitNotI* instr) {
Register input = ToRegister(instr->InputAt(0));
Register result = ToRegister(instr->result());
@@ -1859,6 +1898,31 @@ void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
}
+Condition LCodeGen::EmitIsString(Register input,
+ Register temp1,
+ Label* is_not_string) {
+ __ JumpIfSmi(input, is_not_string);
+ __ CompareObjectType(input, temp1, temp1, FIRST_NONSTRING_TYPE);
+
+ return lt;
+}
+
+
+void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
+ Register reg = ToRegister(instr->InputAt(0));
+ Register temp1 = ToRegister(instr->TempAt(0));
+
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
+ Label* false_label = chunk_->GetAssemblyLabel(false_block);
+
+ Condition true_cond =
+ EmitIsString(reg, temp1, false_label);
+
+ EmitBranch(true_block, false_block, true_cond);
+}
+
+
void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1884,6 +1948,41 @@ void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
}
+static Condition ComputeCompareCondition(Token::Value op) {
+ switch (op) {
+ case Token::EQ_STRICT:
+ case Token::EQ:
+ return eq;
+ case Token::LT:
+ return lt;
+ case Token::GT:
+ return gt;
+ case Token::LTE:
+ return le;
+ case Token::GTE:
+ return ge;
+ default:
+ UNREACHABLE();
+ return kNoCondition;
+ }
+}
+
+
+void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
+ Token::Value op = instr->op();
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
+
+ Handle<Code> ic = CompareIC::GetUninitialized(op);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
+ __ cmp(r0, Operand(0)); // This instruction also signals no smi code inlined.
+
+ Condition condition = ComputeCompareCondition(op);
+
+ EmitBranch(true_block, false_block, condition);
+}
+
+
static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
InstanceType from = instr->from();
InstanceType to = instr->to();
@@ -1949,7 +2048,7 @@ void LCodeGen::DoHasCachedArrayIndexAndBranch(
// Branches to a label or falls through with the answer in flags. Trashes
-// the temp registers, but not the input. Only input and temp2 may alias.
+// the temp registers, but not the input.
void LCodeGen::EmitClassOfTest(Label* is_true,
Label* is_false,
Handle<String>class_name,
@@ -1957,7 +2056,9 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
Register temp,
Register temp2) {
ASSERT(!input.is(temp));
- ASSERT(!temp.is(temp2)); // But input and temp2 may be the same register.
+ ASSERT(!input.is(temp2));
+ ASSERT(!temp.is(temp2));
+
__ JumpIfSmi(input, is_false);
if (class_name->IsEqualTo(CStrVector("Function"))) {
@@ -2063,7 +2164,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* instr)
: LDeferredCode(codegen), instr_(instr) { }
virtual void Generate() {
- codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
+ codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
}
virtual LInstruction* instr() { return instr_; }
Label* map_check() { return &map_check_; }
@@ -2096,7 +2197,10 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
// We use Factory::the_hole_value() on purpose instead of loading from the
// root array to force relocation to be able to later patch with
// the cached map.
- __ mov(ip, Operand(factory()->the_hole_value()));
+ Handle<JSGlobalPropertyCell> cell =
+ factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+ __ mov(ip, Operand(Handle<Object>(cell)));
+ __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
__ cmp(map, Operand(ip));
__ b(ne, &cache_miss);
// We use Factory::the_hole_value() on purpose instead of loading from the
@@ -2130,8 +2234,8 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
}
-void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
- Label* map_check) {
+void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
+ Label* map_check) {
Register result = ToRegister(instr->result());
ASSERT(result.is(r0));
@@ -2151,7 +2255,7 @@ void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
// offset to the location of the map check.
Register temp = ToRegister(instr->TempAt(0));
ASSERT(temp.is(r4));
- __ mov(InstanceofStub::right(), Operand(instr->function()));
+ __ LoadHeapObject(InstanceofStub::right(), instr->function());
static const int kAdditionalDelta = 4;
int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
Label before_push_delta;
@@ -2163,32 +2267,15 @@ void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
RelocInfo::CODE_TARGET,
instr,
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
+ ASSERT(instr->HasDeoptimizationEnvironment());
+ LEnvironment* env = instr->deoptimization_environment();
+ safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
// Put the result value into the result register slot and
// restore all registers.
__ StoreToSafepointRegisterSlot(result, result);
}
-static Condition ComputeCompareCondition(Token::Value op) {
- switch (op) {
- case Token::EQ_STRICT:
- case Token::EQ:
- return eq;
- case Token::LT:
- return lt;
- case Token::GT:
- return gt;
- case Token::LTE:
- return le;
- case Token::GTE:
- return ge;
- default:
- UNREACHABLE();
- return kNoCondition;
- }
-}
-
-
void LCodeGen::DoCmpT(LCmpT* instr) {
Token::Value op = instr->op();
@@ -2246,42 +2333,27 @@ void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
- Register value = ToRegister(instr->InputAt(0));
- Register scratch = scratch0();
- Register scratch2 = ToRegister(instr->TempAt(0));
+ Register value = ToRegister(instr->value());
+ Register cell = scratch0();
// Load the cell.
- __ mov(scratch, Operand(Handle<Object>(instr->hydrogen()->cell())));
+ __ mov(cell, Operand(instr->hydrogen()->cell()));
// If the cell we are storing to contains the hole it could have
// been deleted from the property dictionary. In that case, we need
// to update the property details in the property dictionary to mark
// it as no longer deleted.
if (instr->hydrogen()->RequiresHoleCheck()) {
- __ ldr(scratch2,
- FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ cmp(scratch2, ip);
+ // We use a temp to check the payload (CompareRoot might clobber ip).
+ Register payload = ToRegister(instr->TempAt(0));
+ __ ldr(payload, FieldMemOperand(cell, JSGlobalPropertyCell::kValueOffset));
+ __ CompareRoot(payload, Heap::kTheHoleValueRootIndex);
DeoptimizeIf(eq, instr->environment());
}
// Store the value.
- __ str(value, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
-
- // Cells are always in the remembered set.
- if (instr->hydrogen()->NeedsWriteBarrier()) {
- HType type = instr->hydrogen()->value()->type();
- SmiCheck check_needed =
- type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
- __ RecordWriteField(scratch,
- JSGlobalPropertyCell::kValueOffset,
- value,
- scratch2,
- kLRHasBeenSaved,
- kSaveFPRegs,
- OMIT_REMEMBERED_SET,
- check_needed);
- }
+ __ str(value, FieldMemOperand(cell, JSGlobalPropertyCell::kValueOffset));
+ // Cells are always rescanned, so no write barrier here.
}
@@ -2290,7 +2362,7 @@ void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
ASSERT(ToRegister(instr->value()).is(r0));
__ mov(r2, Operand(instr->name()));
- Handle<Code> ic = instr->strict_mode()
+ Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
@@ -2301,13 +2373,37 @@ void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Register context = ToRegister(instr->context());
Register result = ToRegister(instr->result());
__ ldr(result, ContextOperand(context, instr->slot_index()));
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ __ cmp(result, ip);
+ if (instr->hydrogen()->DeoptimizesOnHole()) {
+ DeoptimizeIf(eq, instr->environment());
+ } else {
+ __ mov(result, Operand(factory()->undefined_value()), LeaveCC, eq);
+ }
+ }
}
void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
Register context = ToRegister(instr->context());
Register value = ToRegister(instr->value());
+ Register scratch = scratch0();
MemOperand target = ContextOperand(context, instr->slot_index());
+
+ Label skip_assignment;
+
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ __ ldr(scratch, target);
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ __ cmp(scratch, ip);
+ if (instr->hydrogen()->DeoptimizesOnHole()) {
+ DeoptimizeIf(eq, instr->environment());
+ } else {
+ __ b(ne, &skip_assignment);
+ }
+ }
+
__ str(value, target);
if (instr->hydrogen()->NeedsWriteBarrier()) {
HType type = instr->hydrogen()->value()->type();
@@ -2316,12 +2412,14 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
__ RecordWriteContextSlot(context,
target.offset(),
value,
- scratch0(),
+ scratch,
kLRHasBeenSaved,
kSaveFPRegs,
EMIT_REMEMBERED_SET,
check_needed);
}
+
+ __ bind(&skip_assignment);
}
@@ -2343,7 +2441,7 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
Handle<String> name) {
LookupResult lookup(isolate());
type->LookupInDescriptors(NULL, *name, &lookup);
- ASSERT(lookup.IsProperty() &&
+ ASSERT(lookup.IsFound() &&
(lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
if (lookup.type() == FIELD) {
int index = lookup.GetLocalFieldIndexFromMap(*type);
@@ -2359,7 +2457,7 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
}
} else {
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
- LoadHeapObject(result, Handle<HeapObject>::cast(function));
+ __ LoadHeapObject(result, function);
}
}
@@ -2701,15 +2799,10 @@ void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
}
-void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
+void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
Register receiver = ToRegister(instr->receiver());
Register function = ToRegister(instr->function());
- Register length = ToRegister(instr->length());
- Register elements = ToRegister(instr->elements());
Register scratch = scratch0();
- ASSERT(receiver.is(r0)); // Used for parameter count.
- ASSERT(function.is(r1)); // Required by InvokeFunction.
- ASSERT(ToRegister(instr->result()).is(r0));
// If the receiver is null or undefined, we have to pass the global
// object as a receiver to normal functions. Values have to be
@@ -2750,6 +2843,18 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
__ ldr(receiver,
FieldMemOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
__ bind(&receiver_ok);
+}
+
+
+void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
+ Register receiver = ToRegister(instr->receiver());
+ Register function = ToRegister(instr->function());
+ Register length = ToRegister(instr->length());
+ Register elements = ToRegister(instr->elements());
+ Register scratch = scratch0();
+ ASSERT(receiver.is(r0)); // Used for parameter count.
+ ASSERT(function.is(r1)); // Required by InvokeFunction.
+ ASSERT(ToRegister(instr->result()).is(r0));
// Copy the arguments to this function possibly from the
// adaptor frame below it.
@@ -2779,15 +2884,12 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
__ bind(&invoke);
ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
- SafepointGenerator safepoint_generator(this,
- pointers,
- env->deoptimization_index());
+ SafepointGenerator safepoint_generator(
+ this, pointers, Safepoint::kLazyDeopt);
// The number of arguments is stored in receiver which is r0, as expected
// by InvokeFunction.
- v8::internal::ParameterCount actual(receiver);
+ ParameterCount actual(receiver);
__ InvokeFunction(function, actual, CALL_FUNCTION,
safepoint_generator, CALL_AS_METHOD);
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2807,7 +2909,7 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) {
void LCodeGen::DoThisFunction(LThisFunction* instr) {
Register result = ToRegister(instr->result());
- LoadHeapObject(result, instr->hydrogen()->closure());
+ __ LoadHeapObject(result, instr->hydrogen()->closure());
}
@@ -2825,9 +2927,19 @@ void LCodeGen::DoOuterContext(LOuterContext* instr) {
}
+void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
+ __ push(cp); // The context is the first argument.
+ __ LoadHeapObject(scratch0(), instr->hydrogen()->pairs());
+ __ push(scratch0());
+ __ mov(scratch0(), Operand(Smi::FromInt(instr->hydrogen()->flags())));
+ __ push(scratch0());
+ CallRuntime(Runtime::kDeclareGlobals, 3, instr);
+}
+
+
void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
Register result = ToRegister(instr->result());
- __ ldr(result, ContextOperand(cp, instr->qml_global()?Context::QML_GLOBAL_INDEX:Context::GLOBAL_INDEX));
+ __ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
}
@@ -2842,31 +2954,41 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
int arity,
LInstruction* instr,
CallKind call_kind) {
- // Change context if needed.
- bool change_context =
- (info()->closure()->context() != function->context()) ||
- scope()->contains_with() ||
- (scope()->num_heap_slots() > 0);
- if (change_context) {
- __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
- }
-
- // Set r0 to arguments count if adaption is not needed. Assumes that r0
- // is available to write to at this point.
- if (!function->NeedsArgumentsAdaption()) {
- __ mov(r0, Operand(arity));
- }
+ bool can_invoke_directly = !function->NeedsArgumentsAdaption() ||
+ function->shared()->formal_parameter_count() == arity;
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
- // Invoke function.
- __ SetCallKind(r5, call_kind);
- __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
- __ Call(ip);
+ if (can_invoke_directly) {
+ __ LoadHeapObject(r1, function);
+ // Change context if needed.
+ bool change_context =
+ (info()->closure()->context() != function->context()) ||
+ scope()->contains_with() ||
+ (scope()->num_heap_slots() > 0);
+ if (change_context) {
+ __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
+ }
+
+ // Set r0 to arguments count if adaption is not needed. Assumes that r0
+ // is available to write to at this point.
+ if (!function->NeedsArgumentsAdaption()) {
+ __ mov(r0, Operand(arity));
+ }
- // Setup deoptimization.
- RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
+ // Invoke function.
+ __ SetCallKind(r5, call_kind);
+ __ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
+ __ Call(ip);
+
+ // Set up deoptimization.
+ RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
+ } else {
+ SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
+ ParameterCount count(arity);
+ __ InvokeFunction(function, count, CALL_FUNCTION, generator, call_kind);
+ }
// Restore context.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2875,7 +2997,6 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
ASSERT(ToRegister(instr->result()).is(r0));
- __ mov(r1, Operand(instr->function()));
CallKnownFunction(instr->function(),
instr->arity(),
instr,
@@ -3060,11 +3181,11 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
__ and_(scratch, result, Operand(HeapNumber::kSignMask));
__ Vmov(double_scratch0(), 0.5);
- __ vadd(input, input, double_scratch0());
+ __ vadd(double_scratch0(), input, double_scratch0());
// Check sign of the result: if the sign changed, the input
// value was in ]0.5, 0[ and the result should be -0.
- __ vmov(result, input.high());
+ __ vmov(result, double_scratch0().high());
__ eor(result, result, Operand(scratch), SetCC);
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
DeoptimizeIf(mi, instr->environment());
@@ -3075,7 +3196,7 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
__ EmitVFPTruncate(kRoundToMinusInf,
double_scratch0().low(),
- input,
+ double_scratch0(),
result,
scratch);
DeoptimizeIf(ne, instr->environment());
@@ -3104,68 +3225,131 @@ void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
DoubleRegister result = ToDoubleRegister(instr->result());
+ DoubleRegister temp = ToDoubleRegister(instr->TempAt(0));
+
+ // Note that according to ECMA-262 15.8.2.13:
+ // Math.pow(-Infinity, 0.5) == Infinity
+ // Math.sqrt(-Infinity) == NaN
+ Label done;
+ __ vmov(temp, -V8_INFINITY);
+ __ VFPCompareAndSetFlags(input, temp);
+ __ vneg(result, temp, eq);
+ __ b(&done, eq);
+
// Add +0 to convert -0 to +0.
__ vadd(result, input, kDoubleRegZero);
__ vsqrt(result, result);
+ __ bind(&done);
}
void LCodeGen::DoPower(LPower* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
- Register scratch = scratch0();
- DoubleRegister result_reg = ToDoubleRegister(instr->result());
Representation exponent_type = instr->hydrogen()->right()->representation();
- if (exponent_type.IsDouble()) {
- // Prepare arguments and call C function.
- __ PrepareCallCFunction(0, 2, scratch);
- __ SetCallCDoubleArguments(ToDoubleRegister(left),
- ToDoubleRegister(right));
- __ CallCFunction(
- ExternalReference::power_double_double_function(isolate()), 0, 2);
+ // Having marked this as a call, we can use any registers.
+ // Just make sure that the input/output registers are the expected ones.
+ ASSERT(!instr->InputAt(1)->IsDoubleRegister() ||
+ ToDoubleRegister(instr->InputAt(1)).is(d2));
+ ASSERT(!instr->InputAt(1)->IsRegister() ||
+ ToRegister(instr->InputAt(1)).is(r2));
+ ASSERT(ToDoubleRegister(instr->InputAt(0)).is(d1));
+ ASSERT(ToDoubleRegister(instr->result()).is(d3));
+
+ if (exponent_type.IsTagged()) {
+ Label no_deopt;
+ __ JumpIfSmi(r2, &no_deopt);
+ __ ldr(r7, FieldMemOperand(r2, HeapObject::kMapOffset));
+ __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
+ __ cmp(r7, Operand(ip));
+ DeoptimizeIf(ne, instr->environment());
+ __ bind(&no_deopt);
+ MathPowStub stub(MathPowStub::TAGGED);
+ __ CallStub(&stub);
} else if (exponent_type.IsInteger32()) {
- ASSERT(ToRegister(right).is(r0));
- // Prepare arguments and call C function.
- __ PrepareCallCFunction(1, 1, scratch);
- __ SetCallCDoubleArguments(ToDoubleRegister(left), ToRegister(right));
- __ CallCFunction(
- ExternalReference::power_double_int_function(isolate()), 1, 1);
+ MathPowStub stub(MathPowStub::INTEGER);
+ __ CallStub(&stub);
} else {
- ASSERT(exponent_type.IsTagged());
- ASSERT(instr->hydrogen()->left()->representation().IsDouble());
+ ASSERT(exponent_type.IsDouble());
+ MathPowStub stub(MathPowStub::DOUBLE);
+ __ CallStub(&stub);
+ }
+}
- Register right_reg = ToRegister(right);
- // Check for smi on the right hand side.
- Label non_smi, call;
- __ JumpIfNotSmi(right_reg, &non_smi);
+void LCodeGen::DoRandom(LRandom* instr) {
+ class DeferredDoRandom: public LDeferredCode {
+ public:
+ DeferredDoRandom(LCodeGen* codegen, LRandom* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() { codegen()->DoDeferredRandom(instr_); }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LRandom* instr_;
+ };
- // Untag smi and convert it to a double.
- __ SmiUntag(right_reg);
- SwVfpRegister single_scratch = double_scratch0().low();
- __ vmov(single_scratch, right_reg);
- __ vcvt_f64_s32(result_reg, single_scratch);
- __ jmp(&call);
+ DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
- // Heap number map check.
- __ bind(&non_smi);
- __ ldr(scratch, FieldMemOperand(right_reg, HeapObject::kMapOffset));
- __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
- __ cmp(scratch, Operand(ip));
- DeoptimizeIf(ne, instr->environment());
- int32_t value_offset = HeapNumber::kValueOffset - kHeapObjectTag;
- __ add(scratch, right_reg, Operand(value_offset));
- __ vldr(result_reg, scratch, 0);
+ // Having marked this instruction as a call we can use any
+ // registers.
+ ASSERT(ToDoubleRegister(instr->result()).is(d7));
+ ASSERT(ToRegister(instr->InputAt(0)).is(r0));
- // Prepare arguments and call C function.
- __ bind(&call);
- __ PrepareCallCFunction(0, 2, scratch);
- __ SetCallCDoubleArguments(ToDoubleRegister(left), result_reg);
- __ CallCFunction(
- ExternalReference::power_double_double_function(isolate()), 0, 2);
- }
- // Store the result in the result register.
- __ GetCFunctionDoubleResult(result_reg);
+ static const int kSeedSize = sizeof(uint32_t);
+ STATIC_ASSERT(kPointerSize == kSeedSize);
+
+ __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
+ static const int kRandomSeedOffset =
+ FixedArray::kHeaderSize + Context::RANDOM_SEED_INDEX * kPointerSize;
+ __ ldr(r2, FieldMemOperand(r0, kRandomSeedOffset));
+ // r2: FixedArray of the global context's random seeds
+
+ // Load state[0].
+ __ ldr(r1, FieldMemOperand(r2, ByteArray::kHeaderSize));
+ __ cmp(r1, Operand(0));
+ __ b(eq, deferred->entry());
+ // Load state[1].
+ __ ldr(r0, FieldMemOperand(r2, ByteArray::kHeaderSize + kSeedSize));
+ // r1: state[0].
+ // r0: state[1].
+
+ // state[0] = 18273 * (state[0] & 0xFFFF) + (state[0] >> 16)
+ __ and_(r3, r1, Operand(0xFFFF));
+ __ mov(r4, Operand(18273));
+ __ mul(r3, r3, r4);
+ __ add(r1, r3, Operand(r1, LSR, 16));
+ // Save state[0].
+ __ str(r1, FieldMemOperand(r2, ByteArray::kHeaderSize));
+
+ // state[1] = 36969 * (state[1] & 0xFFFF) + (state[1] >> 16)
+ __ and_(r3, r0, Operand(0xFFFF));
+ __ mov(r4, Operand(36969));
+ __ mul(r3, r3, r4);
+ __ add(r0, r3, Operand(r0, LSR, 16));
+ // Save state[1].
+ __ str(r0, FieldMemOperand(r2, ByteArray::kHeaderSize + kSeedSize));
+
+ // Random bit pattern = (state[0] << 14) + (state[1] & 0x3FFFF)
+ __ and_(r0, r0, Operand(0x3FFFF));
+ __ add(r0, r0, Operand(r1, LSL, 14));
+
+ __ bind(deferred->exit());
+ // 0x41300000 is the top half of 1.0 x 2^20 as a double.
+ // Create this constant using mov/orr to avoid PC relative load.
+ __ mov(r1, Operand(0x41000000));
+ __ orr(r1, r1, Operand(0x300000));
+ // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
+ __ vmov(d7, r0, r1);
+ // Move 0x4130000000000000 to VFP.
+ __ mov(r0, Operand(0, RelocInfo::NONE));
+ __ vmov(d8, r0, r1);
+ // Subtract and store the result in the heap number.
+ __ vsub(d7, d7, d8);
+}
+
+
+void LCodeGen::DoDeferredRandom(LRandom* instr) {
+ __ PrepareCallCFunction(1, scratch0());
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
+ // Return value is in r0.
}
@@ -3177,6 +3361,14 @@ void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
}
+void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
+ ASSERT(ToDoubleRegister(instr->result()).is(d2));
+ TranscendentalCacheStub stub(TranscendentalCache::TAN,
+ TranscendentalCacheStub::UNTAGGED);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+}
+
+
void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(d2));
TranscendentalCacheStub stub(TranscendentalCache::COS,
@@ -3216,6 +3408,9 @@ void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
case kMathSin:
DoMathSin(instr);
break;
+ case kMathTan:
+ DoMathTan(instr);
+ break;
case kMathLog:
DoMathLog(instr);
break;
@@ -3231,10 +3426,8 @@ void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
ASSERT(instr->HasPointerMap());
ASSERT(instr->HasDeoptimizationEnvironment());
LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
- SafepointGenerator generator(this, pointers, env->deoptimization_index());
+ SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
ParameterCount count(instr->arity());
__ InvokeFunction(r1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -3267,12 +3460,12 @@ void LCodeGen::DoCallNamed(LCallNamed* instr) {
void LCodeGen::DoCallFunction(LCallFunction* instr) {
+ ASSERT(ToRegister(instr->function()).is(r1));
ASSERT(ToRegister(instr->result()).is(r0));
int arity = instr->arity();
CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
- __ Drop(1);
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
@@ -3292,7 +3485,6 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
ASSERT(ToRegister(instr->result()).is(r0));
- __ mov(r1, Operand(instr->target()));
CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
}
@@ -3301,9 +3493,9 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
ASSERT(ToRegister(instr->InputAt(0)).is(r1));
ASSERT(ToRegister(instr->result()).is(r0));
- Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
+ CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
__ mov(r0, Operand(instr->arity()));
- CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
+ CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
}
@@ -3367,7 +3559,7 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
// Name is always in r2.
__ mov(r2, Operand(instr->name()));
- Handle<Code> ic = instr->strict_mode()
+ Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
CallCode(ic, RelocInfo::CODE_TARGET, instr);
@@ -3386,13 +3578,6 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
Register scratch = scratch0();
- // This instruction cannot handle the FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
- // conversion, so it deopts in that case.
- if (instr->hydrogen()->ValueNeedsSmiCheck()) {
- __ tst(value, Operand(kSmiTagMask));
- DeoptimizeIf(ne, instr->environment());
- }
-
// Do the store.
if (instr->key()->IsConstantOperand()) {
ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
@@ -3533,7 +3718,7 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
ASSERT(ToRegister(instr->key()).is(r1));
ASSERT(ToRegister(instr->value()).is(r0));
- Handle<Code> ic = instr->strict_mode()
+ Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
CallCode(ic, RelocInfo::CODE_TARGET, instr);
@@ -3601,83 +3786,14 @@ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
LStringCharCodeAt* instr_;
};
- Register string = ToRegister(instr->string());
- Register index = ToRegister(instr->index());
- Register result = ToRegister(instr->result());
-
DeferredStringCharCodeAt* deferred =
new DeferredStringCharCodeAt(this, instr);
- // Fetch the instance type of the receiver into result register.
- __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
- __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
-
- // We need special handling for indirect strings.
- Label check_sequential;
- __ tst(result, Operand(kIsIndirectStringMask));
- __ b(eq, &check_sequential);
-
- // Dispatch on the indirect string shape: slice or cons.
- Label cons_string;
- __ tst(result, Operand(kSlicedNotConsMask));
- __ b(eq, &cons_string);
-
- // Handle slices.
- Label indirect_string_loaded;
- __ ldr(result, FieldMemOperand(string, SlicedString::kOffsetOffset));
- __ add(index, index, Operand(result, ASR, kSmiTagSize));
- __ ldr(string, FieldMemOperand(string, SlicedString::kParentOffset));
- __ jmp(&indirect_string_loaded);
-
- // Handle conses.
- // Check whether the right hand side is the empty string (i.e. if
- // this is really a flat string in a cons string). If that is not
- // the case we would rather go to the runtime system now to flatten
- // the string.
- __ bind(&cons_string);
- __ ldr(result, FieldMemOperand(string, ConsString::kSecondOffset));
- __ LoadRoot(ip, Heap::kEmptyStringRootIndex);
- __ cmp(result, ip);
- __ b(ne, deferred->entry());
- // Get the first of the two strings and load its instance type.
- __ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset));
-
- __ bind(&indirect_string_loaded);
- __ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
- __ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
-
- // Check whether the string is sequential. The only non-sequential
- // shapes we support have just been unwrapped above.
- __ bind(&check_sequential);
- STATIC_ASSERT(kSeqStringTag == 0);
- __ tst(result, Operand(kStringRepresentationMask));
- __ b(ne, deferred->entry());
-
- // Dispatch on the encoding: ASCII or two-byte.
- Label ascii_string;
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
- STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
- __ tst(result, Operand(kStringEncodingMask));
- __ b(ne, &ascii_string);
-
- // Two-byte string.
- // Load the two-byte character code into the result register.
- Label done;
- __ add(result,
- string,
- Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
- __ ldrh(result, MemOperand(result, index, LSL, 1));
- __ jmp(&done);
-
- // ASCII string.
- // Load the byte into the result register.
- __ bind(&ascii_string);
- __ add(result,
- string,
- Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- __ ldrb(result, MemOperand(result, index));
-
- __ bind(&done);
+ StringCharLoadGenerator::Generate(masm(),
+ ToRegister(instr->string()),
+ ToRegister(instr->index()),
+ ToRegister(instr->result()),
+ deferred->entry());
__ bind(deferred->exit());
}
@@ -3797,12 +3913,11 @@ void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
LNumberTagI* instr_;
};
- LOperand* input = instr->InputAt(0);
- ASSERT(input->IsRegister() && input->Equals(instr->result()));
- Register reg = ToRegister(input);
+ Register src = ToRegister(instr->InputAt(0));
+ Register dst = ToRegister(instr->result());
DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
- __ SmiTag(reg, SetCC);
+ __ SmiTag(dst, src, SetCC);
__ b(vs, deferred->entry());
__ bind(deferred->exit());
}
@@ -3810,7 +3925,8 @@ void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
Label slow;
- Register reg = ToRegister(instr->InputAt(0));
+ Register src = ToRegister(instr->InputAt(0));
+ Register dst = ToRegister(instr->result());
DoubleRegister dbl_scratch = double_scratch0();
SwVfpRegister flt_scratch = dbl_scratch.low();
@@ -3821,14 +3937,16 @@ void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
// disagree. Try to allocate a heap number in new space and store
// the value in there. If that fails, call the runtime system.
Label done;
- __ SmiUntag(reg);
- __ eor(reg, reg, Operand(0x80000000));
- __ vmov(flt_scratch, reg);
+ if (dst.is(src)) {
+ __ SmiUntag(src, dst);
+ __ eor(src, src, Operand(0x80000000));
+ }
+ __ vmov(flt_scratch, src);
__ vcvt_f64_s32(dbl_scratch, flt_scratch);
if (FLAG_inline_new) {
__ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
__ AllocateHeapNumber(r5, r3, r4, r6, &slow);
- if (!reg.is(r5)) __ mov(reg, r5);
+ __ Move(dst, r5);
__ b(&done);
}
@@ -3839,16 +3957,16 @@ void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
// register is stored, as this register is in the pointer map, but contains an
// integer value.
__ mov(ip, Operand(0));
- __ StoreToSafepointRegisterSlot(ip, reg);
+ __ StoreToSafepointRegisterSlot(ip, dst);
CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
- if (!reg.is(r0)) __ mov(reg, r0);
+ __ Move(dst, r0);
// Done. Put the value in dbl_scratch into the value of the allocated heap
// number.
__ bind(&done);
- __ sub(ip, reg, Operand(kHeapObjectTag));
+ __ sub(ip, dst, Operand(kHeapObjectTag));
__ vstr(dbl_scratch, ip, HeapNumber::kValueOffset);
- __ StoreToSafepointRegisterSlot(reg, reg);
+ __ StoreToSafepointRegisterSlot(dst, dst);
}
@@ -3896,23 +4014,21 @@ void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
void LCodeGen::DoSmiTag(LSmiTag* instr) {
- LOperand* input = instr->InputAt(0);
- ASSERT(input->IsRegister() && input->Equals(instr->result()));
ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
- __ SmiTag(ToRegister(input));
+ __ SmiTag(ToRegister(instr->result()), ToRegister(instr->InputAt(0)));
}
void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
- LOperand* input = instr->InputAt(0);
- ASSERT(input->IsRegister() && input->Equals(instr->result()));
+ Register input = ToRegister(instr->InputAt(0));
+ Register result = ToRegister(instr->result());
if (instr->needs_check()) {
STATIC_ASSERT(kHeapObjectTag == 1);
// If the input is a HeapObject, SmiUntag will set the carry flag.
- __ SmiUntag(ToRegister(input), SetCC);
+ __ SmiUntag(result, input, SetCC);
DeoptimizeIf(cs, instr->environment());
} else {
- __ SmiUntag(ToRegister(input));
+ __ SmiUntag(result, input);
}
}
@@ -3920,6 +4036,7 @@ void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
void LCodeGen::EmitNumberUntagD(Register input_reg,
DoubleRegister result_reg,
bool deoptimize_on_undefined,
+ bool deoptimize_on_minus_zero,
LEnvironment* env) {
Register scratch = scratch0();
SwVfpRegister flt_scratch = double_scratch0().low();
@@ -3928,7 +4045,7 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
Label load_smi, heap_number, done;
// Smi check.
- __ JumpIfSmi(input_reg, &load_smi);
+ __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi);
// Heap number map check.
__ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
@@ -3955,14 +4072,21 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
// Heap number to double register conversion.
__ sub(ip, input_reg, Operand(kHeapObjectTag));
__ vldr(result_reg, ip, HeapNumber::kValueOffset);
+ if (deoptimize_on_minus_zero) {
+ __ vmov(ip, result_reg.low());
+ __ cmp(ip, Operand(0));
+ __ b(ne, &done);
+ __ vmov(ip, result_reg.high());
+ __ cmp(ip, Operand(HeapNumber::kSignMask));
+ DeoptimizeIf(eq, env);
+ }
__ jmp(&done);
// Smi to double register conversion
__ bind(&load_smi);
- __ SmiUntag(input_reg); // Untag smi before converting to float.
- __ vmov(flt_scratch, input_reg);
+ // scratch: untagged value of input_reg
+ __ vmov(flt_scratch, scratch);
__ vcvt_f64_s32(result_reg, flt_scratch);
- __ SmiTag(input_reg); // Retag smi.
__ bind(&done);
}
@@ -4088,6 +4212,7 @@ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
EmitNumberUntagD(input_reg, result_reg,
instr->hydrogen()->deoptimize_on_undefined(),
+ instr->hydrogen()->deoptimize_on_minus_zero(),
instr->environment());
}
@@ -4185,21 +4310,50 @@ void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
- ASSERT(instr->InputAt(0)->IsRegister());
- Register reg = ToRegister(instr->InputAt(0));
- __ cmp(reg, Operand(instr->hydrogen()->target()));
+ Register reg = ToRegister(instr->value());
+ Handle<JSFunction> target = instr->hydrogen()->target();
+ if (isolate()->heap()->InNewSpace(*target)) {
+ Register reg = ToRegister(instr->value());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(target);
+ __ mov(ip, Operand(Handle<Object>(cell)));
+ __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
+ __ cmp(reg, ip);
+ } else {
+ __ cmp(reg, Operand(target));
+ }
DeoptimizeIf(ne, instr->environment());
}
-void LCodeGen::DoCheckMap(LCheckMap* instr) {
+void LCodeGen::DoCheckMapCommon(Register reg,
+ Register scratch,
+ Handle<Map> map,
+ CompareMapMode mode,
+ LEnvironment* env) {
+ Label success;
+ __ CompareMap(reg, scratch, map, &success, mode);
+ DeoptimizeIf(ne, env);
+ __ bind(&success);
+}
+
+
+void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
Register scratch = scratch0();
LOperand* input = instr->InputAt(0);
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
- __ cmp(scratch, Operand(instr->hydrogen()->map()));
- DeoptimizeIf(ne, instr->environment());
+
+ Label success;
+ SmallMapList* map_set = instr->hydrogen()->map_set();
+ for (int i = 0; i < map_set->length() - 1; i++) {
+ Handle<Map> map = map_set->at(i);
+ __ CompareMap(reg, scratch, map, &success, REQUIRE_EXACT_MAP);
+ __ b(eq, &success);
+ }
+ Handle<Map> map = map_set->last();
+ DoCheckMapCommon(reg, scratch, map, REQUIRE_EXACT_MAP, instr->environment());
+ __ bind(&success);
}
@@ -4226,7 +4380,7 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
Label is_smi, done, heap_number;
// Both smi and heap number cases are handled.
- __ JumpIfSmi(input_reg, &is_smi);
+ __ UntagAndJumpIfSmi(result_reg, input_reg, &is_smi);
// Check for heap number
__ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
@@ -4249,26 +4403,12 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
// smi
__ bind(&is_smi);
- __ SmiUntag(result_reg, input_reg);
__ ClampUint8(result_reg, result_reg);
__ bind(&done);
}
-void LCodeGen::LoadHeapObject(Register result,
- Handle<HeapObject> object) {
- if (heap()->InNewSpace(*object)) {
- Handle<JSGlobalPropertyCell> cell =
- factory()->NewJSGlobalPropertyCell(object);
- __ mov(result, Operand(cell));
- __ ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
- } else {
- __ mov(result, Operand(object));
- }
-}
-
-
void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
Register temp1 = ToRegister(instr->TempAt(0));
Register temp2 = ToRegister(instr->TempAt(1));
@@ -4277,36 +4417,135 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
Handle<JSObject> current_prototype = instr->prototype();
// Load prototype object.
- LoadHeapObject(temp1, current_prototype);
+ __ LoadHeapObject(temp1, current_prototype);
// Check prototype maps up to the holder.
while (!current_prototype.is_identical_to(holder)) {
- __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
- __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
- DeoptimizeIf(ne, instr->environment());
+ DoCheckMapCommon(temp1, temp2,
+ Handle<Map>(current_prototype->map()),
+ ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
current_prototype =
Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
// Load next prototype object.
- LoadHeapObject(temp1, current_prototype);
+ __ LoadHeapObject(temp1, current_prototype);
}
// Check the holder map.
- __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
- __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
+ DoCheckMapCommon(temp1, temp2,
+ Handle<Map>(current_prototype->map()),
+ ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
DeoptimizeIf(ne, instr->environment());
}
+void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
+ class DeferredAllocateObject: public LDeferredCode {
+ public:
+ DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LAllocateObject* instr_;
+ };
+
+ DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+
+ Register result = ToRegister(instr->result());
+ Register scratch = ToRegister(instr->TempAt(0));
+ Register scratch2 = ToRegister(instr->TempAt(1));
+ Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+ Handle<Map> initial_map(constructor->initial_map());
+ int instance_size = initial_map->instance_size();
+ ASSERT(initial_map->pre_allocated_property_fields() +
+ initial_map->unused_property_fields() -
+ initial_map->inobject_properties() == 0);
+
+ // Allocate memory for the object. The initial map might change when
+ // the constructor's prototype changes, but instance size and property
+ // counts remain unchanged (if slack tracking finished).
+ ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
+ __ AllocateInNewSpace(instance_size,
+ result,
+ scratch,
+ scratch2,
+ deferred->entry(),
+ TAG_OBJECT);
+
+ __ bind(deferred->exit());
+ if (FLAG_debug_code) {
+ Label is_in_new_space;
+ __ JumpIfInNewSpace(result, scratch, &is_in_new_space);
+ __ Abort("Allocated object is not in new-space");
+ __ bind(&is_in_new_space);
+ }
+
+ // Load the initial map.
+ Register map = scratch;
+ __ LoadHeapObject(map, constructor);
+ __ ldr(map, FieldMemOperand(map, JSFunction::kPrototypeOrInitialMapOffset));
+
+ // Initialize map and fields of the newly allocated object.
+ ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
+ __ str(map, FieldMemOperand(result, JSObject::kMapOffset));
+ __ LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
+ __ str(scratch, FieldMemOperand(result, JSObject::kElementsOffset));
+ __ str(scratch, FieldMemOperand(result, JSObject::kPropertiesOffset));
+ if (initial_map->inobject_properties() != 0) {
+ __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
+ for (int i = 0; i < initial_map->inobject_properties(); i++) {
+ int property_offset = JSObject::kHeaderSize + i * kPointerSize;
+ __ str(scratch, FieldMemOperand(result, property_offset));
+ }
+ }
+}
+
+
+void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
+ Register result = ToRegister(instr->result());
+ Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+ Handle<Map> initial_map(constructor->initial_map());
+ int instance_size = initial_map->instance_size();
+
+ // TODO(3095996): Get rid of this. For now, we need to make the
+ // result register contain a valid pointer because it is already
+ // contained in the register pointer map.
+ __ mov(result, Operand(0));
+
+ PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
+ __ mov(r0, Operand(Smi::FromInt(instance_size)));
+ __ push(r0);
+ CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
+ __ StoreToSafepointRegisterSlot(r0, result);
+}
+
+
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
- Handle<FixedArray> constant_elements = instr->hydrogen()->constant_elements();
- ASSERT_EQ(2, constant_elements->length());
- ElementsKind constant_elements_kind =
- static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
+ Heap* heap = isolate()->heap();
+ ElementsKind boilerplate_elements_kind =
+ instr->hydrogen()->boilerplate_elements_kind();
+
+ // Deopt if the array literal boilerplate ElementsKind is of a type different
+ // than the expected one. The check isn't necessary if the boilerplate has
+ // already been converted to FAST_ELEMENTS.
+ if (boilerplate_elements_kind != FAST_ELEMENTS) {
+ __ LoadHeapObject(r1, instr->hydrogen()->boilerplate_object());
+ // Load map into r2.
+ __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
+ // Load the map's "bit field 2".
+ __ ldrb(r2, FieldMemOperand(r2, Map::kBitField2Offset));
+ // Retrieve elements_kind from bit field 2.
+ __ ubfx(r2, r2, Map::kElementsKindShift, Map::kElementsKindBitCount);
+ __ cmp(r2, Operand(boilerplate_elements_kind));
+ DeoptimizeIf(ne, instr->environment());
+ }
__ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
__ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
- __ mov(r1, Operand(constant_elements));
+ // Boilerplate already exists, constant elements are never accessed.
+ // Pass an empty fixed array.
+ __ mov(r1, Operand(Handle<FixedArray>(heap->empty_fixed_array())));
__ Push(r3, r2, r1);
// Pick the right runtime function or stub to call.
@@ -4323,28 +4562,164 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
} else {
FastCloneShallowArrayStub::Mode mode =
- constant_elements_kind == FAST_DOUBLE_ELEMENTS
- ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
- : FastCloneShallowArrayStub::CLONE_ELEMENTS;
+ boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
+ ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+ : FastCloneShallowArrayStub::CLONE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, length);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
}
+void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
+ Register result,
+ Register source,
+ int* offset) {
+ ASSERT(!source.is(r2));
+ ASSERT(!result.is(r2));
+
+ // Only elements backing stores for non-COW arrays need to be copied.
+ Handle<FixedArrayBase> elements(object->elements());
+ bool has_elements = elements->length() > 0 &&
+ elements->map() != isolate()->heap()->fixed_cow_array_map();
+
+ // Increase the offset so that subsequent objects end up right after
+ // this object and its backing store.
+ int object_offset = *offset;
+ int object_size = object->map()->instance_size();
+ int elements_offset = *offset + object_size;
+ int elements_size = has_elements ? elements->Size() : 0;
+ *offset += object_size + elements_size;
+
+ // Copy object header.
+ ASSERT(object->properties()->length() == 0);
+ int inobject_properties = object->map()->inobject_properties();
+ int header_size = object_size - inobject_properties * kPointerSize;
+ for (int i = 0; i < header_size; i += kPointerSize) {
+ if (has_elements && i == JSObject::kElementsOffset) {
+ __ add(r2, result, Operand(elements_offset));
+ } else {
+ __ ldr(r2, FieldMemOperand(source, i));
+ }
+ __ str(r2, FieldMemOperand(result, object_offset + i));
+ }
+
+ // Copy in-object properties.
+ for (int i = 0; i < inobject_properties; i++) {
+ int total_offset = object_offset + object->GetInObjectPropertyOffset(i);
+ Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i));
+ if (value->IsJSObject()) {
+ Handle<JSObject> value_object = Handle<JSObject>::cast(value);
+ __ add(r2, result, Operand(*offset));
+ __ str(r2, FieldMemOperand(result, total_offset));
+ __ LoadHeapObject(source, value_object);
+ EmitDeepCopy(value_object, result, source, offset);
+ } else if (value->IsHeapObject()) {
+ __ LoadHeapObject(r2, Handle<HeapObject>::cast(value));
+ __ str(r2, FieldMemOperand(result, total_offset));
+ } else {
+ __ mov(r2, Operand(value));
+ __ str(r2, FieldMemOperand(result, total_offset));
+ }
+ }
+
+ if (has_elements) {
+ // Copy elements backing store header.
+ __ LoadHeapObject(source, elements);
+ for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) {
+ __ ldr(r2, FieldMemOperand(source, i));
+ __ str(r2, FieldMemOperand(result, elements_offset + i));
+ }
+
+ // Copy elements backing store content.
+ int elements_length = has_elements ? elements->length() : 0;
+ if (elements->IsFixedDoubleArray()) {
+ Handle<FixedDoubleArray> double_array =
+ Handle<FixedDoubleArray>::cast(elements);
+ for (int i = 0; i < elements_length; i++) {
+ int64_t value = double_array->get_representation(i);
+ // We only support little endian mode...
+ int32_t value_low = value & 0xFFFFFFFF;
+ int32_t value_high = value >> 32;
+ int total_offset =
+ elements_offset + FixedDoubleArray::OffsetOfElementAt(i);
+ __ mov(r2, Operand(value_low));
+ __ str(r2, FieldMemOperand(result, total_offset));
+ __ mov(r2, Operand(value_high));
+ __ str(r2, FieldMemOperand(result, total_offset + 4));
+ }
+ } else if (elements->IsFixedArray()) {
+ for (int i = 0; i < elements_length; i++) {
+ int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
+ Handle<Object> value = JSObject::GetElement(object, i);
+ if (value->IsJSObject()) {
+ Handle<JSObject> value_object = Handle<JSObject>::cast(value);
+ __ add(r2, result, Operand(*offset));
+ __ str(r2, FieldMemOperand(result, total_offset));
+ __ LoadHeapObject(source, value_object);
+ EmitDeepCopy(value_object, result, source, offset);
+ } else if (value->IsHeapObject()) {
+ __ LoadHeapObject(r2, Handle<HeapObject>::cast(value));
+ __ str(r2, FieldMemOperand(result, total_offset));
+ } else {
+ __ mov(r2, Operand(value));
+ __ str(r2, FieldMemOperand(result, total_offset));
+ }
+ }
+ } else {
+ UNREACHABLE();
+ }
+ }
+}
+
+
+void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
+ int size = instr->hydrogen()->total_size();
+
+ // Allocate all objects that are part of the literal in one big
+ // allocation. This avoids multiple limit checks.
+ Label allocated, runtime_allocate;
+ __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
+ __ jmp(&allocated);
+
+ __ bind(&runtime_allocate);
+ __ mov(r0, Operand(Smi::FromInt(size)));
+ __ push(r0);
+ CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
+
+ __ bind(&allocated);
+ int offset = 0;
+ __ LoadHeapObject(r1, instr->hydrogen()->boilerplate());
+ EmitDeepCopy(instr->hydrogen()->boilerplate(), r0, r1, &offset);
+ ASSERT_EQ(size, offset);
+}
+
+
void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
- __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
- __ ldr(r4, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
+ Handle<FixedArray> literals(instr->environment()->closure()->literals());
+ Handle<FixedArray> constant_properties =
+ instr->hydrogen()->constant_properties();
+
+ // Set up the parameters to the stub/runtime call.
+ __ LoadHeapObject(r4, literals);
__ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
- __ mov(r2, Operand(instr->hydrogen()->constant_properties()));
- __ mov(r1, Operand(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
+ __ mov(r2, Operand(constant_properties));
+ int flags = instr->hydrogen()->fast_elements()
+ ? ObjectLiteral::kFastElements
+ : ObjectLiteral::kNoFlags;
+ __ mov(r1, Operand(Smi::FromInt(flags)));
__ Push(r4, r3, r2, r1);
- // Pick the right runtime function to call.
+ // Pick the right runtime function or stub to call.
+ int properties_count = constant_properties->length() / 2;
if (instr->hydrogen()->depth() > 1) {
CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
- } else {
+ } else if (flags != ObjectLiteral::kFastElements ||
+ properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
+ } else {
+ FastCloneShallowObjectStub stub(properties_count);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
}
@@ -4417,7 +4792,7 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
Handle<SharedFunctionInfo> shared_info = instr->shared_info();
bool pretenure = instr->hydrogen()->pretenure();
if (!pretenure && shared_info->num_literals() == 0) {
- FastNewClosureStub stub(shared_info->strict_mode_flag());
+ FastNewClosureStub stub(shared_info->language_mode());
__ mov(r1, Operand(shared_info));
__ push(r1);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
@@ -4558,9 +4933,29 @@ void LCodeGen::EmitIsConstructCall(Register temp1, Register temp2) {
}
+void LCodeGen::EnsureSpaceForLazyDeopt() {
+ // Ensure that we have enough space after the previous lazy-bailout
+ // instruction for patching the code here.
+ int current_pc = masm()->pc_offset();
+ int patch_size = Deoptimizer::patch_size();
+ if (current_pc < last_lazy_deopt_pc_ + patch_size) {
+ int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
+ ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
+ while (padding_size > 0) {
+ __ nop();
+ padding_size -= Assembler::kInstrSize;
+ }
+ }
+ last_lazy_deopt_pc_ = masm()->pc_offset();
+}
+
+
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
- // No code for lazy bailout instruction. Used to capture environment after a
- // call for populating the safepoint data with deoptimization data.
+ EnsureSpaceForLazyDeopt();
+ ASSERT(instr->HasEnvironment());
+ LEnvironment* env = instr->environment();
+ RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
+ safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
@@ -4577,12 +4972,9 @@ void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
__ Push(object, key, strict);
ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
- SafepointGenerator safepoint_generator(this,
- pointers,
- env->deoptimization_index());
+ SafepointGenerator safepoint_generator(
+ this, pointers, Safepoint::kLazyDeopt);
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
}
@@ -4593,27 +4985,20 @@ void LCodeGen::DoIn(LIn* instr) {
__ Push(key, obj);
ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
- SafepointGenerator safepoint_generator(this,
- pointers,
- env->deoptimization_index());
+ SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt);
__ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
}
void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
- {
- PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
- __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
- RegisterLazyDeoptimization(
- instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
- }
-
- // The gap code includes the restoring of the safepoint registers.
- int pc = masm()->pc_offset();
- safepoints_.SetPcAfterGap(pc);
+ PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
+ __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
+ RecordSafepointWithLazyDeopt(
+ instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
+ ASSERT(instr->HasEnvironment());
+ LEnvironment* env = instr->environment();
+ safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
@@ -4628,6 +5013,10 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
LStackCheck* instr_;
};
+ ASSERT(instr->HasEnvironment());
+ LEnvironment* env = instr->environment();
+ // There is no LLazyBailout instruction for stack-checks. We have to
+ // prepare for lazy deoptimization explicitly here.
if (instr->hydrogen()->is_function_entry()) {
// Perform stack overflow check.
Label done;
@@ -4636,7 +5025,10 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
__ b(hs, &done);
StackCheckStub stub;
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ EnsureSpaceForLazyDeopt();
__ bind(&done);
+ RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
+ safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
} else {
ASSERT(instr->hydrogen()->is_backwards_branch());
// Perform stack overflow check if this goto needs it before jumping.
@@ -4645,8 +5037,13 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
__ LoadRoot(ip, Heap::kStackLimitRootIndex);
__ cmp(sp, Operand(ip));
__ b(lo, deferred_stack_check->entry());
+ EnsureSpaceForLazyDeopt();
__ bind(instr->done_label());
deferred_stack_check->SetExit(instr->done_label());
+ RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
+ // Don't record a deoptimization index for the safepoint here.
+ // This will be done explicitly when emitting call and the safepoint in
+ // the deferred code.
}
}
@@ -4662,12 +5059,94 @@ void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
// If the environment were already registered, we would have no way of
// backpatching it with the spill slot operands.
ASSERT(!environment->HasBeenRegistered());
- RegisterEnvironmentForDeoptimization(environment);
+ RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
ASSERT(osr_pc_offset_ == -1);
osr_pc_offset_ = masm()->pc_offset();
}
+void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
+ __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+ __ cmp(r0, ip);
+ DeoptimizeIf(eq, instr->environment());
+
+ Register null_value = r5;
+ __ LoadRoot(null_value, Heap::kNullValueRootIndex);
+ __ cmp(r0, null_value);
+ DeoptimizeIf(eq, instr->environment());
+
+ __ tst(r0, Operand(kSmiTagMask));
+ DeoptimizeIf(eq, instr->environment());
+
+ STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
+ __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
+ DeoptimizeIf(le, instr->environment());
+
+ Label use_cache, call_runtime;
+ __ CheckEnumCache(null_value, &call_runtime);
+
+ __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
+ __ b(&use_cache);
+
+ // Get the set of properties to enumerate.
+ __ bind(&call_runtime);
+ __ push(r0);
+ CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
+
+ __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
+ __ LoadRoot(ip, Heap::kMetaMapRootIndex);
+ __ cmp(r1, ip);
+ DeoptimizeIf(ne, instr->environment());
+ __ bind(&use_cache);
+}
+
+
+void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
+ Register map = ToRegister(instr->map());
+ Register result = ToRegister(instr->result());
+ __ LoadInstanceDescriptors(map, result);
+ __ ldr(result,
+ FieldMemOperand(result, DescriptorArray::kEnumerationIndexOffset));
+ __ ldr(result,
+ FieldMemOperand(result, FixedArray::SizeFor(instr->idx())));
+ __ cmp(result, Operand(0));
+ DeoptimizeIf(eq, instr->environment());
+}
+
+
+void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
+ Register object = ToRegister(instr->value());
+ Register map = ToRegister(instr->map());
+ __ ldr(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset));
+ __ cmp(map, scratch0());
+ DeoptimizeIf(ne, instr->environment());
+}
+
+
+void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
+ Register object = ToRegister(instr->object());
+ Register index = ToRegister(instr->index());
+ Register result = ToRegister(instr->result());
+ Register scratch = scratch0();
+
+ Label out_of_object, done;
+ __ cmp(index, Operand(0));
+ __ b(lt, &out_of_object);
+
+ STATIC_ASSERT(kPointerSizeLog2 > kSmiTagSize);
+ __ add(scratch, object, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ ldr(result, FieldMemOperand(scratch, JSObject::kHeaderSize));
+
+ __ b(&done);
+
+ __ bind(&out_of_object);
+ __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
+ // Index is equal to negated out of object property index plus 1.
+ __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ ldr(result, FieldMemOperand(scratch,
+ FixedArray::kHeaderSize - kPointerSize));
+ __ bind(&done);
+}
#undef __
diff --git a/src/3rdparty/v8/src/arm/lithium-codegen-arm.h b/src/3rdparty/v8/src/arm/lithium-codegen-arm.h
index b01e496..adb6e1b 100644
--- a/src/3rdparty/v8/src/arm/lithium-codegen-arm.h
+++ b/src/3rdparty/v8/src/arm/lithium-codegen-arm.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -58,6 +58,7 @@ class LCodeGen BASE_EMBEDDED {
status_(UNUSED),
deferred_(8),
osr_pc_offset_(-1),
+ last_lazy_deopt_pc_(0),
resolver_(this),
expected_safepoint_kind_(Safepoint::kSimple) {
PopulateDeoptimizationLiteralsWithInlinedFunctions();
@@ -92,6 +93,9 @@ class LCodeGen BASE_EMBEDDED {
// Returns a MemOperand pointing to the high word of a DoubleStackSlot.
MemOperand ToHighMemOperand(LOperand* op) const;
+ bool IsInteger32(LConstantOperand* op) const;
+ Handle<Object> ToHandle(LConstantOperand* op) const;
+
// Try to generate code for the entire chunk, but it may fail if the
// chunk contains constructs we cannot handle. Returns true if the
// code generation attempt succeeded.
@@ -110,10 +114,15 @@ class LCodeGen BASE_EMBEDDED {
void DoDeferredTaggedToI(LTaggedToI* instr);
void DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr);
void DoDeferredStackCheck(LStackCheck* instr);
+ void DoDeferredRandom(LRandom* instr);
void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
- void DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
- Label* map_check);
+ void DoDeferredAllocateObject(LAllocateObject* instr);
+ void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
+ Label* map_check);
+
+ void DoCheckMapCommon(Register reg, Register scratch, Handle<Map> map,
+ CompareMapMode mode, LEnvironment* env);
// Parallel move support.
void DoParallelMove(LParallelMove* move);
@@ -141,7 +150,7 @@ class LCodeGen BASE_EMBEDDED {
bool is_aborted() const { return status_ == ABORTED; }
StrictModeFlag strict_mode_flag() const {
- return info()->strict_mode_flag();
+ return info()->is_classic_mode() ? kNonStrictMode : kStrictMode;
}
LChunk* chunk() const { return chunk_; }
@@ -149,7 +158,7 @@ class LCodeGen BASE_EMBEDDED {
HGraph* graph() const { return chunk_->graph(); }
Register scratch0() { return r9; }
- DwVfpRegister double_scratch0() { return d15; }
+ DwVfpRegister double_scratch0() { return kScratchDoubleReg; }
int GetNextEmittedBlock(int block);
LInstruction* GetNextInstruction();
@@ -215,10 +224,11 @@ class LCodeGen BASE_EMBEDDED {
void LoadHeapObject(Register result, Handle<HeapObject> object);
- void RegisterLazyDeoptimization(LInstruction* instr,
- SafepointMode safepoint_mode);
+ void RecordSafepointWithLazyDeopt(LInstruction* instr,
+ SafepointMode safepoint_mode);
- void RegisterEnvironmentForDeoptimization(LEnvironment* environment);
+ void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
+ Safepoint::DeoptMode mode);
void DeoptimizeIf(Condition cc, LEnvironment* environment);
void AddToTranslation(Translation* translation,
@@ -240,6 +250,7 @@ class LCodeGen BASE_EMBEDDED {
void DoMathSqrt(LUnaryMathOperation* instr);
void DoMathPowHalf(LUnaryMathOperation* instr);
void DoMathLog(LUnaryMathOperation* instr);
+ void DoMathTan(LUnaryMathOperation* instr);
void DoMathCos(LUnaryMathOperation* instr);
void DoMathSin(LUnaryMathOperation* instr);
@@ -247,19 +258,16 @@ class LCodeGen BASE_EMBEDDED {
void RecordSafepoint(LPointerMap* pointers,
Safepoint::Kind kind,
int arguments,
- int deoptimization_index);
- void RecordSafepoint(LPointerMap* pointers, int deoptimization_index);
- void RecordSafepoint(int deoptimization_index);
+ Safepoint::DeoptMode mode);
+ void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
+ void RecordSafepoint(Safepoint::DeoptMode mode);
void RecordSafepointWithRegisters(LPointerMap* pointers,
int arguments,
- int deoptimization_index);
+ Safepoint::DeoptMode mode);
void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers,
int arguments,
- int deoptimization_index);
+ Safepoint::DeoptMode mode);
void RecordPosition(int position);
- int LastSafepointEnd() {
- return static_cast<int>(safepoints_.GetPcAfterGap());
- }
static Condition TokenToCondition(Token::Value op, bool is_unsigned);
void EmitGoto(int block);
@@ -267,6 +275,7 @@ class LCodeGen BASE_EMBEDDED {
void EmitNumberUntagD(Register input,
DoubleRegister result,
bool deoptimize_on_undefined,
+ bool deoptimize_on_minus_zero,
LEnvironment* env);
// Emits optimized code for typeof x == "y". Modifies input register.
@@ -285,6 +294,13 @@ class LCodeGen BASE_EMBEDDED {
Label* is_not_object,
Label* is_object);
+ // Emits optimized code for %_IsString(x). Preserves input register.
+ // Returns the condition on which a final split to
+ // true and false label should be made, to optimize fallthrough.
+ Condition EmitIsString(Register input,
+ Register temp1,
+ Label* is_not_string);
+
// Emits optimized code for %_IsConstructCall().
// Caller should branch on equal condition.
void EmitIsConstructCall(Register temp1, Register temp2);
@@ -294,6 +310,13 @@ class LCodeGen BASE_EMBEDDED {
Handle<Map> type,
Handle<String> name);
+ // Emits optimized code to deep-copy the contents of statically known
+ // object graphs (e.g. object literal boilerplate).
+ void EmitDeepCopy(Handle<JSObject> object,
+ Register result,
+ Register source,
+ int* offset);
+
struct JumpTableEntry {
explicit inline JumpTableEntry(Address entry)
: label(),
@@ -302,6 +325,8 @@ class LCodeGen BASE_EMBEDDED {
Address address;
};
+ void EnsureSpaceForLazyDeopt();
+
LChunk* const chunk_;
MacroAssembler* const masm_;
CompilationInfo* const info_;
@@ -318,6 +343,7 @@ class LCodeGen BASE_EMBEDDED {
TranslationBuffer translations_;
ZoneList<LDeferredCode*> deferred_;
int osr_pc_offset_;
+ int last_lazy_deopt_pc_;
// Builder that keeps track of safepoints in the code. The table
// itself is emitted at the end of the generated code.
@@ -388,7 +414,7 @@ class LDeferredCode: public ZoneObject {
virtual void Generate() = 0;
virtual LInstruction* instr() = 0;
- void SetExit(Label *exit) { external_exit_ = exit; }
+ void SetExit(Label* exit) { external_exit_ = exit; }
Label* entry() { return &entry_; }
Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
int instruction_index() const { return instruction_index_; }
diff --git a/src/3rdparty/v8/src/arm/lithium-gap-resolver-arm.cc b/src/3rdparty/v8/src/arm/lithium-gap-resolver-arm.cc
index 26f60fa..cefca47 100644
--- a/src/3rdparty/v8/src/arm/lithium-gap-resolver-arm.cc
+++ b/src/3rdparty/v8/src/arm/lithium-gap-resolver-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -34,7 +34,6 @@ namespace v8 {
namespace internal {
static const Register kSavedValueRegister = { 9 };
-static const DoubleRegister kSavedDoubleValueRegister = { 0 };
LGapResolver::LGapResolver(LCodeGen* owner)
: cgen_(owner), moves_(32), root_index_(0), in_cycle_(false),
@@ -172,9 +171,9 @@ void LGapResolver::BreakCycle(int index) {
} else if (source->IsStackSlot()) {
__ ldr(kSavedValueRegister, cgen_->ToMemOperand(source));
} else if (source->IsDoubleRegister()) {
- __ vmov(kSavedDoubleValueRegister, cgen_->ToDoubleRegister(source));
+ __ vmov(kScratchDoubleReg, cgen_->ToDoubleRegister(source));
} else if (source->IsDoubleStackSlot()) {
- __ vldr(kSavedDoubleValueRegister, cgen_->ToMemOperand(source));
+ __ vldr(kScratchDoubleReg, cgen_->ToMemOperand(source));
} else {
UNREACHABLE();
}
@@ -193,11 +192,9 @@ void LGapResolver::RestoreValue() {
} else if (saved_destination_->IsStackSlot()) {
__ str(kSavedValueRegister, cgen_->ToMemOperand(saved_destination_));
} else if (saved_destination_->IsDoubleRegister()) {
- __ vmov(cgen_->ToDoubleRegister(saved_destination_),
- kSavedDoubleValueRegister);
+ __ vmov(cgen_->ToDoubleRegister(saved_destination_), kScratchDoubleReg);
} else if (saved_destination_->IsDoubleStackSlot()) {
- __ vstr(kSavedDoubleValueRegister,
- cgen_->ToMemOperand(saved_destination_));
+ __ vstr(kScratchDoubleReg, cgen_->ToMemOperand(saved_destination_));
} else {
UNREACHABLE();
}
@@ -235,8 +232,8 @@ void LGapResolver::EmitMove(int index) {
// ip is overwritten while saving the value to the destination.
// Therefore we can't use ip. It is OK if the read from the source
// destroys ip, since that happens before the value is read.
- __ vldr(kSavedDoubleValueRegister.low(), source_operand);
- __ vstr(kSavedDoubleValueRegister.low(), destination_operand);
+ __ vldr(kScratchDoubleReg.low(), source_operand);
+ __ vstr(kScratchDoubleReg.low(), destination_operand);
} else {
__ ldr(ip, source_operand);
__ str(ip, destination_operand);
@@ -248,13 +245,24 @@ void LGapResolver::EmitMove(int index) {
}
} else if (source->IsConstantOperand()) {
- Operand source_operand = cgen_->ToOperand(source);
+ LConstantOperand* constant_source = LConstantOperand::cast(source);
if (destination->IsRegister()) {
- __ mov(cgen_->ToRegister(destination), source_operand);
+ Register dst = cgen_->ToRegister(destination);
+ if (cgen_->IsInteger32(constant_source)) {
+ __ mov(dst, Operand(cgen_->ToInteger32(constant_source)));
+ } else {
+ __ LoadObject(dst, cgen_->ToHandle(constant_source));
+ }
} else {
ASSERT(destination->IsStackSlot());
ASSERT(!in_cycle_); // Constant moves happen after all cycles are gone.
- __ mov(kSavedValueRegister, source_operand);
+ if (cgen_->IsInteger32(constant_source)) {
+ __ mov(kSavedValueRegister,
+ Operand(cgen_->ToInteger32(constant_source)));
+ } else {
+ __ LoadObject(kSavedValueRegister,
+ cgen_->ToHandle(constant_source));
+ }
__ str(kSavedValueRegister, cgen_->ToMemOperand(destination));
}
@@ -286,8 +294,8 @@ void LGapResolver::EmitMove(int index) {
__ ldr(kSavedValueRegister, source_high_operand);
__ str(kSavedValueRegister, destination_high_operand);
} else {
- __ vldr(kSavedDoubleValueRegister, source_operand);
- __ vstr(kSavedDoubleValueRegister, destination_operand);
+ __ vldr(kScratchDoubleReg, source_operand);
+ __ vstr(kScratchDoubleReg, destination_operand);
}
}
} else {
diff --git a/src/3rdparty/v8/src/arm/macro-assembler-arm.cc b/src/3rdparty/v8/src/arm/macro-assembler-arm.cc
index 90bad75..857c2bf 100644
--- a/src/3rdparty/v8/src/arm/macro-assembler-arm.cc
+++ b/src/3rdparty/v8/src/arm/macro-assembler-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -396,14 +396,27 @@ void MacroAssembler::Usat(Register dst, int satpos, const Operand& src,
void MacroAssembler::LoadRoot(Register destination,
Heap::RootListIndex index,
Condition cond) {
- ldr(destination, MemOperand(roots, index << kPointerSizeLog2), cond);
+ ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}
void MacroAssembler::StoreRoot(Register source,
Heap::RootListIndex index,
Condition cond) {
- str(source, MemOperand(roots, index << kPointerSizeLog2), cond);
+ str(source, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
+}
+
+
+void MacroAssembler::LoadHeapObject(Register result,
+ Handle<HeapObject> object) {
+ if (isolate()->heap()->InNewSpace(*object)) {
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(object);
+ mov(result, Operand(cell));
+ ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
+ } else {
+ mov(result, Operand(object));
+ }
}
@@ -483,6 +496,12 @@ void MacroAssembler::RecordWrite(Register object,
// registers are cp.
ASSERT(!address.is(cp) && !value.is(cp));
+ if (emit_debug_code()) {
+ ldr(ip, MemOperand(address));
+ cmp(ip, value);
+ Check(eq, "Wrong address or value passed to RecordWrite");
+ }
+
Label done;
if (smi_check == INLINE_SMI_CHECK) {
@@ -529,7 +548,7 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests.
SaveFPRegsMode fp_mode,
RememberedSetFinalAction and_then) {
Label done;
- if (FLAG_debug_code) {
+ if (emit_debug_code()) {
Label ok;
JumpIfNotInNewSpace(object, scratch, &ok);
stop("Remembered set pointer is in new space");
@@ -798,12 +817,12 @@ void MacroAssembler::LeaveFrame(StackFrame::Type type) {
void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) {
- // Setup the frame structure on the stack.
+ // Set up the frame structure on the stack.
ASSERT_EQ(2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
ASSERT_EQ(1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
ASSERT_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
Push(lr, fp);
- mov(fp, Operand(sp)); // Setup new frame pointer.
+ mov(fp, Operand(sp)); // Set up new frame pointer.
// Reserve room for saved entry sp and code object.
sub(sp, sp, Operand(2 * kPointerSize));
if (emit_debug_code()) {
@@ -938,10 +957,12 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
Handle<Code> code_constant,
Register code_reg,
Label* done,
+ bool* definitely_mismatches,
InvokeFlag flag,
const CallWrapper& call_wrapper,
CallKind call_kind) {
bool definitely_matches = false;
+ *definitely_mismatches = false;
Label regular_invoke;
// Check whether the expected and actual arguments count match. If not,
@@ -972,6 +993,7 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
// arguments.
definitely_matches = true;
} else {
+ *definitely_mismatches = true;
mov(r2, Operand(expected.immediate()));
}
}
@@ -999,7 +1021,9 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
SetCallKind(r5, call_kind);
Call(adaptor);
call_wrapper.AfterCall();
- b(done);
+ if (!*definitely_mismatches) {
+ b(done);
+ }
} else {
SetCallKind(r5, call_kind);
Jump(adaptor, RelocInfo::CODE_TARGET);
@@ -1019,23 +1043,26 @@ void MacroAssembler::InvokeCode(Register code,
ASSERT(flag == JUMP_FUNCTION || has_frame());
Label done;
-
- InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag,
+ bool definitely_mismatches = false;
+ InvokePrologue(expected, actual, Handle<Code>::null(), code,
+ &done, &definitely_mismatches, flag,
call_wrapper, call_kind);
- if (flag == CALL_FUNCTION) {
- call_wrapper.BeforeCall(CallSize(code));
- SetCallKind(r5, call_kind);
- Call(code);
- call_wrapper.AfterCall();
- } else {
- ASSERT(flag == JUMP_FUNCTION);
- SetCallKind(r5, call_kind);
- Jump(code);
- }
+ if (!definitely_mismatches) {
+ if (flag == CALL_FUNCTION) {
+ call_wrapper.BeforeCall(CallSize(code));
+ SetCallKind(r5, call_kind);
+ Call(code);
+ call_wrapper.AfterCall();
+ } else {
+ ASSERT(flag == JUMP_FUNCTION);
+ SetCallKind(r5, call_kind);
+ Jump(code);
+ }
- // Continue here if InvokePrologue does handle the invocation due to
- // mismatched parameter counts.
- bind(&done);
+ // Continue here if InvokePrologue does handle the invocation due to
+ // mismatched parameter counts.
+ bind(&done);
+ }
}
@@ -1049,20 +1076,23 @@ void MacroAssembler::InvokeCode(Handle<Code> code,
ASSERT(flag == JUMP_FUNCTION || has_frame());
Label done;
-
- InvokePrologue(expected, actual, code, no_reg, &done, flag,
+ bool definitely_mismatches = false;
+ InvokePrologue(expected, actual, code, no_reg,
+ &done, &definitely_mismatches, flag,
NullCallWrapper(), call_kind);
- if (flag == CALL_FUNCTION) {
- SetCallKind(r5, call_kind);
- Call(code, rmode);
- } else {
- SetCallKind(r5, call_kind);
- Jump(code, rmode);
- }
+ if (!definitely_mismatches) {
+ if (flag == CALL_FUNCTION) {
+ SetCallKind(r5, call_kind);
+ Call(code, rmode);
+ } else {
+ SetCallKind(r5, call_kind);
+ Jump(code, rmode);
+ }
- // Continue here if InvokePrologue does handle the invocation due to
- // mismatched parameter counts.
- bind(&done);
+ // Continue here if InvokePrologue does handle the invocation due to
+ // mismatched parameter counts.
+ bind(&done);
+ }
}
@@ -1097,12 +1127,13 @@ void MacroAssembler::InvokeFunction(Register fun,
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
const ParameterCount& actual,
InvokeFlag flag,
+ const CallWrapper& call_wrapper,
CallKind call_kind) {
// You can't call a function without a valid frame.
ASSERT(flag == JUMP_FUNCTION || has_frame());
// Get the function and setup the context.
- mov(r1, Operand(function));
+ LoadHeapObject(r1, function);
ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
ParameterCount expected(function->shared()->formal_parameter_count());
@@ -1110,7 +1141,7 @@ void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
// allow recompilation to take effect without changing any of the
// call sites.
ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
- InvokeCode(r3, expected, actual, flag, NullCallWrapper(), call_kind);
+ InvokeCode(r3, expected, actual, flag, call_wrapper, call_kind);
}
@@ -1157,47 +1188,40 @@ void MacroAssembler::DebugBreak() {
#endif
-void MacroAssembler::PushTryHandler(CodeLocation try_location,
- HandlerType type) {
+void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
+ int handler_index) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
-
- // The pc (return address) is passed in register lr.
- if (try_location == IN_JAVASCRIPT) {
- if (type == TRY_CATCH_HANDLER) {
- mov(r3, Operand(StackHandler::TRY_CATCH));
- } else {
- mov(r3, Operand(StackHandler::TRY_FINALLY));
- }
- stm(db_w, sp, r3.bit() | cp.bit() | fp.bit() | lr.bit());
- // Save the current handler as the next handler.
- mov(r3, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
- ldr(r1, MemOperand(r3));
- push(r1);
- // Link this handler as the new current one.
- str(sp, MemOperand(r3));
+ STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
+
+ // For the JSEntry handler, we must preserve r0-r4, r5-r7 are available.
+ // We will build up the handler from the bottom by pushing on the stack.
+ // Set up the code object (r5) and the state (r6) for pushing.
+ unsigned state =
+ StackHandler::IndexField::encode(handler_index) |
+ StackHandler::KindField::encode(kind);
+ mov(r5, Operand(CodeObject()));
+ mov(r6, Operand(state));
+
+ // Push the frame pointer, context, state, and code object.
+ if (kind == StackHandler::JS_ENTRY) {
+ mov(r7, Operand(Smi::FromInt(0))); // Indicates no context.
+ mov(ip, Operand(0, RelocInfo::NONE)); // NULL frame pointer.
+ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | ip.bit());
} else {
- // Must preserve r0-r4, r5-r7 are available.
- ASSERT(try_location == IN_JS_ENTRY);
- // The frame pointer does not point to a JS frame so we save NULL
- // for fp. We expect the code throwing an exception to check fp
- // before dereferencing it to restore the context.
- mov(r5, Operand(StackHandler::ENTRY)); // State.
- mov(r6, Operand(Smi::FromInt(0))); // Indicates no context.
- mov(r7, Operand(0, RelocInfo::NONE)); // NULL frame pointer.
- stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | lr.bit());
- // Save the current handler as the next handler.
- mov(r7, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
- ldr(r6, MemOperand(r7));
- push(r6);
- // Link this handler as the new current one.
- str(sp, MemOperand(r7));
+ stm(db_w, sp, r5.bit() | r6.bit() | cp.bit() | fp.bit());
}
+
+ // Link the current handler as the next handler.
+ mov(r6, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
+ ldr(r5, MemOperand(r6));
+ push(r5);
+ // Set this new handler as the current one.
+ str(sp, MemOperand(r6));
}
@@ -1210,111 +1234,90 @@ void MacroAssembler::PopTryHandler() {
}
+void MacroAssembler::JumpToHandlerEntry() {
+ // Compute the handler entry address and jump to it. The handler table is
+ // a fixed array of (smi-tagged) code offsets.
+ // r0 = exception, r1 = code object, r2 = state.
+ ldr(r3, FieldMemOperand(r1, Code::kHandlerTableOffset)); // Handler table.
+ add(r3, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ mov(r2, Operand(r2, LSR, StackHandler::kKindWidth)); // Handler index.
+ ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2)); // Smi-tagged offset.
+ add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start.
+ add(pc, r1, Operand(r2, ASR, kSmiTagSize)); // Jump.
+}
+
+
void MacroAssembler::Throw(Register value) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
- // r0 is expected to hold the exception.
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+ STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
+
+ // The exception is expected in r0.
if (!value.is(r0)) {
mov(r0, value);
}
-
- // Drop the sp to the top of the handler.
+ // Drop the stack pointer to the top of the top handler.
mov(r3, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
ldr(sp, MemOperand(r3));
-
// Restore the next handler.
pop(r2);
str(r2, MemOperand(r3));
- // Restore context and frame pointer, discard state (r3).
- ldm(ia_w, sp, r3.bit() | cp.bit() | fp.bit());
+ // Get the code object (r1) and state (r2). Restore the context and frame
+ // pointer.
+ ldm(ia_w, sp, r1.bit() | r2.bit() | cp.bit() | fp.bit());
// If the handler is a JS frame, restore the context to the frame.
- // (r3 == ENTRY) == (fp == 0) == (cp == 0), so we could test any
- // of them.
- cmp(r3, Operand(StackHandler::ENTRY));
+ // (kind == ENTRY) == (fp == 0) == (cp == 0), so we could test either fp
+ // or cp.
+ tst(cp, cp);
str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
-#ifdef DEBUG
- if (emit_debug_code()) {
- mov(lr, Operand(pc));
- }
-#endif
- pop(pc);
+ JumpToHandlerEntry();
}
-void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
- Register value) {
+void MacroAssembler::ThrowUncatchable(Register value) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
- // r0 is expected to hold the exception.
+ STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
+
+ // The exception is expected in r0.
if (!value.is(r0)) {
mov(r0, value);
}
-
- // Drop sp to the top stack handler.
+ // Drop the stack pointer to the top of the top stack handler.
mov(r3, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
ldr(sp, MemOperand(r3));
// Unwind the handlers until the ENTRY handler is found.
- Label loop, done;
- bind(&loop);
- // Load the type of the current stack handler.
- const int kStateOffset = StackHandlerConstants::kStateOffset;
- ldr(r2, MemOperand(sp, kStateOffset));
- cmp(r2, Operand(StackHandler::ENTRY));
- b(eq, &done);
- // Fetch the next handler in the list.
- const int kNextOffset = StackHandlerConstants::kNextOffset;
- ldr(sp, MemOperand(sp, kNextOffset));
- jmp(&loop);
- bind(&done);
-
- // Set the top handler address to next handler past the current ENTRY handler.
+ Label fetch_next, check_kind;
+ jmp(&check_kind);
+ bind(&fetch_next);
+ ldr(sp, MemOperand(sp, StackHandlerConstants::kNextOffset));
+
+ bind(&check_kind);
+ STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
+ ldr(r2, MemOperand(sp, StackHandlerConstants::kStateOffset));
+ tst(r2, Operand(StackHandler::KindField::kMask));
+ b(ne, &fetch_next);
+
+ // Set the top handler address to next handler past the top ENTRY handler.
pop(r2);
str(r2, MemOperand(r3));
+ // Get the code object (r1) and state (r2). Clear the context and frame
+ // pointer (0 was saved in the handler).
+ ldm(ia_w, sp, r1.bit() | r2.bit() | cp.bit() | fp.bit());
- if (type == OUT_OF_MEMORY) {
- // Set external caught exception to false.
- ExternalReference external_caught(
- Isolate::kExternalCaughtExceptionAddress, isolate());
- mov(r0, Operand(false, RelocInfo::NONE));
- mov(r2, Operand(external_caught));
- str(r0, MemOperand(r2));
-
- // Set pending exception and r0 to out of memory exception.
- Failure* out_of_memory = Failure::OutOfMemoryException();
- mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
- mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate())));
- str(r0, MemOperand(r2));
- }
-
- // Stack layout at this point. See also StackHandlerConstants.
- // sp -> state (ENTRY)
- // cp
- // fp
- // lr
-
- // Restore context and frame pointer, discard state (r2).
- ldm(ia_w, sp, r2.bit() | cp.bit() | fp.bit());
-#ifdef DEBUG
- if (emit_debug_code()) {
- mov(lr, Operand(pc));
- }
-#endif
- pop(pc);
+ JumpToHandlerEntry();
}
@@ -1395,6 +1398,35 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
}
+void MacroAssembler::GetNumberHash(Register t0, Register scratch) {
+ // First of all we assign the hash seed to scratch.
+ LoadRoot(scratch, Heap::kHashSeedRootIndex);
+ SmiUntag(scratch);
+
+ // Xor original key with a seed.
+ eor(t0, t0, Operand(scratch));
+
+ // Compute the hash code from the untagged key. This must be kept in sync
+ // with ComputeIntegerHash in utils.h.
+ //
+ // hash = ~hash + (hash << 15);
+ mvn(scratch, Operand(t0));
+ add(t0, scratch, Operand(t0, LSL, 15));
+ // hash = hash ^ (hash >> 12);
+ eor(t0, t0, Operand(t0, LSR, 12));
+ // hash = hash + (hash << 2);
+ add(t0, t0, Operand(t0, LSL, 2));
+ // hash = hash ^ (hash >> 4);
+ eor(t0, t0, Operand(t0, LSR, 4));
+ // hash = hash * 2057;
+ mov(scratch, Operand(t0, LSL, 11));
+ add(t0, t0, Operand(t0, LSL, 3));
+ add(t0, t0, scratch);
+ // hash = hash ^ (hash >> 16);
+ eor(t0, t0, Operand(t0, LSR, 16));
+}
+
+
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
Register elements,
Register key,
@@ -1424,26 +1456,10 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
// t2 - used for the index into the dictionary.
Label done;
- // Compute the hash code from the untagged key. This must be kept in sync
- // with ComputeIntegerHash in utils.h.
- //
- // hash = ~hash + (hash << 15);
- mvn(t1, Operand(t0));
- add(t0, t1, Operand(t0, LSL, 15));
- // hash = hash ^ (hash >> 12);
- eor(t0, t0, Operand(t0, LSR, 12));
- // hash = hash + (hash << 2);
- add(t0, t0, Operand(t0, LSL, 2));
- // hash = hash ^ (hash >> 4);
- eor(t0, t0, Operand(t0, LSR, 4));
- // hash = hash * 2057;
- mov(t1, Operand(2057));
- mul(t0, t0, t1);
- // hash = hash ^ (hash >> 16);
- eor(t0, t0, Operand(t0, LSR, 16));
+ GetNumberHash(t0, t1);
// Compute the capacity mask.
- ldr(t1, FieldMemOperand(elements, NumberDictionary::kCapacityOffset));
+ ldr(t1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset));
mov(t1, Operand(t1, ASR, kSmiTagSize)); // convert smi to int
sub(t1, t1, Operand(1));
@@ -1454,17 +1470,17 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
mov(t2, t0);
// Compute the masked index: (hash + i + i * i) & mask.
if (i > 0) {
- add(t2, t2, Operand(NumberDictionary::GetProbeOffset(i)));
+ add(t2, t2, Operand(SeededNumberDictionary::GetProbeOffset(i)));
}
and_(t2, t2, Operand(t1));
// Scale the index by multiplying by the element size.
- ASSERT(NumberDictionary::kEntrySize == 3);
+ ASSERT(SeededNumberDictionary::kEntrySize == 3);
add(t2, t2, Operand(t2, LSL, 1)); // t2 = t2 * 3
// Check if the key is identical to the name.
add(t2, elements, Operand(t2, LSL, kPointerSizeLog2));
- ldr(ip, FieldMemOperand(t2, NumberDictionary::kElementsStartOffset));
+ ldr(ip, FieldMemOperand(t2, SeededNumberDictionary::kElementsStartOffset));
cmp(key, Operand(ip));
if (i != kProbes - 1) {
b(eq, &done);
@@ -1477,14 +1493,14 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
// Check that the value is a normal property.
// t2: elements + (index * kPointerSize)
const int kDetailsOffset =
- NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
+ SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
ldr(t1, FieldMemOperand(t2, kDetailsOffset));
tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
b(ne, miss);
// Get the value at the masked, scaled index and return.
const int kValueOffset =
- NumberDictionary::kElementsStartOffset + kPointerSize;
+ SeededNumberDictionary::kElementsStartOffset + kPointerSize;
ldr(result, FieldMemOperand(t2, kValueOffset));
}
@@ -1973,18 +1989,49 @@ void MacroAssembler::StoreNumberToDoubleElements(Register value_reg,
}
+void MacroAssembler::CompareMap(Register obj,
+ Register scratch,
+ Handle<Map> map,
+ Label* early_success,
+ CompareMapMode mode) {
+ ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
+ cmp(scratch, Operand(map));
+ if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
+ Map* transitioned_fast_element_map(
+ map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL));
+ ASSERT(transitioned_fast_element_map == NULL ||
+ map->elements_kind() != FAST_ELEMENTS);
+ if (transitioned_fast_element_map != NULL) {
+ b(eq, early_success);
+ cmp(scratch, Operand(Handle<Map>(transitioned_fast_element_map)));
+ }
+
+ Map* transitioned_double_map(
+ map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL));
+ ASSERT(transitioned_double_map == NULL ||
+ map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
+ if (transitioned_double_map != NULL) {
+ b(eq, early_success);
+ cmp(scratch, Operand(Handle<Map>(transitioned_double_map)));
+ }
+ }
+}
+
+
void MacroAssembler::CheckMap(Register obj,
Register scratch,
Handle<Map> map,
Label* fail,
- SmiCheckType smi_check_type) {
+ SmiCheckType smi_check_type,
+ CompareMapMode mode) {
if (smi_check_type == DO_SMI_CHECK) {
JumpIfSmi(obj, fail);
}
- ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
- mov(ip, Operand(map));
- cmp(scratch, ip);
+
+ Label success;
+ CompareMap(obj, scratch, map, &success, mode);
b(ne, fail);
+ bind(&success);
}
@@ -2327,7 +2374,7 @@ void MacroAssembler::ConvertToInt32(Register source,
b(gt, not_int32);
// We know the exponent is smaller than 30 (biased). If it is less than
- // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, ie
+ // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, i.e.
// it rounds to zero.
const uint32_t zero_exponent = HeapNumber::kExponentBias + 0;
sub(scratch2, scratch2, Operand(zero_exponent - fudge_factor), SetCC);
@@ -2807,6 +2854,47 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
}
+void MacroAssembler::LoadTransitionedArrayMapConditional(
+ ElementsKind expected_kind,
+ ElementsKind transitioned_kind,
+ Register map_in_out,
+ Register scratch,
+ Label* no_map_match) {
+ // Load the global or builtins object from the current context.
+ ldr(scratch, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ ldr(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));
+
+ // Check that the function's map is the same as the expected cached map.
+ int expected_index =
+ Context::GetContextMapIndexFromElementsKind(expected_kind);
+ ldr(ip, MemOperand(scratch, Context::SlotOffset(expected_index)));
+ cmp(map_in_out, ip);
+ b(ne, no_map_match);
+
+ // Use the transitioned cached map.
+ int trans_index =
+ Context::GetContextMapIndexFromElementsKind(transitioned_kind);
+ ldr(map_in_out, MemOperand(scratch, Context::SlotOffset(trans_index)));
+}
+
+
+void MacroAssembler::LoadInitialArrayMap(
+ Register function_in, Register scratch, Register map_out) {
+ ASSERT(!function_in.is(map_out));
+ Label done;
+ ldr(map_out, FieldMemOperand(function_in,
+ JSFunction::kPrototypeOrInitialMapOffset));
+ if (!FLAG_smi_only_arrays) {
+ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ map_out,
+ scratch,
+ &done);
+ }
+ bind(&done);
+}
+
+
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
// Load the global or builtins object from the current context.
ldr(function, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
@@ -2867,6 +2955,22 @@ void MacroAssembler::JumpIfNotBothSmi(Register reg1,
}
+void MacroAssembler::UntagAndJumpIfSmi(
+ Register dst, Register src, Label* smi_case) {
+ STATIC_ASSERT(kSmiTag == 0);
+ mov(dst, Operand(src, ASR, kSmiTagSize), SetCC);
+ b(cc, smi_case); // Shifter carry is not set for a smi.
+}
+
+
+void MacroAssembler::UntagAndJumpIfNotSmi(
+ Register dst, Register src, Label* non_smi_case) {
+ STATIC_ASSERT(kSmiTag == 0);
+ mov(dst, Operand(src, ASR, kSmiTagSize), SetCC);
+ b(cs, non_smi_case); // Shifter carry is set for a non-smi.
+}
+
+
void MacroAssembler::JumpIfEitherSmi(Register reg1,
Register reg2,
Label* on_either_smi) {
@@ -3441,7 +3545,7 @@ void MacroAssembler::EnsureNotWhite(
tst(mask_scratch, load_scratch);
b(ne, &done);
- if (FLAG_debug_code) {
+ if (emit_debug_code()) {
// Check for impossible bit pattern.
Label ok;
// LSL may overflow, making the check conservative.
@@ -3543,8 +3647,8 @@ void MacroAssembler::ClampDoubleToUint8(Register result_reg,
bind(&in_bounds);
Vmov(temp_double_reg, 0.5);
vadd(temp_double_reg, input_reg, temp_double_reg);
- vcvt_u32_f64(s0, temp_double_reg);
- vmov(result_reg, s0);
+ vcvt_u32_f64(temp_double_reg.low(), temp_double_reg);
+ vmov(result_reg, temp_double_reg.low());
bind(&done);
}
@@ -3560,6 +3664,52 @@ void MacroAssembler::LoadInstanceDescriptors(Register map,
}
+void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
+ Label next;
+ // Preload a couple of values used in the loop.
+ Register empty_fixed_array_value = r6;
+ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
+ Register empty_descriptor_array_value = r7;
+ LoadRoot(empty_descriptor_array_value,
+ Heap::kEmptyDescriptorArrayRootIndex);
+ mov(r1, r0);
+ bind(&next);
+
+ // Check that there are no elements. Register r1 contains the
+ // current JS object we've reached through the prototype chain.
+ ldr(r2, FieldMemOperand(r1, JSObject::kElementsOffset));
+ cmp(r2, empty_fixed_array_value);
+ b(ne, call_runtime);
+
+ // Check that instance descriptors are not empty so that we can
+ // check for an enum cache. Leave the map in r2 for the subsequent
+ // prototype load.
+ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
+ ldr(r3, FieldMemOperand(r2, Map::kInstanceDescriptorsOrBitField3Offset));
+ JumpIfSmi(r3, call_runtime);
+
+ // Check that there is an enum cache in the non-empty instance
+ // descriptors (r3). This is the case if the next enumeration
+ // index field does not contain a smi.
+ ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumerationIndexOffset));
+ JumpIfSmi(r3, call_runtime);
+
+ // For all objects but the receiver, check that the cache is empty.
+ Label check_prototype;
+ cmp(r1, r0);
+ b(eq, &check_prototype);
+ ldr(r3, FieldMemOperand(r3, DescriptorArray::kEnumCacheBridgeCacheOffset));
+ cmp(r3, empty_fixed_array_value);
+ b(ne, call_runtime);
+
+ // Load the prototype from the map and loop if non-null.
+ bind(&check_prototype);
+ ldr(r1, FieldMemOperand(r2, Map::kPrototypeOffset));
+ cmp(r1, null_value);
+ b(ne, &next);
+}
+
+
bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
if (r1.is(r2)) return true;
if (r1.is(r3)) return true;
diff --git a/src/3rdparty/v8/src/arm/macro-assembler-arm.h b/src/3rdparty/v8/src/arm/macro-assembler-arm.h
index 6692199..47afa93 100644
--- a/src/3rdparty/v8/src/arm/macro-assembler-arm.h
+++ b/src/3rdparty/v8/src/arm/macro-assembler-arm.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -39,12 +39,12 @@ namespace internal {
// Static helper functions
// Generate a MemOperand for loading a field from an object.
-static inline MemOperand FieldMemOperand(Register object, int offset) {
+inline MemOperand FieldMemOperand(Register object, int offset) {
return MemOperand(object, offset - kHeapObjectTag);
}
-static inline Operand SmiUntagOperand(Register object) {
+inline Operand SmiUntagOperand(Register object) {
return Operand(object, ASR, kSmiTagSize);
}
@@ -52,7 +52,7 @@ static inline Operand SmiUntagOperand(Register object) {
// Give alias names to registers
const Register cp = { 8 }; // JavaScript context pointer
-const Register roots = { 10 }; // Roots array pointer.
+const Register kRootRegister = { 10 }; // Roots array pointer.
// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
@@ -166,6 +166,16 @@ class MacroAssembler: public Assembler {
Heap::RootListIndex index,
Condition cond = al);
+ void LoadHeapObject(Register dst, Handle<HeapObject> object);
+
+ void LoadObject(Register result, Handle<Object> object) {
+ if (object->IsHeapObject()) {
+ LoadHeapObject(result, Handle<HeapObject>::cast(object));
+ } else {
+ Move(result, object);
+ }
+ }
+
// ---------------------------------------------------------------------------
// GC Support
@@ -233,7 +243,7 @@ class MacroAssembler: public Assembler {
Register scratch3,
Label* object_is_white_and_not_data);
- // Detects conservatively whether an object is data-only, ie it does need to
+ // Detects conservatively whether an object is data-only, i.e. it does need to
// be scanned by the garbage collector.
void JumpIfDataObject(Register value,
Register scratch,
@@ -481,6 +491,22 @@ class MacroAssembler: public Assembler {
void LoadContext(Register dst, int context_chain_length);
+ // Conditionally load the cached Array transitioned map of type
+ // transitioned_kind from the global context if the map in register
+ // map_in_out is the cached Array map in the global context of
+ // expected_kind.
+ void LoadTransitionedArrayMapConditional(
+ ElementsKind expected_kind,
+ ElementsKind transitioned_kind,
+ Register map_in_out,
+ Register scratch,
+ Label* no_map_match);
+
+ // Load the initial map for new Arrays from a JSFunction.
+ void LoadInitialArrayMap(Register function_in,
+ Register scratch,
+ Register map_out);
+
void LoadGlobalFunction(int index, Register function);
// Load the initial map from the global function. The registers
@@ -489,10 +515,16 @@ class MacroAssembler: public Assembler {
Register map,
Register scratch);
+ void InitializeRootRegister() {
+ ExternalReference roots_array_start =
+ ExternalReference::roots_array_start(isolate());
+ mov(kRootRegister, Operand(roots_array_start));
+ }
+
// ---------------------------------------------------------------------------
// JavaScript invokes
- // Setup call kind marking in ecx. The method takes ecx as an
+ // Set up call kind marking in ecx. The method takes ecx as an
// explicit first parameter to make the code more readable at the
// call sites.
void SetCallKind(Register dst, CallKind kind);
@@ -523,6 +555,7 @@ class MacroAssembler: public Assembler {
void InvokeFunction(Handle<JSFunction> function,
const ParameterCount& actual,
InvokeFlag flag,
+ const CallWrapper& call_wrapper,
CallKind call_kind);
void IsObjectJSObjectType(Register heap_object,
@@ -549,20 +582,18 @@ class MacroAssembler: public Assembler {
// Exception handling
// Push a new try handler and link into try handler chain.
- // The return address must be passed in register lr.
- // On exit, r0 contains TOS (code slot).
- void PushTryHandler(CodeLocation try_location, HandlerType type);
+ void PushTryHandler(StackHandler::Kind kind, int handler_index);
// Unlink the stack handler on top of the stack from the try handler chain.
// Must preserve the result register.
void PopTryHandler();
- // Passes thrown value (in r0) to the handler of top of the try handler chain.
+ // Passes thrown value to the handler of top of the try handler chain.
void Throw(Register value);
// Propagates an uncatchable exception to the top of the current JS stack's
// handler chain.
- void ThrowUncatchable(UncatchableExceptionType type, Register value);
+ void ThrowUncatchable(Register value);
// ---------------------------------------------------------------------------
// Inline caching support
@@ -574,6 +605,7 @@ class MacroAssembler: public Assembler {
Register scratch,
Label* miss);
+ void GetNumberHash(Register t0, Register scratch);
void LoadFromNumberDictionary(Label* miss,
Register elements,
@@ -589,7 +621,7 @@ class MacroAssembler: public Assembler {
}
// Check if the given instruction is a 'type' marker.
- // ie. check if is is a mov r<type>, r<type> (referenced as nop(type))
+ // i.e. check if is is a mov r<type>, r<type> (referenced as nop(type))
// These instructions are generated to mark special location in the code,
// like some special IC code.
static inline bool IsMarkedCode(Instr instr, int type) {
@@ -769,7 +801,8 @@ class MacroAssembler: public Assembler {
// Check to see if maybe_number can be stored as a double in
// FastDoubleElements. If it can, store it at the index specified by key in
- // the FastDoubleElements array elements, otherwise jump to fail.
+ // the FastDoubleElements array elements. Otherwise jump to fail, in which
+ // case scratch2, scratch3 and scratch4 are unmodified.
void StoreNumberToDoubleElements(Register value_reg,
Register key_reg,
Register receiver_reg,
@@ -780,15 +813,26 @@ class MacroAssembler: public Assembler {
Register scratch4,
Label* fail);
- // Check if the map of an object is equal to a specified map (either
- // given directly or as an index into the root list) and branch to
- // label if not. Skip the smi check if not required (object is known
- // to be a heap object)
+ // Compare an object's map with the specified map and its transitioned
+ // elements maps if mode is ALLOW_ELEMENT_TRANSITION_MAPS. Condition flags are
+ // set with result of map compare. If multiple map compares are required, the
+ // compare sequences branches to early_success.
+ void CompareMap(Register obj,
+ Register scratch,
+ Handle<Map> map,
+ Label* early_success,
+ CompareMapMode mode = REQUIRE_EXACT_MAP);
+
+ // Check if the map of an object is equal to a specified map and branch to
+ // label if not. Skip the smi check if not required (object is known to be a
+ // heap object). If mode is ALLOW_ELEMENT_TRANSITION_MAPS, then also match
+ // against maps that are ElementsKind transition maps of the specified map.
void CheckMap(Register obj,
Register scratch,
Handle<Map> map,
Label* fail,
- SmiCheckType smi_check_type);
+ SmiCheckType smi_check_type,
+ CompareMapMode mode = REQUIRE_EXACT_MAP);
void CheckMap(Register obj,
@@ -880,7 +924,7 @@ class MacroAssembler: public Assembler {
// Truncates a double using a specific rounding mode.
// Clears the z flag (ne condition) if an overflow occurs.
// If exact_conversion is true, the z flag is also cleared if the conversion
- // was inexact, ie. if the double value could not be converted exactly
+ // was inexact, i.e. if the double value could not be converted exactly
// to a 32bit integer.
void EmitVFPTruncate(VFPRoundingMode rounding_mode,
SwVfpRegister result,
@@ -997,7 +1041,7 @@ class MacroAssembler: public Assembler {
// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Restores context. stack_space
- // - space to be unwound on exit (includes the call js arguments space and
+ // - space to be unwound on exit (includes the call JS arguments space and
// the additional space allocated for the fast call).
void CallApiFunctionAndReturn(ExternalReference function, int stack_space);
@@ -1115,6 +1159,14 @@ class MacroAssembler: public Assembler {
mov(dst, Operand(src, ASR, kSmiTagSize), s);
}
+ // Untag the source value into destination and jump if source is a smi.
+ // Souce and destination can be the same register.
+ void UntagAndJumpIfSmi(Register dst, Register src, Label* smi_case);
+
+ // Untag the source value into destination and jump if source is not a smi.
+ // Souce and destination can be the same register.
+ void UntagAndJumpIfNotSmi(Register dst, Register src, Label* non_smi_case);
+
// Jump the register contains a smi.
inline void JumpIfSmi(Register value, Label* smi_label) {
tst(value, Operand(kSmiTagMask));
@@ -1207,6 +1259,10 @@ class MacroAssembler: public Assembler {
void EnterFrame(StackFrame::Type type);
void LeaveFrame(StackFrame::Type type);
+ // Expects object in r0 and returns map with validated enum cache
+ // in r0. Assumes that any other register can be used as a scratch.
+ void CheckEnumCache(Register null_value, Label* call_runtime);
+
private:
void CallCFunctionHelper(Register function,
int num_reg_arguments,
@@ -1220,6 +1276,7 @@ class MacroAssembler: public Assembler {
Handle<Code> code_constant,
Register code_reg,
Label* done,
+ bool* definitely_mismatches,
InvokeFlag flag,
const CallWrapper& call_wrapper,
CallKind call_kind);
@@ -1243,6 +1300,10 @@ class MacroAssembler: public Assembler {
Register bitmap_reg,
Register mask_reg);
+ // Helper for throwing exceptions. Compute a handler address and jump to
+ // it. See the implementation for register usage.
+ void JumpToHandlerEntry();
+
// Compute memory operands for safepoint stack slots.
static int SafepointRegisterStackIndex(int reg_code);
MemOperand SafepointRegisterSlot(Register reg);
@@ -1296,21 +1357,16 @@ class CodePatcher {
// -----------------------------------------------------------------------------
// Static helper functions.
-static MemOperand ContextOperand(Register context, int index) {
+inline MemOperand ContextOperand(Register context, int index) {
return MemOperand(context, Context::SlotOffset(index));
}
-static inline MemOperand GlobalObjectOperand() {
+inline MemOperand GlobalObjectOperand() {
return ContextOperand(cp, Context::GLOBAL_INDEX);
}
-static inline MemOperand QmlGlobalObjectOperand() {
- return ContextOperand(cp, Context::QML_GLOBAL_INDEX);
-}
-
-
#ifdef GENERATED_CODE_COVERAGE
#define CODE_COVERAGE_STRINGIFY(x) #x
#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
diff --git a/src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.cc b/src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.cc
index b212f9f..a833624 100644
--- a/src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.cc
+++ b/src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.cc
@@ -452,8 +452,12 @@ void RegExpMacroAssemblerARM::CheckNotCharacter(unsigned c,
void RegExpMacroAssemblerARM::CheckCharacterAfterAnd(uint32_t c,
uint32_t mask,
Label* on_equal) {
- __ and_(r0, current_character(), Operand(mask));
- __ cmp(r0, Operand(c));
+ if (c == 0) {
+ __ tst(current_character(), Operand(mask));
+ } else {
+ __ and_(r0, current_character(), Operand(mask));
+ __ cmp(r0, Operand(c));
+ }
BranchOrBacktrack(eq, on_equal);
}
@@ -461,8 +465,12 @@ void RegExpMacroAssemblerARM::CheckCharacterAfterAnd(uint32_t c,
void RegExpMacroAssemblerARM::CheckNotCharacterAfterAnd(unsigned c,
unsigned mask,
Label* on_not_equal) {
- __ and_(r0, current_character(), Operand(mask));
- __ cmp(r0, Operand(c));
+ if (c == 0) {
+ __ tst(current_character(), Operand(mask));
+ } else {
+ __ and_(r0, current_character(), Operand(mask));
+ __ cmp(r0, Operand(c));
+ }
BranchOrBacktrack(ne, on_not_equal);
}
@@ -472,7 +480,7 @@ void RegExpMacroAssemblerARM::CheckNotCharacterAfterMinusAnd(
uc16 minus,
uc16 mask,
Label* on_not_equal) {
- ASSERT(minus < String::kMaxUC16CharCode);
+ ASSERT(minus < String::kMaxUtf16CodeUnit);
__ sub(r0, current_character(), Operand(minus));
__ and_(r0, r0, Operand(mask));
__ cmp(r0, Operand(c));
@@ -480,6 +488,44 @@ void RegExpMacroAssemblerARM::CheckNotCharacterAfterMinusAnd(
}
+void RegExpMacroAssemblerARM::CheckCharacterInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_in_range) {
+ __ sub(r0, current_character(), Operand(from));
+ __ cmp(r0, Operand(to - from));
+ BranchOrBacktrack(ls, on_in_range); // Unsigned lower-or-same condition.
+}
+
+
+void RegExpMacroAssemblerARM::CheckCharacterNotInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_not_in_range) {
+ __ sub(r0, current_character(), Operand(from));
+ __ cmp(r0, Operand(to - from));
+ BranchOrBacktrack(hi, on_not_in_range); // Unsigned higher condition.
+}
+
+
+void RegExpMacroAssemblerARM::CheckBitInTable(
+ Handle<ByteArray> table,
+ Label* on_bit_set) {
+ __ mov(r0, Operand(table));
+ if (mode_ != ASCII || kTableMask != String::kMaxAsciiCharCode) {
+ __ and_(r1, current_character(), Operand(kTableSize - 1));
+ __ add(r1, r1, Operand(ByteArray::kHeaderSize - kHeapObjectTag));
+ } else {
+ __ add(r1,
+ current_character(),
+ Operand(ByteArray::kHeaderSize - kHeapObjectTag));
+ }
+ __ ldrb(r0, MemOperand(r0, r1));
+ __ cmp(r0, Operand(0));
+ BranchOrBacktrack(ne, on_bit_set);
+}
+
+
bool RegExpMacroAssemblerARM::CheckSpecialCharacterClass(uc16 type,
Label* on_no_match) {
// Range checks (c in min..max) are generally implemented by an unsigned
@@ -571,7 +617,7 @@ bool RegExpMacroAssemblerARM::CheckSpecialCharacterClass(uc16 type,
ExternalReference map = ExternalReference::re_word_character_map();
__ mov(r0, Operand(map));
__ ldrb(r0, MemOperand(r0, current_character()));
- __ tst(r0, Operand(r0));
+ __ cmp(r0, Operand(0));
BranchOrBacktrack(eq, on_no_match);
return true;
}
@@ -585,7 +631,7 @@ bool RegExpMacroAssemblerARM::CheckSpecialCharacterClass(uc16 type,
ExternalReference map = ExternalReference::re_word_character_map();
__ mov(r0, Operand(map));
__ ldrb(r0, MemOperand(r0, current_character()));
- __ tst(r0, Operand(r0));
+ __ cmp(r0, Operand(0));
BranchOrBacktrack(ne, on_no_match);
if (mode_ != ASCII) {
__ bind(&done);
@@ -681,7 +727,7 @@ Handle<HeapObject> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
// Determine whether the start index is zero, that is at the start of the
// string, and store that value in a local variable.
- __ tst(r1, Operand(r1));
+ __ cmp(r1, Operand(0));
__ mov(r1, Operand(1), LeaveCC, eq);
__ mov(r1, Operand(0, RelocInfo::NONE), LeaveCC, ne);
__ str(r1, MemOperand(frame_pointer(), kAtStart));
@@ -1055,7 +1101,7 @@ int RegExpMacroAssemblerARM::CheckStackGuardState(Address* return_address,
ASSERT(*return_address <=
re_code->instruction_start() + re_code->instruction_size());
- MaybeObject* result = Execution::HandleStackGuardInterrupt();
+ MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate);
if (*code_handle != re_code) { // Return address no longer valid
int delta = code_handle->address() - re_code->address();
diff --git a/src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.h b/src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.h
index 5c8ed06..14f984f 100644
--- a/src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.h
+++ b/src/3rdparty/v8/src/arm/regexp-macro-assembler-arm.h
@@ -79,6 +79,14 @@ class RegExpMacroAssemblerARM: public NativeRegExpMacroAssembler {
uc16 minus,
uc16 mask,
Label* on_not_equal);
+ virtual void CheckCharacterInRange(uc16 from,
+ uc16 to,
+ Label* on_in_range);
+ virtual void CheckCharacterNotInRange(uc16 from,
+ uc16 to,
+ Label* on_not_in_range);
+ virtual void CheckBitInTable(Handle<ByteArray> table, Label* on_bit_set);
+
// Checks whether the given offset from the current position is before
// the end of the string.
virtual void CheckPosition(int cp_offset, Label* on_outside_input);
diff --git a/src/3rdparty/v8/src/arm/simulator-arm.cc b/src/3rdparty/v8/src/arm/simulator-arm.cc
index 542cc30..629c209 100644
--- a/src/3rdparty/v8/src/arm/simulator-arm.cc
+++ b/src/3rdparty/v8/src/arm/simulator-arm.cc
@@ -53,7 +53,7 @@ namespace internal {
// code.
class ArmDebugger {
public:
- explicit ArmDebugger(Simulator* sim);
+ explicit ArmDebugger(Simulator* sim) : sim_(sim) { }
~ArmDebugger();
void Stop(Instruction* instr);
@@ -84,11 +84,6 @@ class ArmDebugger {
};
-ArmDebugger::ArmDebugger(Simulator* sim) {
- sim_ = sim;
-}
-
-
ArmDebugger::~ArmDebugger() {
}
@@ -296,6 +291,13 @@ void ArmDebugger::Debug() {
if (line == NULL) {
break;
} else {
+ char* last_input = sim_->last_debugger_input();
+ if (strcmp(line, "\n") == 0 && last_input != NULL) {
+ line = last_input;
+ } else {
+ // Ownership is transferred to sim_;
+ sim_->set_last_debugger_input(line);
+ }
// Use sscanf to parse the individual parts of the command line. At the
// moment no command expects more than two parameters.
int argc = SScanF(line,
@@ -611,7 +613,6 @@ void ArmDebugger::Debug() {
PrintF("Unknown command: %s\n", cmd);
}
}
- DeleteArray(line);
}
// Add all the breakpoints back to stop execution and enter the debugger
@@ -645,6 +646,12 @@ static bool AllOnOnePage(uintptr_t start, int size) {
}
+void Simulator::set_last_debugger_input(char* input) {
+ DeleteArray(last_debugger_input_);
+ last_debugger_input_ = input;
+}
+
+
void Simulator::FlushICache(v8::internal::HashMap* i_cache,
void* start_addr,
size_t size) {
@@ -734,7 +741,7 @@ Simulator::Simulator(Isolate* isolate) : isolate_(isolate) {
isolate_->set_simulator_i_cache(i_cache_);
}
Initialize(isolate);
- // Setup simulator support first. Some of this information is needed to
+ // Set up simulator support first. Some of this information is needed to
// setup the architecture state.
size_t stack_size = 1 * 1024*1024; // allocate 1MB for stack
stack_ = reinterpret_cast<char*>(malloc(stack_size));
@@ -743,7 +750,7 @@ Simulator::Simulator(Isolate* isolate) : isolate_(isolate) {
break_pc_ = NULL;
break_instr_ = 0;
- // Setup architecture state.
+ // Set up architecture state.
// All registers are initialized to zero to start with.
for (int i = 0; i < num_registers; i++) {
registers_[i] = 0;
@@ -781,6 +788,8 @@ Simulator::Simulator(Isolate* isolate) : isolate_(isolate) {
registers_[pc] = bad_lr;
registers_[lr] = bad_lr;
InitializeCoverage();
+
+ last_debugger_input_ = NULL;
}
@@ -1268,9 +1277,9 @@ void Simulator::WriteDW(int32_t addr, int32_t value1, int32_t value2) {
// Returns the limit of the stack area to enable checking for stack overflows.
uintptr_t Simulator::StackLimit() const {
- // Leave a safety margin of 512 bytes to prevent overrunning the stack when
+ // Leave a safety margin of 1024 bytes to prevent overrunning the stack when
// pushing values.
- return reinterpret_cast<uintptr_t>(stack_) + 512;
+ return reinterpret_cast<uintptr_t>(stack_) + 1024;
}
@@ -3315,7 +3324,7 @@ void Simulator::Execute() {
int32_t Simulator::Call(byte* entry, int argument_count, ...) {
va_list parameters;
va_start(parameters, argument_count);
- // Setup arguments
+ // Set up arguments
// First four arguments passed in registers.
ASSERT(argument_count >= 4);
@@ -3358,7 +3367,7 @@ int32_t Simulator::Call(byte* entry, int argument_count, ...) {
int32_t r10_val = get_register(r10);
int32_t r11_val = get_register(r11);
- // Setup the callee-saved registers with a known value. To be able to check
+ // Set up the callee-saved registers with a known value. To be able to check
// that they are preserved properly across JS execution.
int32_t callee_saved_value = icount_;
set_register(r4, callee_saved_value);
diff --git a/src/3rdparty/v8/src/arm/simulator-arm.h b/src/3rdparty/v8/src/arm/simulator-arm.h
index 391ef69..585f1e0 100644
--- a/src/3rdparty/v8/src/arm/simulator-arm.h
+++ b/src/3rdparty/v8/src/arm/simulator-arm.h
@@ -194,6 +194,10 @@ class Simulator {
// Pop an address from the JS stack.
uintptr_t PopAddress();
+ // Debugger input.
+ void set_last_debugger_input(char* input);
+ char* last_debugger_input() { return last_debugger_input_; }
+
// ICache checking.
static void FlushICache(v8::internal::HashMap* i_cache, void* start,
size_t size);
@@ -360,6 +364,9 @@ class Simulator {
bool pc_modified_;
int icount_;
+ // Debugger input.
+ char* last_debugger_input_;
+
// Icache simulation
v8::internal::HashMap* i_cache_;
diff --git a/src/3rdparty/v8/src/arm/stub-cache-arm.cc b/src/3rdparty/v8/src/arm/stub-cache-arm.cc
index bf6f085..cfd93bc 100644
--- a/src/3rdparty/v8/src/arm/stub-cache-arm.cc
+++ b/src/3rdparty/v8/src/arm/stub-cache-arm.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -43,47 +43,83 @@ static void ProbeTable(Isolate* isolate,
MacroAssembler* masm,
Code::Flags flags,
StubCache::Table table,
+ Register receiver,
Register name,
+ // Number of the cache entry, not scaled.
Register offset,
Register scratch,
- Register scratch2) {
+ Register scratch2,
+ Register offset_scratch) {
ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
+ ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
+ uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());
// Check the relative positions of the address fields.
ASSERT(value_off_addr > key_off_addr);
ASSERT((value_off_addr - key_off_addr) % 4 == 0);
ASSERT((value_off_addr - key_off_addr) < (256 * 4));
+ ASSERT(map_off_addr > key_off_addr);
+ ASSERT((map_off_addr - key_off_addr) % 4 == 0);
+ ASSERT((map_off_addr - key_off_addr) < (256 * 4));
Label miss;
- Register offsets_base_addr = scratch;
+ Register base_addr = scratch;
+ scratch = no_reg;
+
+ // Multiply by 3 because there are 3 fields per entry (name, code, map).
+ __ add(offset_scratch, offset, Operand(offset, LSL, 1));
+
+ // Calculate the base address of the entry.
+ __ mov(base_addr, Operand(key_offset));
+ __ add(base_addr, base_addr, Operand(offset_scratch, LSL, kPointerSizeLog2));
// Check that the key in the entry matches the name.
- __ mov(offsets_base_addr, Operand(key_offset));
- __ ldr(ip, MemOperand(offsets_base_addr, offset, LSL, 1));
+ __ ldr(ip, MemOperand(base_addr, 0));
__ cmp(name, ip);
__ b(ne, &miss);
+ // Check the map matches.
+ __ ldr(ip, MemOperand(base_addr, map_off_addr - key_off_addr));
+ __ ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
+ __ cmp(ip, scratch2);
+ __ b(ne, &miss);
+
// Get the code entry from the cache.
- __ add(offsets_base_addr, offsets_base_addr,
- Operand(value_off_addr - key_off_addr));
- __ ldr(scratch2, MemOperand(offsets_base_addr, offset, LSL, 1));
+ Register code = scratch2;
+ scratch2 = no_reg;
+ __ ldr(code, MemOperand(base_addr, value_off_addr - key_off_addr));
// Check that the flags match what we're looking for.
- __ ldr(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset));
- __ bic(scratch2, scratch2, Operand(Code::kFlagsNotUsedInLookup));
- __ cmp(scratch2, Operand(flags));
+ Register flags_reg = base_addr;
+ base_addr = no_reg;
+ __ ldr(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
+ // It's a nice optimization if this constant is encodable in the bic insn.
+
+ uint32_t mask = Code::kFlagsNotUsedInLookup;
+ ASSERT(__ ImmediateFitsAddrMode1Instruction(mask));
+ __ bic(flags_reg, flags_reg, Operand(mask));
+ // Using cmn and the negative instead of cmp means we can use movw.
+ if (flags < 0) {
+ __ cmn(flags_reg, Operand(-flags));
+ } else {
+ __ cmp(flags_reg, Operand(flags));
+ }
__ b(ne, &miss);
- // Re-load code entry from cache.
- __ ldr(offset, MemOperand(offsets_base_addr, offset, LSL, 1));
+#ifdef DEBUG
+ if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
+ __ jmp(&miss);
+ } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
+ __ jmp(&miss);
+ }
+#endif
// Jump to the first instruction in the code stub.
- __ add(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
- __ Jump(offset);
+ __ add(pc, code, Operand(Code::kHeaderSize - kHeapObjectTag));
// Miss: fall through.
__ bind(&miss);
@@ -155,13 +191,14 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
Register name,
Register scratch,
Register extra,
- Register extra2) {
+ Register extra2,
+ Register extra3) {
Isolate* isolate = masm->isolate();
Label miss;
- // Make sure that code is valid. The shifting code relies on the
- // entry size being 8.
- ASSERT(sizeof(Entry) == 8);
+ // Make sure that code is valid. The multiplying code relies on the
+ // entry size being 12.
+ ASSERT(sizeof(Entry) == 12);
// Make sure the flags does not name a specific type.
ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
@@ -181,6 +218,11 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
ASSERT(!scratch.is(no_reg));
ASSERT(!extra.is(no_reg));
ASSERT(!extra2.is(no_reg));
+ ASSERT(!extra3.is(no_reg));
+
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
+ extra2, extra3);
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, &miss);
@@ -189,27 +231,51 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
__ ldr(scratch, FieldMemOperand(name, String::kHashFieldOffset));
__ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ add(scratch, scratch, Operand(ip));
- __ eor(scratch, scratch, Operand(flags));
- __ and_(scratch,
- scratch,
- Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));
+ uint32_t mask = kPrimaryTableSize - 1;
+ // We shift out the last two bits because they are not part of the hash and
+ // they are always 01 for maps.
+ __ mov(scratch, Operand(scratch, LSR, kHeapObjectTagSize));
+ // Mask down the eor argument to the minimum to keep the immediate
+ // ARM-encodable.
+ __ eor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
+ // Prefer and_ to ubfx here because ubfx takes 2 cycles.
+ __ and_(scratch, scratch, Operand(mask));
// Probe the primary table.
- ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra, extra2);
+ ProbeTable(isolate,
+ masm,
+ flags,
+ kPrimary,
+ receiver,
+ name,
+ scratch,
+ extra,
+ extra2,
+ extra3);
// Primary miss: Compute hash for secondary probe.
- __ sub(scratch, scratch, Operand(name));
- __ add(scratch, scratch, Operand(flags));
- __ and_(scratch,
- scratch,
- Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));
+ __ sub(scratch, scratch, Operand(name, LSR, kHeapObjectTagSize));
+ uint32_t mask2 = kSecondaryTableSize - 1;
+ __ add(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
+ __ and_(scratch, scratch, Operand(mask2));
// Probe the secondary table.
- ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra, extra2);
+ ProbeTable(isolate,
+ masm,
+ flags,
+ kSecondary,
+ receiver,
+ name,
+ scratch,
+ extra,
+ extra2,
+ extra3);
// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
__ bind(&miss);
+ __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
+ extra2, extra3);
}
@@ -376,13 +442,9 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
// r0 : value
Label exit;
- // Check that the receiver isn't a smi.
- __ JumpIfSmi(receiver_reg, miss_label);
-
- // Check that the map of the receiver hasn't changed.
- __ ldr(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
- __ cmp(scratch, Operand(Handle<Map>(object->map())));
- __ b(ne, miss_label);
+ // Check that the map of the object hasn't changed.
+ __ CheckMap(receiver_reg, scratch, Handle<Map>(object->map()), miss_label,
+ DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
@@ -566,16 +628,16 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm,
int argc) {
// ----------- S t a t e -------------
// -- sp[0] : holder (set by CheckPrototypes)
- // -- sp[4] : callee js function
+ // -- sp[4] : callee JS function
// -- sp[8] : call data
- // -- sp[12] : last js argument
+ // -- sp[12] : last JS argument
// -- ...
- // -- sp[(argc + 3) * 4] : first js argument
+ // -- sp[(argc + 3) * 4] : first JS argument
// -- sp[(argc + 4) * 4] : receiver
// -----------------------------------
// Get the function and setup the context.
Handle<JSFunction> function = optimization.constant_function();
- __ mov(r5, Operand(function));
+ __ LoadHeapObject(r5, function);
__ ldr(cp, FieldMemOperand(r5, JSFunction::kContextOffset));
// Pass the additional arguments FastHandleApiCall expects.
@@ -587,7 +649,7 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm,
} else {
__ Move(r6, call_data);
}
- // Store js function and call data.
+ // Store JS function and call data.
__ stm(ib, sp, r5.bit() | r6.bit());
// r2 points to call data as expected by Arguments
@@ -742,7 +804,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
? CALL_AS_FUNCTION
: CALL_AS_METHOD;
__ InvokeFunction(optimization.constant_function(), arguments_,
- JUMP_FUNCTION, call_kind);
+ JUMP_FUNCTION, NullCallWrapper(), call_kind);
}
// Deferred code for fast API call case---clean preallocated space.
@@ -1019,10 +1081,9 @@ Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
__ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
} else {
Handle<Map> current_map(current->map());
- __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
- __ cmp(scratch1, Operand(current_map));
- // Branch on the result of the map check.
- __ b(ne, miss);
+ __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK,
+ ALLOW_ELEMENT_TRANSITION_MAPS);
+
// Check access rights to the global object. This has to happen after
// the map check so that we know that the object is actually a global
// object.
@@ -1053,9 +1114,8 @@ Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));
// Check the holder map.
- __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
- __ cmp(scratch1, Operand(Handle<Map>(current->map())));
- __ b(ne, miss);
+ __ CheckMap(reg, scratch1, Handle<Map>(current->map()), miss,
+ DONT_DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
// Perform security check for access to the global object.
ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
@@ -1099,7 +1159,7 @@ void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
Register scratch1,
Register scratch2,
Register scratch3,
- Handle<Object> value,
+ Handle<JSFunction> value,
Handle<String> name,
Label* miss) {
// Check that the receiver isn't a smi.
@@ -1110,7 +1170,7 @@ void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
// Return the constant value.
- __ mov(r0, Operand(value));
+ __ LoadHeapObject(r0, value);
__ Ret();
}
@@ -1150,7 +1210,7 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
__ EnterExitFrame(false, kApiStackSpace);
// Create AccessorInfo instance on the stack above the exit frame with
- // scratch2 (internal::Object **args_) as the data.
+ // scratch2 (internal::Object** args_) as the data.
__ str(scratch2, MemOperand(sp, 1 * kPointerSize));
__ add(r1, sp, Operand(1 * kPointerSize)); // r1 = AccessorInfo&
@@ -1185,7 +1245,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
// and CALLBACKS, so inline only them, other cases may be added
// later.
bool compile_followup_inline = false;
- if (lookup->IsProperty() && lookup->IsCacheable()) {
+ if (lookup->IsFound() && lookup->IsCacheable()) {
if (lookup->type() == FIELD) {
compile_followup_inline = true;
} else if (lookup->type() == CALLBACKS &&
@@ -1327,14 +1387,8 @@ void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
// Get the receiver from the stack.
__ ldr(r0, MemOperand(sp, argc * kPointerSize));
- // If the object is the holder then we know that it's a global
- // object which can only happen for contextual calls. In this case,
- // the receiver cannot be a smi.
- if (!object.is_identical_to(holder)) {
- __ JumpIfSmi(r0, miss);
- }
-
// Check that the maps haven't changed.
+ __ JumpIfSmi(r0, miss);
CheckPrototypes(object, r0, holder, r3, r1, r4, name, miss);
}
@@ -1451,28 +1505,30 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ Ret();
} else {
Label call_builtin;
- Register elements = r3;
- Register end_elements = r5;
- // Get the elements array of the object.
- __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
-
- // Check that the elements are in fast mode and writable.
- __ CheckMap(elements,
- r0,
- Heap::kFixedArrayMapRootIndex,
- &call_builtin,
- DONT_DO_SMI_CHECK);
if (argc == 1) { // Otherwise fall through to call the builtin.
Label attempt_to_grow_elements;
+ Register elements = r6;
+ Register end_elements = r5;
+ // Get the elements array of the object.
+ __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
+
+ // Check that the elements are in fast mode and writable.
+ __ CheckMap(elements,
+ r0,
+ Heap::kFixedArrayMapRootIndex,
+ &call_builtin,
+ DONT_DO_SMI_CHECK);
+
+
// Get the array's length into r0 and calculate new length.
__ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kSmiTag == 0);
__ add(r0, r0, Operand(Smi::FromInt(argc)));
- // Get the element's length.
+ // Get the elements' length.
__ ldr(r4, FieldMemOperand(elements, FixedArray::kLengthOffset));
// Check if we could survive without allocation.
@@ -1487,7 +1543,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
// Save new length.
__ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
- // Push the element.
+ // Store the value.
// We may need a register containing the address end_elements below,
// so write back the value in end_elements.
__ add(end_elements, elements,
@@ -1502,13 +1558,33 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ bind(&with_write_barrier);
- __ ldr(r6, FieldMemOperand(receiver, HeapObject::kMapOffset));
- __ CheckFastObjectElements(r6, r6, &call_builtin);
+ __ ldr(r3, FieldMemOperand(receiver, HeapObject::kMapOffset));
+
+ if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
+ Label fast_object, not_fast_object;
+ __ CheckFastObjectElements(r3, r7, &not_fast_object);
+ __ jmp(&fast_object);
+ // In case of fast smi-only, convert to fast object, otherwise bail out.
+ __ bind(&not_fast_object);
+ __ CheckFastSmiOnlyElements(r3, r7, &call_builtin);
+ // edx: receiver
+ // r3: map
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ r3,
+ r7,
+ &call_builtin);
+ __ mov(r2, receiver);
+ ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+ __ bind(&fast_object);
+ } else {
+ __ CheckFastObjectElements(r3, r3, &call_builtin);
+ }
// Save new length.
__ str(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
- // Push the element.
+ // Store the value.
// We may need a register containing the address end_elements below,
// so write back the value in end_elements.
__ add(end_elements, elements,
@@ -1554,25 +1630,25 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
__ add(end_elements, end_elements, Operand(kEndElementsOffset));
__ mov(r7, Operand(new_space_allocation_top));
- __ ldr(r6, MemOperand(r7));
- __ cmp(end_elements, r6);
+ __ ldr(r3, MemOperand(r7));
+ __ cmp(end_elements, r3);
__ b(ne, &call_builtin);
__ mov(r9, Operand(new_space_allocation_limit));
__ ldr(r9, MemOperand(r9));
- __ add(r6, r6, Operand(kAllocationDelta * kPointerSize));
- __ cmp(r6, r9);
+ __ add(r3, r3, Operand(kAllocationDelta * kPointerSize));
+ __ cmp(r3, r9);
__ b(hi, &call_builtin);
// We fit and could grow elements.
// Update new_space_allocation_top.
- __ str(r6, MemOperand(r7));
+ __ str(r3, MemOperand(r7));
// Push the argument.
__ str(r2, MemOperand(end_elements));
// Fill the rest with holes.
- __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
+ __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
for (int i = 1; i < kAllocationDelta; i++) {
- __ str(r6, MemOperand(end_elements, i * kPointerSize));
+ __ str(r3, MemOperand(end_elements, i * kPointerSize));
}
// Update elements' and array's sizes.
@@ -1654,7 +1730,7 @@ Handle<Code> CallStubCompiler::CompileArrayPopCall(
// We can't address the last element in one operation. Compute the more
// expensive shift first, and use an offset later on.
__ add(elements, elements, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize));
- __ ldr(r0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
+ __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
__ cmp(r0, r6);
__ b(eq, &call_builtin);
@@ -1662,7 +1738,7 @@ Handle<Code> CallStubCompiler::CompileArrayPopCall(
__ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
// Fill with the hole.
- __ str(r6, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
+ __ str(r6, FieldMemOperand(elements, FixedArray::kHeaderSize));
__ Drop(argc + 1);
__ Ret();
@@ -1727,7 +1803,6 @@ Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
Register receiver = r1;
Register index = r4;
- Register scratch = r3;
Register result = r0;
__ ldr(receiver, MemOperand(sp, argc * kPointerSize));
if (argc > 0) {
@@ -1738,7 +1813,6 @@ Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
StringCharCodeAtGenerator generator(receiver,
index,
- scratch,
result,
&miss, // When not a string.
&miss, // When not a number.
@@ -1809,8 +1883,7 @@ Handle<Code> CallStubCompiler::CompileStringCharAtCall(
Register receiver = r0;
Register index = r4;
- Register scratch1 = r1;
- Register scratch2 = r3;
+ Register scratch = r3;
Register result = r0;
__ ldr(receiver, MemOperand(sp, argc * kPointerSize));
if (argc > 0) {
@@ -1821,8 +1894,7 @@ Handle<Code> CallStubCompiler::CompileStringCharAtCall(
StringCharAtGenerator generator(receiver,
index,
- scratch1,
- scratch2,
+ scratch,
result,
&miss, // When not a string.
&miss, // When not a number.
@@ -1914,7 +1986,8 @@ Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
// Tail call the full function. We do not have to patch the receiver
// because the function makes no use of it.
__ bind(&slow);
- __ InvokeFunction(function, arguments(), JUMP_FUNCTION, CALL_AS_METHOD);
+ __ InvokeFunction(
+ function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
__ bind(&miss);
// r2: function name.
@@ -1993,7 +2066,7 @@ Handle<Code> CallStubCompiler::CompileMathFloorCall(
__ vmrs(r3);
// Set custom FPCSR:
// - Set rounding mode to "Round towards Minus Infinity"
- // (ie bits [23:22] = 0b10).
+ // (i.e. bits [23:22] = 0b10).
// - Clear vfp cumulative exception flags (bits [3:0]).
// - Make sure Flush-to-zero mode control bit is unset (bit 22).
__ bic(r9, r3,
@@ -2059,7 +2132,8 @@ Handle<Code> CallStubCompiler::CompileMathFloorCall(
__ bind(&slow);
// Tail call the full function. We do not have to patch the receiver
// because the function makes no use of it.
- __ InvokeFunction(function, arguments(), JUMP_FUNCTION, CALL_AS_METHOD);
+ __ InvokeFunction(
+ function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
__ bind(&miss);
// r2: function name.
@@ -2157,7 +2231,8 @@ Handle<Code> CallStubCompiler::CompileMathAbsCall(
// Tail call the full function. We do not have to patch the receiver
// because the function makes no use of it.
__ bind(&slow);
- __ InvokeFunction(function, arguments(), JUMP_FUNCTION, CALL_AS_METHOD);
+ __ InvokeFunction(
+ function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
__ bind(&miss);
// r2: function name.
@@ -2269,7 +2344,7 @@ Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
break;
case STRING_CHECK:
- if (function->IsBuiltin() || function->shared()->strict_mode()) {
+ if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
// Check that the object is a two-byte string or a symbol.
__ CompareObjectType(r1, r3, r3, FIRST_NONSTRING_TYPE);
__ b(ge, &miss);
@@ -2287,7 +2362,7 @@ Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
break;
case NUMBER_CHECK:
- if (function->IsBuiltin() || function->shared()->strict_mode()) {
+ if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
Label fast;
// Check that the object is a smi or a heap number.
__ JumpIfSmi(r1, &fast);
@@ -2308,7 +2383,7 @@ Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
break;
case BOOLEAN_CHECK:
- if (function->IsBuiltin() || function->shared()->strict_mode()) {
+ if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
Label fast;
// Check that the object is a boolean.
__ LoadRoot(ip, Heap::kTrueValueRootIndex);
@@ -2335,7 +2410,8 @@ Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
? CALL_AS_FUNCTION
: CALL_AS_METHOD;
- __ InvokeFunction(function, arguments(), JUMP_FUNCTION, call_kind);
+ __ InvokeFunction(
+ function, arguments(), JUMP_FUNCTION, NullCallWrapper(), call_kind);
// Handle call cache miss.
__ bind(&miss);
@@ -2415,7 +2491,7 @@ Handle<Code> CallStubCompiler::CompileCallGlobal(
__ str(r3, MemOperand(sp, argc * kPointerSize));
}
- // Setup the context (function already in r1).
+ // Set up the context (function already in r1).
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
// Jump to the cached code (tail call).
@@ -2476,13 +2552,9 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
// -----------------------------------
Label miss;
- // Check that the object isn't a smi.
- __ JumpIfSmi(r1, &miss);
-
// Check that the map of the object hasn't changed.
- __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
- __ cmp(r3, Operand(Handle<Map>(object->map())));
- __ b(ne, &miss);
+ __ CheckMap(r1, r3, Handle<Map>(object->map()), &miss,
+ DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
@@ -2524,13 +2596,9 @@ Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
// -----------------------------------
Label miss;
- // Check that the object isn't a smi.
- __ JumpIfSmi(r1, &miss);
-
// Check that the map of the object hasn't changed.
- __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
- __ cmp(r3, Operand(Handle<Map>(receiver->map())));
- __ b(ne, &miss);
+ __ CheckMap(r1, r3, Handle<Map>(receiver->map()), &miss,
+ DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
// Perform global security token check if needed.
if (receiver->IsJSGlobalProxy()) {
@@ -2591,15 +2659,7 @@ Handle<Code> StoreStubCompiler::CompileStoreGlobal(
// Store the value in the cell.
__ str(r0, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));
-
- __ mov(r1, r0);
- __ RecordWriteField(r4,
- JSGlobalPropertyCell::kValueOffset,
- r1,
- r2,
- kLRHasNotBeenSaved,
- kDontSaveFPRegs,
- OMIT_REMEMBERED_SET);
+ // Cells are always rescanned, so no write barrier here.
Counters* counters = masm()->isolate()->counters();
__ IncrementCounter(counters->named_store_global_inline(), 1, r4, r3);
@@ -2694,7 +2754,7 @@ Handle<Code> LoadStubCompiler::CompileLoadCallback(
Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
Handle<JSObject> holder,
- Handle<Object> value,
+ Handle<JSFunction> value,
Handle<String> name) {
// ----------- S t a t e -------------
// -- r0 : receiver
@@ -2747,14 +2807,8 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal(
// -----------------------------------
Label miss;
- // If the object is the holder then we know that it's a global
- // object which can only happen for contextual calls. In this case,
- // the receiver cannot be a smi.
- if (!object.is_identical_to(holder)) {
- __ JumpIfSmi(r0, &miss);
- }
-
// Check that the map of the global has not changed.
+ __ JumpIfSmi(r0, &miss);
CheckPrototypes(object, r0, holder, r3, r4, r1, name, &miss);
// Get the value from the cell.
@@ -2834,7 +2888,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
Handle<String> name,
Handle<JSObject> receiver,
Handle<JSObject> holder,
- Handle<Object> value) {
+ Handle<JSFunction> value) {
// ----------- S t a t e -------------
// -- lr : return address
// -- r0 : key
@@ -3046,7 +3100,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
ElementsKind elements_kind = receiver_map->elements_kind();
bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
Handle<Code> stub =
- KeyedStoreElementStub(is_js_array, elements_kind).GetCode();
+ KeyedStoreElementStub(is_js_array, elements_kind, grow_mode_).GetCode();
__ DispatchMap(r2, r3, receiver_map, stub, DO_SMI_CHECK);
@@ -4091,7 +4145,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
void KeyedStoreStubCompiler::GenerateStoreFastElement(
MacroAssembler* masm,
bool is_js_array,
- ElementsKind elements_kind) {
+ ElementsKind elements_kind,
+ KeyedAccessGrowMode grow_mode) {
// ----------- S t a t e -------------
// -- r0 : value
// -- r1 : key
@@ -4100,13 +4155,16 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
// -- r3 : scratch
// -- r4 : scratch (elements)
// -----------------------------------
- Label miss_force_generic, transition_elements_kind;
+ Label miss_force_generic, transition_elements_kind, grow, slow;
+ Label finish_store, check_capacity;
Register value_reg = r0;
Register key_reg = r1;
Register receiver_reg = r2;
- Register scratch = r3;
- Register elements_reg = r4;
+ Register scratch = r4;
+ Register elements_reg = r3;
+ Register length_reg = r5;
+ Register scratch2 = r6;
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
@@ -4114,16 +4172,13 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
// Check that the key is a smi.
__ JumpIfNotSmi(key_reg, &miss_force_generic);
- // Get the elements array and make sure it is a fast element array, not 'cow'.
- __ ldr(elements_reg,
- FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
- __ CheckMap(elements_reg,
- scratch,
- Heap::kFixedArrayMapRootIndex,
- &miss_force_generic,
- DONT_DO_SMI_CHECK);
+ if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ __ JumpIfNotSmi(value_reg, &transition_elements_kind);
+ }
// Check that the key is within bounds.
+ __ ldr(elements_reg,
+ FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
if (is_js_array) {
__ ldr(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
} else {
@@ -4131,10 +4186,21 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
}
// Compare smis.
__ cmp(key_reg, scratch);
- __ b(hs, &miss_force_generic);
+ if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
+ __ b(hs, &grow);
+ } else {
+ __ b(hs, &miss_force_generic);
+ }
+
+ // Make sure elements is a fast element array, not 'cow'.
+ __ CheckMap(elements_reg,
+ scratch,
+ Heap::kFixedArrayMapRootIndex,
+ &miss_force_generic,
+ DONT_DO_SMI_CHECK);
+ __ bind(&finish_store);
if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
- __ JumpIfNotSmi(value_reg, &transition_elements_kind);
__ add(scratch,
elements_reg,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
@@ -4172,12 +4238,80 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
__ bind(&transition_elements_kind);
Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
__ Jump(ic_miss, RelocInfo::CODE_TARGET);
+
+ if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
+ // Grow the array by a single element if possible.
+ __ bind(&grow);
+
+ // Make sure the array is only growing by a single element, anything else
+ // must be handled by the runtime. Flags already set by previous compare.
+ __ b(ne, &miss_force_generic);
+
+ // Check for the empty array, and preallocate a small backing store if
+ // possible.
+ __ ldr(length_reg,
+ FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
+ __ ldr(elements_reg,
+ FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
+ __ CompareRoot(elements_reg, Heap::kEmptyFixedArrayRootIndex);
+ __ b(ne, &check_capacity);
+
+ int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
+ __ AllocateInNewSpace(size, elements_reg, scratch, scratch2, &slow,
+ TAG_OBJECT);
+
+ __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex);
+ __ str(scratch, FieldMemOperand(elements_reg, JSObject::kMapOffset));
+ __ mov(scratch, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
+ __ str(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
+ __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
+ for (int i = 1; i < JSArray::kPreallocatedArrayElements; ++i) {
+ __ str(scratch, FieldMemOperand(elements_reg, FixedArray::SizeFor(i)));
+ }
+
+ // Store the element at index zero.
+ __ str(value_reg, FieldMemOperand(elements_reg, FixedArray::SizeFor(0)));
+
+ // Install the new backing store in the JSArray.
+ __ str(elements_reg,
+ FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
+ __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
+ scratch, kLRHasNotBeenSaved, kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+
+ // Increment the length of the array.
+ __ mov(length_reg, Operand(Smi::FromInt(1)));
+ __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
+ __ Ret();
+
+ __ bind(&check_capacity);
+ // Check for cow elements, in general they are not handled by this stub
+ __ CheckMap(elements_reg,
+ scratch,
+ Heap::kFixedCOWArrayMapRootIndex,
+ &miss_force_generic,
+ DONT_DO_SMI_CHECK);
+
+ __ ldr(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
+ __ cmp(length_reg, scratch);
+ __ b(hs, &slow);
+
+ // Grow the array and finish the store.
+ __ add(length_reg, length_reg, Operand(Smi::FromInt(1)));
+ __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
+ __ jmp(&finish_store);
+
+ __ bind(&slow);
+ Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
+ __ Jump(ic_slow, RelocInfo::CODE_TARGET);
+ }
}
void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
MacroAssembler* masm,
- bool is_js_array) {
+ bool is_js_array,
+ KeyedAccessGrowMode grow_mode) {
// ----------- S t a t e -------------
// -- r0 : value
// -- r1 : key
@@ -4187,7 +4321,8 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
// -- r4 : scratch
// -- r5 : scratch
// -----------------------------------
- Label miss_force_generic, transition_elements_kind;
+ Label miss_force_generic, transition_elements_kind, grow, slow;
+ Label finish_store, check_capacity;
Register value_reg = r0;
Register key_reg = r1;
@@ -4197,6 +4332,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
Register scratch2 = r5;
Register scratch3 = r6;
Register scratch4 = r7;
+ Register length_reg = r7;
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
@@ -4215,8 +4351,13 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
// Compare smis, unsigned compare catches both negative and out-of-bound
// indexes.
__ cmp(key_reg, scratch1);
- __ b(hs, &miss_force_generic);
+ if (grow_mode == ALLOW_JSARRAY_GROWTH) {
+ __ b(hs, &grow);
+ } else {
+ __ b(hs, &miss_force_generic);
+ }
+ __ bind(&finish_store);
__ StoreNumberToDoubleElements(value_reg,
key_reg,
receiver_reg,
@@ -4237,6 +4378,73 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
__ bind(&transition_elements_kind);
Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
__ Jump(ic_miss, RelocInfo::CODE_TARGET);
+
+ if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
+ // Grow the array by a single element if possible.
+ __ bind(&grow);
+
+ // Make sure the array is only growing by a single element, anything else
+ // must be handled by the runtime. Flags already set by previous compare.
+ __ b(ne, &miss_force_generic);
+
+ // Transition on values that can't be stored in a FixedDoubleArray.
+ Label value_is_smi;
+ __ JumpIfSmi(value_reg, &value_is_smi);
+ __ ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
+ __ CompareRoot(scratch1, Heap::kHeapNumberMapRootIndex);
+ __ b(ne, &transition_elements_kind);
+ __ bind(&value_is_smi);
+
+ // Check for the empty array, and preallocate a small backing store if
+ // possible.
+ __ ldr(length_reg,
+ FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
+ __ ldr(elements_reg,
+ FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
+ __ CompareRoot(elements_reg, Heap::kEmptyFixedArrayRootIndex);
+ __ b(ne, &check_capacity);
+
+ int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
+ __ AllocateInNewSpace(size, elements_reg, scratch1, scratch2, &slow,
+ TAG_OBJECT);
+
+ // Initialize the new FixedDoubleArray. Leave elements unitialized for
+ // efficiency, they are guaranteed to be initialized before use.
+ __ LoadRoot(scratch1, Heap::kFixedDoubleArrayMapRootIndex);
+ __ str(scratch1, FieldMemOperand(elements_reg, JSObject::kMapOffset));
+ __ mov(scratch1,
+ Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
+ __ str(scratch1,
+ FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset));
+
+ // Install the new backing store in the JSArray.
+ __ str(elements_reg,
+ FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
+ __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
+ scratch1, kLRHasNotBeenSaved, kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+
+ // Increment the length of the array.
+ __ mov(length_reg, Operand(Smi::FromInt(1)));
+ __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
+ __ jmp(&finish_store);
+
+ __ bind(&check_capacity);
+ // Make sure that the backing store can hold additional elements.
+ __ ldr(scratch1,
+ FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset));
+ __ cmp(length_reg, scratch1);
+ __ b(hs, &slow);
+
+ // Grow the array and finish the store.
+ __ add(length_reg, length_reg, Operand(Smi::FromInt(1)));
+ __ str(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
+ __ jmp(&finish_store);
+
+ __ bind(&slow);
+ Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
+ __ Jump(ic_slow, RelocInfo::CODE_TARGET);
+ }
}
diff --git a/src/3rdparty/v8/src/array.js b/src/3rdparty/v8/src/array.js
index 214065c..daa75d5 100644
--- a/src/3rdparty/v8/src/array.js
+++ b/src/3rdparty/v8/src/array.js
@@ -27,7 +27,7 @@
// This file relies on the fact that the following declarations have been made
// in runtime.js:
-// const $Array = global.Array;
+// var $Array = global.Array;
// -------------------------------------------------------------------
@@ -172,12 +172,12 @@ function Join(array, length, separator, convert) {
} else {
for (var i = 0; i < length; i++) {
var e = array[i];
- if (IS_NUMBER(e)) {
- e = %_NumberToString(e);
- } else if (!IS_STRING(e)) {
- e = convert(e);
- }
- elements[i] = e;
+ if (IS_NUMBER(e)) {
+ e = %_NumberToString(e);
+ } else if (!IS_STRING(e)) {
+ e = convert(e);
+ }
+ elements[i] = e;
}
}
var result = %_FastAsciiArrayJoin(elements, separator);
@@ -204,7 +204,7 @@ function ConvertToLocaleString(e) {
if (IS_NULL_OR_UNDEFINED(e)) {
return '';
} else {
- // According to ES5, seciton 15.4.4.3, the toLocaleString conversion
+ // According to ES5, section 15.4.4.3, the toLocaleString conversion
// must throw a TypeError if ToObject(e).toLocaleString isn't
// callable.
var e_obj = ToObject(e);
@@ -328,8 +328,9 @@ function SimpleSlice(array, start_i, del_count, len, deleted_elements) {
// would be the appropriate test. We follow KJS in consulting the
// prototype.
var current = array[index];
- if (!IS_UNDEFINED(current) || index in array)
+ if (!IS_UNDEFINED(current) || index in array) {
deleted_elements[i] = current;
+ }
}
}
@@ -495,12 +496,12 @@ function SparseReverse(array, len) {
if (j_complement <= i) {
high = j;
- while (keys[--high_counter] == j);
+ while (keys[--high_counter] == j) { }
low = j_complement;
}
if (j_complement >= i) {
low = i;
- while (keys[++low_counter] == i);
+ while (keys[++low_counter] == i) { }
high = len - i - 1;
}
@@ -576,10 +577,11 @@ function ArrayShift() {
var first = this[0];
- if (IS_ARRAY(this))
+ if (IS_ARRAY(this)) {
SmartMove(this, 0, 1, len, 0);
- else
+ } else {
SimpleMove(this, 0, 1, len, 0);
+ }
this.length = len - 1;
@@ -596,10 +598,11 @@ function ArrayUnshift(arg1) { // length == 1
var len = TO_UINT32(this.length);
var num_arguments = %_ArgumentsLength();
- if (IS_ARRAY(this))
+ if (IS_ARRAY(this)) {
SmartMove(this, 0, 0, len, num_arguments);
- else
+ } else {
SimpleMove(this, 0, 0, len, num_arguments);
+ }
for (var i = 0; i < num_arguments; i++) {
this[i] = %_Arguments(i);
@@ -754,7 +757,7 @@ function ArraySort(comparefn) {
}
var receiver = %GetDefaultReceiver(comparefn);
- function InsertionSort(a, from, to) {
+ var InsertionSort = function InsertionSort(a, from, to) {
for (var i = from + 1; i < to; i++) {
var element = a[i];
for (var j = i - 1; j >= from; j--) {
@@ -768,9 +771,9 @@ function ArraySort(comparefn) {
}
a[j + 1] = element;
}
- }
+ };
- function QuickSort(a, from, to) {
+ var QuickSort = function QuickSort(a, from, to) {
// Insertion sort is faster for short arrays.
if (to - from <= 10) {
InsertionSort(a, from, to);
@@ -838,12 +841,12 @@ function ArraySort(comparefn) {
}
QuickSort(a, from, low_end);
QuickSort(a, high_start, to);
- }
+ };
// Copy elements in the range 0..length from obj's prototype chain
// to obj itself, if obj has holes. Return one more than the maximal index
// of a prototype property.
- function CopyFromPrototype(obj, length) {
+ var CopyFromPrototype = function CopyFromPrototype(obj, length) {
var max = 0;
for (var proto = obj.__proto__; proto; proto = proto.__proto__) {
var indices = %GetArrayKeys(proto, length);
@@ -870,12 +873,12 @@ function ArraySort(comparefn) {
}
}
return max;
- }
+ };
// Set a value of "undefined" on all indices in the range from..to
// where a prototype of obj has an element. I.e., shadow all prototype
// elements in that range.
- function ShadowPrototypeElements(obj, from, to) {
+ var ShadowPrototypeElements = function(obj, from, to) {
for (var proto = obj.__proto__; proto; proto = proto.__proto__) {
var indices = %GetArrayKeys(proto, to);
if (indices.length > 0) {
@@ -898,9 +901,9 @@ function ArraySort(comparefn) {
}
}
}
- }
+ };
- function SafeRemoveArrayHoles(obj) {
+ var SafeRemoveArrayHoles = function SafeRemoveArrayHoles(obj) {
// Copy defined elements from the end to fill in all holes and undefineds
// in the beginning of the array. Write undefineds and holes at the end
// after loop is finished.
@@ -955,7 +958,7 @@ function ArraySort(comparefn) {
// Return the number of defined elements.
return first_undefined;
- }
+ };
var length = TO_UINT32(this.length);
if (length < 2) return this;
@@ -1021,10 +1024,10 @@ function ArrayFilter(f, receiver) {
var accumulator = new InternalArray();
var accumulator_length = 0;
for (var i = 0; i < length; i++) {
- var current = array[i];
- if (!IS_UNDEFINED(current) || i in array) {
- if (%_CallFunction(receiver, current, i, array, f)) {
- accumulator[accumulator_length++] = current;
+ if (i in array) {
+ var element = array[i];
+ if (%_CallFunction(receiver, element, i, array, f)) {
+ accumulator[accumulator_length++] = element;
}
}
}
@@ -1054,9 +1057,9 @@ function ArrayForEach(f, receiver) {
}
for (var i = 0; i < length; i++) {
- var current = array[i];
- if (!IS_UNDEFINED(current) || i in array) {
- %_CallFunction(receiver, current, i, array, f);
+ if (i in array) {
+ var element = array[i];
+ %_CallFunction(receiver, element, i, array, f);
}
}
}
@@ -1085,9 +1088,9 @@ function ArraySome(f, receiver) {
}
for (var i = 0; i < length; i++) {
- var current = array[i];
- if (!IS_UNDEFINED(current) || i in array) {
- if (%_CallFunction(receiver, current, i, array, f)) return true;
+ if (i in array) {
+ var element = array[i];
+ if (%_CallFunction(receiver, element, i, array, f)) return true;
}
}
return false;
@@ -1115,9 +1118,9 @@ function ArrayEvery(f, receiver) {
}
for (var i = 0; i < length; i++) {
- var current = array[i];
- if (!IS_UNDEFINED(current) || i in array) {
- if (!%_CallFunction(receiver, current, i, array, f)) return false;
+ if (i in array) {
+ var element = array[i];
+ if (!%_CallFunction(receiver, element, i, array, f)) return false;
}
}
return true;
@@ -1146,9 +1149,9 @@ function ArrayMap(f, receiver) {
var result = new $Array();
var accumulator = new InternalArray(length);
for (var i = 0; i < length; i++) {
- var current = array[i];
- if (!IS_UNDEFINED(current) || i in array) {
- accumulator[i] = %_CallFunction(receiver, current, i, array, f);
+ if (i in array) {
+ var element = array[i];
+ accumulator[i] = %_CallFunction(receiver, element, i, array, f);
}
}
%MoveArrayContents(accumulator, result);
@@ -1305,8 +1308,8 @@ function ArrayReduce(callback, current) {
var receiver = %GetDefaultReceiver(callback);
for (; i < length; i++) {
- var element = array[i];
- if (!IS_UNDEFINED(element) || i in array) {
+ if (i in array) {
+ var element = array[i];
current = %_CallFunction(receiver, current, element, i, array, callback);
}
}
@@ -1342,8 +1345,8 @@ function ArrayReduceRight(callback, current) {
var receiver = %GetDefaultReceiver(callback);
for (; i >= 0; i--) {
- var element = array[i];
- if (!IS_UNDEFINED(element) || i in array) {
+ if (i in array) {
+ var element = array[i];
current = %_CallFunction(receiver, current, element, i, array, callback);
}
}
@@ -1370,7 +1373,7 @@ function SetUpArray() {
var specialFunctions = %SpecialArrayFunctions({});
- function getFunction(name, jsBuiltin, len) {
+ var getFunction = function(name, jsBuiltin, len) {
var f = jsBuiltin;
if (specialFunctions.hasOwnProperty(name)) {
f = specialFunctions[name];
@@ -1379,13 +1382,13 @@ function SetUpArray() {
%FunctionSetLength(f, len);
}
return f;
- }
+ };
// Set up non-enumerable functions of the Array.prototype object and
// set their names.
// Manipulate the length of some of the functions to meet
// expectations set by ECMA-262 or Mozilla.
- InstallFunctionsOnHiddenPrototype($Array.prototype, DONT_ENUM, $Array(
+ InstallFunctions($Array.prototype, DONT_ENUM, $Array(
"toString", getFunction("toString", ArrayToString),
"toLocaleString", getFunction("toLocaleString", ArrayToLocaleString),
"join", getFunction("join", ArrayJoin),
diff --git a/src/3rdparty/v8/src/assembler.cc b/src/3rdparty/v8/src/assembler.cc
index 4dc2394..40765b3 100644
--- a/src/3rdparty/v8/src/assembler.cc
+++ b/src/3rdparty/v8/src/assembler.cc
@@ -1,4 +1,4 @@
-// Copyright (c) 2011 Sun Microsystems Inc.
+// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
@@ -30,25 +30,43 @@
// The original source code covered by the above license above has been
// modified significantly by Google Inc.
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
-#include "v8.h"
+#include "assembler.h"
-#include "arguments.h"
+#include <math.h> // For cos, log, pow, sin, tan, etc.
+#include "api.h"
+#include "builtins.h"
+#include "counters.h"
+#include "cpu.h"
+#include "debug.h"
#include "deoptimizer.h"
#include "execution.h"
-#include "ic-inl.h"
-#include "incremental-marking.h"
-#include "factory.h"
+#include "ic.h"
+#include "isolate.h"
+#include "jsregexp.h"
+#include "lazy-instance.h"
+#include "platform.h"
+#include "regexp-macro-assembler.h"
+#include "regexp-stack.h"
#include "runtime.h"
-#include "runtime-profiler.h"
#include "serialize.h"
+#include "store-buffer-inl.h"
#include "stub-cache.h"
-#include "regexp-stack.h"
-#include "ast.h"
-#include "regexp-macro-assembler.h"
-#include "platform.h"
-#include "store-buffer.h"
+#include "token.h"
+
+#if V8_TARGET_ARCH_IA32
+#include "ia32/assembler-ia32-inl.h"
+#elif V8_TARGET_ARCH_X64
+#include "x64/assembler-x64-inl.h"
+#elif V8_TARGET_ARCH_ARM
+#include "arm/assembler-arm-inl.h"
+#elif V8_TARGET_ARCH_MIPS
+#include "mips/assembler-mips-inl.h"
+#else
+#error "Unknown architecture."
+#endif
+
// Include native regexp-macro-assembler.
#ifndef V8_INTERPRETED_REGEXP
#if V8_TARGET_ARCH_IA32
@@ -67,15 +85,22 @@
namespace v8 {
namespace internal {
+// -----------------------------------------------------------------------------
+// Common double constants.
+
+struct DoubleConstant BASE_EMBEDDED {
+ double min_int;
+ double one_half;
+ double minus_zero;
+ double zero;
+ double uint8_max_value;
+ double negative_infinity;
+ double canonical_non_hole_nan;
+ double the_hole_nan;
+};
+
+static DoubleConstant double_constants;
-const double DoubleConstant::min_int = kMinInt;
-const double DoubleConstant::one_half = 0.5;
-const double DoubleConstant::minus_zero = -0.0;
-const double DoubleConstant::uint8_max_value = 255;
-const double DoubleConstant::zero = 0.0;
-const double DoubleConstant::canonical_non_hole_nan = OS::nan_value();
-const double DoubleConstant::the_hole_nan = BitCast<double>(kHoleNanInt64);
-const double DoubleConstant::negative_infinity = -V8_INFINITY;
const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";
// -----------------------------------------------------------------------------
@@ -687,6 +712,18 @@ void RelocInfo::Verify() {
// -----------------------------------------------------------------------------
// Implementation of ExternalReference
+void ExternalReference::SetUp() {
+ double_constants.min_int = kMinInt;
+ double_constants.one_half = 0.5;
+ double_constants.minus_zero = -0.0;
+ double_constants.uint8_max_value = 255;
+ double_constants.zero = 0.0;
+ double_constants.canonical_non_hole_nan = OS::nan_value();
+ double_constants.the_hole_nan = BitCast<double>(kHoleNanInt64);
+ double_constants.negative_infinity = -V8_INFINITY;
+}
+
+
ExternalReference::ExternalReference(Builtins::CFunctionId id, Isolate* isolate)
: address_(Redirect(isolate, Builtins::c_function_address(id))) {}
@@ -796,6 +833,17 @@ ExternalReference ExternalReference::random_uint32_function(
}
+ExternalReference ExternalReference::get_date_field_function(
+ Isolate* isolate) {
+ return ExternalReference(Redirect(isolate, FUNCTION_ADDR(JSDate::GetField)));
+}
+
+
+ExternalReference ExternalReference::date_cache_stamp(Isolate* isolate) {
+ return ExternalReference(isolate->date_cache()->stamp_address());
+}
+
+
ExternalReference ExternalReference::transcendental_cache_array_address(
Isolate* isolate) {
return ExternalReference(
@@ -817,11 +865,6 @@ ExternalReference ExternalReference::compute_output_frames_function(
}
-ExternalReference ExternalReference::global_contexts_list(Isolate* isolate) {
- return ExternalReference(isolate->heap()->global_contexts_list_address());
-}
-
-
ExternalReference ExternalReference::keyed_lookup_cache_keys(Isolate* isolate) {
return ExternalReference(isolate->keyed_lookup_cache()->keys_address());
}
@@ -913,50 +956,47 @@ ExternalReference ExternalReference::scheduled_exception_address(
ExternalReference ExternalReference::address_of_min_int() {
- return ExternalReference(reinterpret_cast<void*>(
- const_cast<double*>(&DoubleConstant::min_int)));
+ return ExternalReference(reinterpret_cast<void*>(&double_constants.min_int));
}
ExternalReference ExternalReference::address_of_one_half() {
- return ExternalReference(reinterpret_cast<void*>(
- const_cast<double*>(&DoubleConstant::one_half)));
+ return ExternalReference(reinterpret_cast<void*>(&double_constants.one_half));
}
ExternalReference ExternalReference::address_of_minus_zero() {
- return ExternalReference(reinterpret_cast<void*>(
- const_cast<double*>(&DoubleConstant::minus_zero)));
+ return ExternalReference(
+ reinterpret_cast<void*>(&double_constants.minus_zero));
}
ExternalReference ExternalReference::address_of_zero() {
- return ExternalReference(reinterpret_cast<void*>(
- const_cast<double*>(&DoubleConstant::zero)));
+ return ExternalReference(reinterpret_cast<void*>(&double_constants.zero));
}
ExternalReference ExternalReference::address_of_uint8_max_value() {
- return ExternalReference(reinterpret_cast<void*>(
- const_cast<double*>(&DoubleConstant::uint8_max_value)));
+ return ExternalReference(
+ reinterpret_cast<void*>(&double_constants.uint8_max_value));
}
ExternalReference ExternalReference::address_of_negative_infinity() {
- return ExternalReference(reinterpret_cast<void*>(
- const_cast<double*>(&DoubleConstant::negative_infinity)));
+ return ExternalReference(
+ reinterpret_cast<void*>(&double_constants.negative_infinity));
}
ExternalReference ExternalReference::address_of_canonical_non_hole_nan() {
- return ExternalReference(reinterpret_cast<void*>(
- const_cast<double*>(&DoubleConstant::canonical_non_hole_nan)));
+ return ExternalReference(
+ reinterpret_cast<void*>(&double_constants.canonical_non_hole_nan));
}
ExternalReference ExternalReference::address_of_the_hole_nan() {
- return ExternalReference(reinterpret_cast<void*>(
- const_cast<double*>(&DoubleConstant::the_hole_nan)));
+ return ExternalReference(
+ reinterpret_cast<void*>(&double_constants.the_hole_nan));
}
@@ -1051,6 +1091,11 @@ static double math_cos_double(double x) {
}
+static double math_tan_double(double x) {
+ return tan(x);
+}
+
+
static double math_log_double(double x) {
return log(x);
}
@@ -1072,6 +1117,14 @@ ExternalReference ExternalReference::math_cos_double_function(
}
+ExternalReference ExternalReference::math_tan_double_function(
+ Isolate* isolate) {
+ return ExternalReference(Redirect(isolate,
+ FUNCTION_ADDR(math_tan_double),
+ BUILTIN_FP_CALL));
+}
+
+
ExternalReference ExternalReference::math_log_double_function(
Isolate* isolate) {
return ExternalReference(Redirect(isolate,
@@ -1100,17 +1153,9 @@ double power_double_int(double x, int y) {
double power_double_double(double x, double y) {
- int y_int = static_cast<int>(y);
- if (y == y_int) {
- return power_double_int(x, y_int); // Returns 1.0 for exponent 0.
- }
- if (!isinf(x)) {
- if (y == 0.5) return sqrt(x + 0.0); // -0 must be converted to +0.
- if (y == -0.5) return 1.0 / sqrt(x + 0.0);
- }
- if (isnan(y) || ((x == 1 || x == -1) && isinf(y))) {
- return OS::nan_value();
- }
+ // The checks for special cases can be dropped in ia32 because it has already
+ // been done in generated code before bailing out here.
+ if (isnan(y) || ((x == 1 || x == -1) && isinf(y))) return OS::nan_value();
return pow(x, y);
}
diff --git a/src/3rdparty/v8/src/assembler.h b/src/3rdparty/v8/src/assembler.h
index 5b71363..6deca26 100644
--- a/src/3rdparty/v8/src/assembler.h
+++ b/src/3rdparty/v8/src/assembler.h
@@ -30,19 +30,27 @@
// The original source code covered by the above license above has been
// modified significantly by Google Inc.
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
#ifndef V8_ASSEMBLER_H_
#define V8_ASSEMBLER_H_
+#include "v8.h"
+
#include "allocation.h"
+#include "builtins.h"
#include "gdb-jit.h"
+#include "isolate.h"
#include "runtime.h"
#include "token.h"
namespace v8 {
+
+class ApiFunction;
+
namespace internal {
+struct StatsCounter;
const unsigned kNoASTId = -1;
// -----------------------------------------------------------------------------
// Platform independent assembler base class.
@@ -59,21 +67,6 @@ class AssemblerBase: public Malloced {
int jit_cookie_;
};
-// -----------------------------------------------------------------------------
-// Common double constants.
-
-class DoubleConstant: public AllStatic {
- public:
- static const double min_int;
- static const double one_half;
- static const double minus_zero;
- static const double zero;
- static const double uint8_max_value;
- static const double negative_infinity;
- static const double canonical_non_hole_nan;
- static const double the_hole_nan;
-};
-
// -----------------------------------------------------------------------------
// Labels represent pc locations; they are typically jump or call targets.
@@ -271,7 +264,7 @@ class RelocInfo BASE_EMBEDDED {
INLINE(void apply(intptr_t delta));
// Is the pointer this relocation info refers to coded like a plain pointer
- // or is it strange in some way (eg relative or patched into a series of
+ // or is it strange in some way (e.g. relative or patched into a series of
// instructions).
bool IsCodedSpecially();
@@ -371,7 +364,7 @@ class RelocInfo BASE_EMBEDDED {
// routines expect to access these pointers indirectly. The following
// location provides a place for these pointers to exist natually
// when accessed via the Iterator.
- Object *reconstructed_obj_ptr_;
+ Object* reconstructed_obj_ptr_;
// External-reference pointers are also split across instruction-pairs
// in mips, but are accessed via indirect pointers. This location
// provides a place for that pointer to exist naturally. Its address
@@ -542,6 +535,8 @@ class ExternalReference BASE_EMBEDDED {
DIRECT_GETTER_CALL
};
+ static void SetUp();
+
typedef void* ExternalReferenceRedirector(void* original, Type type);
ExternalReference(Builtins::CFunctionId id, Isolate* isolate);
@@ -587,10 +582,12 @@ class ExternalReference BASE_EMBEDDED {
static ExternalReference transcendental_cache_array_address(Isolate* isolate);
static ExternalReference delete_handle_scope_extensions(Isolate* isolate);
+ static ExternalReference get_date_field_function(Isolate* isolate);
+ static ExternalReference date_cache_stamp(Isolate* isolate);
+
// Deoptimization support.
static ExternalReference new_deoptimizer_function(Isolate* isolate);
static ExternalReference compute_output_frames_function(Isolate* isolate);
- static ExternalReference global_contexts_list(Isolate* isolate);
// Static data in the keyed lookup cache.
static ExternalReference keyed_lookup_cache_keys(Isolate* isolate);
@@ -652,6 +649,7 @@ class ExternalReference BASE_EMBEDDED {
static ExternalReference math_sin_double_function(Isolate* isolate);
static ExternalReference math_cos_double_function(Isolate* isolate);
+ static ExternalReference math_tan_double_function(Isolate* isolate);
static ExternalReference math_log_double_function(Isolate* isolate);
Address address() const {return reinterpret_cast<Address>(address_);}
@@ -816,33 +814,33 @@ class PreservePositionScope BASE_EMBEDDED {
// -----------------------------------------------------------------------------
// Utility functions
-static inline bool is_intn(int x, int n) {
+inline bool is_intn(int x, int n) {
return -(1 << (n-1)) <= x && x < (1 << (n-1));
}
-static inline bool is_int8(int x) { return is_intn(x, 8); }
-static inline bool is_int16(int x) { return is_intn(x, 16); }
-static inline bool is_int18(int x) { return is_intn(x, 18); }
-static inline bool is_int24(int x) { return is_intn(x, 24); }
+inline bool is_int8(int x) { return is_intn(x, 8); }
+inline bool is_int16(int x) { return is_intn(x, 16); }
+inline bool is_int18(int x) { return is_intn(x, 18); }
+inline bool is_int24(int x) { return is_intn(x, 24); }
-static inline bool is_uintn(int x, int n) {
+inline bool is_uintn(int x, int n) {
return (x & -(1 << n)) == 0;
}
-static inline bool is_uint2(int x) { return is_uintn(x, 2); }
-static inline bool is_uint3(int x) { return is_uintn(x, 3); }
-static inline bool is_uint4(int x) { return is_uintn(x, 4); }
-static inline bool is_uint5(int x) { return is_uintn(x, 5); }
-static inline bool is_uint6(int x) { return is_uintn(x, 6); }
-static inline bool is_uint8(int x) { return is_uintn(x, 8); }
-static inline bool is_uint10(int x) { return is_uintn(x, 10); }
-static inline bool is_uint12(int x) { return is_uintn(x, 12); }
-static inline bool is_uint16(int x) { return is_uintn(x, 16); }
-static inline bool is_uint24(int x) { return is_uintn(x, 24); }
-static inline bool is_uint26(int x) { return is_uintn(x, 26); }
-static inline bool is_uint28(int x) { return is_uintn(x, 28); }
-
-static inline int NumberOfBitsSet(uint32_t x) {
+inline bool is_uint2(int x) { return is_uintn(x, 2); }
+inline bool is_uint3(int x) { return is_uintn(x, 3); }
+inline bool is_uint4(int x) { return is_uintn(x, 4); }
+inline bool is_uint5(int x) { return is_uintn(x, 5); }
+inline bool is_uint6(int x) { return is_uintn(x, 6); }
+inline bool is_uint8(int x) { return is_uintn(x, 8); }
+inline bool is_uint10(int x) { return is_uintn(x, 10); }
+inline bool is_uint12(int x) { return is_uintn(x, 12); }
+inline bool is_uint16(int x) { return is_uintn(x, 16); }
+inline bool is_uint24(int x) { return is_uintn(x, 24); }
+inline bool is_uint26(int x) { return is_uintn(x, 26); }
+inline bool is_uint28(int x) { return is_uintn(x, 28); }
+
+inline int NumberOfBitsSet(uint32_t x) {
unsigned int num_bits_set;
for (num_bits_set = 0; x; x >>= 1) {
num_bits_set += x & 1;
diff --git a/src/3rdparty/v8/src/ast-inl.h b/src/3rdparty/v8/src/ast-inl.h
deleted file mode 100644
index 217c71f..0000000
--- a/src/3rdparty/v8/src/ast-inl.h
+++ /dev/null
@@ -1,136 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#ifndef V8_AST_INL_H_
-#define V8_AST_INL_H_
-
-#include "v8.h"
-
-#include "ast.h"
-#include "scopes.h"
-
-namespace v8 {
-namespace internal {
-
-
-SwitchStatement::SwitchStatement(Isolate* isolate,
- ZoneStringList* labels)
- : BreakableStatement(isolate, labels, TARGET_FOR_ANONYMOUS),
- tag_(NULL), cases_(NULL) {
-}
-
-
-Block::Block(Isolate* isolate,
- ZoneStringList* labels,
- int capacity,
- bool is_initializer_block)
- : BreakableStatement(isolate, labels, TARGET_FOR_NAMED_ONLY),
- statements_(capacity),
- is_initializer_block_(is_initializer_block),
- block_scope_(NULL) {
-}
-
-
-BreakableStatement::BreakableStatement(Isolate* isolate,
- ZoneStringList* labels,
- Type type)
- : labels_(labels),
- type_(type),
- entry_id_(GetNextId(isolate)),
- exit_id_(GetNextId(isolate)) {
- ASSERT(labels == NULL || labels->length() > 0);
-}
-
-
-IterationStatement::IterationStatement(Isolate* isolate, ZoneStringList* labels)
- : BreakableStatement(isolate, labels, TARGET_FOR_ANONYMOUS),
- body_(NULL),
- continue_target_(),
- osr_entry_id_(GetNextId(isolate)) {
-}
-
-
-DoWhileStatement::DoWhileStatement(Isolate* isolate, ZoneStringList* labels)
- : IterationStatement(isolate, labels),
- cond_(NULL),
- condition_position_(-1),
- continue_id_(GetNextId(isolate)),
- back_edge_id_(GetNextId(isolate)) {
-}
-
-
-WhileStatement::WhileStatement(Isolate* isolate, ZoneStringList* labels)
- : IterationStatement(isolate, labels),
- cond_(NULL),
- may_have_function_literal_(true),
- body_id_(GetNextId(isolate)) {
-}
-
-
-ForStatement::ForStatement(Isolate* isolate, ZoneStringList* labels)
- : IterationStatement(isolate, labels),
- init_(NULL),
- cond_(NULL),
- next_(NULL),
- may_have_function_literal_(true),
- loop_variable_(NULL),
- continue_id_(GetNextId(isolate)),
- body_id_(GetNextId(isolate)) {
-}
-
-
-ForInStatement::ForInStatement(Isolate* isolate, ZoneStringList* labels)
- : IterationStatement(isolate, labels),
- each_(NULL),
- enumerable_(NULL),
- assignment_id_(GetNextId(isolate)) {
-}
-
-
-int FunctionLiteral::start_position() const {
- return scope()->start_position();
-}
-
-
-int FunctionLiteral::end_position() const {
- return scope()->end_position();
-}
-
-
-StrictModeFlag FunctionLiteral::strict_mode_flag() const {
- return scope()->strict_mode_flag();
-}
-
-
-bool FunctionLiteral::qml_mode() const {
- return scope()->is_qml_mode();
-}
-
-
-} } // namespace v8::internal
-
-#endif // V8_AST_INL_H_
diff --git a/src/3rdparty/v8/src/ast.cc b/src/3rdparty/v8/src/ast.cc
index 3be53ac..4b6ae68 100644
--- a/src/3rdparty/v8/src/ast.cc
+++ b/src/3rdparty/v8/src/ast.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -25,10 +25,15 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#include "v8.h"
-
#include "ast.h"
+
+#include <math.h> // For isfinite.
+#include "builtins.h"
+#include "conversions.h"
+#include "hashmap.h"
#include "parser.h"
+#include "property-details.h"
+#include "property.h"
#include "scopes.h"
#include "string-stream.h"
#include "type-info.h"
@@ -70,7 +75,9 @@ VariableProxy::VariableProxy(Isolate* isolate, Variable* var)
var_(NULL), // Will be set by the call to BindTo.
is_this_(var->is_this()),
is_trivial_(false),
- position_(RelocInfo::kNoPosition) {
+ is_lvalue_(false),
+ position_(RelocInfo::kNoPosition),
+ interface_(var->interface()) {
BindTo(var);
}
@@ -78,13 +85,16 @@ VariableProxy::VariableProxy(Isolate* isolate, Variable* var)
VariableProxy::VariableProxy(Isolate* isolate,
Handle<String> name,
bool is_this,
- int position)
+ int position,
+ Interface* interface)
: Expression(isolate),
name_(name),
var_(NULL),
is_this_(is_this),
is_trivial_(false),
- position_(position) {
+ is_lvalue_(false),
+ position_(position),
+ interface_(interface) {
// Names must be canonicalized for fast equality checks.
ASSERT(name->IsSymbol());
}
@@ -119,18 +129,7 @@ Assignment::Assignment(Isolate* isolate,
assignment_id_(GetNextId(isolate)),
block_start_(false),
block_end_(false),
- is_monomorphic_(false) {
- ASSERT(Token::IsAssignmentOp(op));
- if (is_compound()) {
- binary_operation_ =
- new(isolate->zone()) BinaryOperation(isolate,
- binary_op(),
- target,
- value,
- pos + 1);
- compound_load_id_ = GetNextId(isolate);
- }
-}
+ is_monomorphic_(false) { }
Token::Value Assignment::binary_op() const {
@@ -157,12 +156,30 @@ bool FunctionLiteral::AllowsLazyCompilation() {
}
-ObjectLiteral::Property::Property(Literal* key, Expression* value) {
+int FunctionLiteral::start_position() const {
+ return scope()->start_position();
+}
+
+
+int FunctionLiteral::end_position() const {
+ return scope()->end_position();
+}
+
+
+LanguageMode FunctionLiteral::language_mode() const {
+ return scope()->language_mode();
+}
+
+
+ObjectLiteral::Property::Property(Literal* key,
+ Expression* value,
+ Isolate* isolate) {
emit_store_ = true;
key_ = key;
value_ = value;
Object* k = *key->handle();
- if (k->IsSymbol() && HEAP->Proto_symbol()->Equals(String::cast(k))) {
+ if (k->IsSymbol() &&
+ isolate->heap()->Proto_symbol()->Equals(String::cast(k))) {
kind_ = PROTOTYPE;
} else if (value_->AsMaterializedLiteral() != NULL) {
kind_ = MATERIALIZED_LITERAL;
@@ -175,9 +192,7 @@ ObjectLiteral::Property::Property(Literal* key, Expression* value) {
ObjectLiteral::Property::Property(bool is_getter, FunctionLiteral* value) {
- Isolate* isolate = Isolate::Current();
emit_store_ = true;
- key_ = new(isolate->zone()) Literal(isolate, value->name());
value_ = value;
kind_ = is_getter ? GETTER : SETTER;
}
@@ -228,55 +243,21 @@ bool IsEqualNumber(void* first, void* second) {
void ObjectLiteral::CalculateEmitStore() {
- HashMap properties(&IsEqualString);
- HashMap elements(&IsEqualNumber);
- for (int i = this->properties()->length() - 1; i >= 0; i--) {
- ObjectLiteral::Property* property = this->properties()->at(i);
+ ZoneHashMap table(Literal::Match);
+ for (int i = properties()->length() - 1; i >= 0; i--) {
+ ObjectLiteral::Property* property = properties()->at(i);
Literal* literal = property->key();
- Handle<Object> handle = literal->handle();
-
- if (handle->IsNull()) {
- continue;
- }
-
- uint32_t hash;
- HashMap* table;
- void* key;
- Factory* factory = Isolate::Current()->factory();
- if (handle->IsSymbol()) {
- Handle<String> name(String::cast(*handle));
- if (name->AsArrayIndex(&hash)) {
- Handle<Object> key_handle = factory->NewNumberFromUint(hash);
- key = key_handle.location();
- table = &elements;
- } else {
- key = name.location();
- hash = name->Hash();
- table = &properties;
- }
- } else if (handle->ToArrayIndex(&hash)) {
- key = handle.location();
- table = &elements;
- } else {
- ASSERT(handle->IsNumber());
- double num = handle->Number();
- char arr[100];
- Vector<char> buffer(arr, ARRAY_SIZE(arr));
- const char* str = DoubleToCString(num, buffer);
- Handle<String> name = factory->NewStringFromAscii(CStrVector(str));
- key = name.location();
- hash = name->Hash();
- table = &properties;
- }
+ if (literal->handle()->IsNull()) continue;
+ uint32_t hash = literal->Hash();
// If the key of a computed property is in the table, do not emit
// a store for the property later.
- if (property->kind() == ObjectLiteral::Property::COMPUTED) {
- if (table->Lookup(key, hash, false) != NULL) {
- property->set_emit_store(false);
- }
+ if (property->kind() == ObjectLiteral::Property::COMPUTED &&
+ table.Lookup(literal, hash, false) != NULL) {
+ property->set_emit_store(false);
+ } else {
+ // Add key to the table.
+ table.Lookup(literal, hash, true);
}
- // Add key to the table.
- table->Lookup(key, hash, true);
}
}
@@ -405,232 +386,22 @@ bool CompareOperation::IsLiteralCompareNull(Expression** expr) {
// Inlining support
bool Declaration::IsInlineable() const {
- return proxy()->var()->IsStackAllocated() && fun() == NULL;
+ return proxy()->var()->IsStackAllocated();
}
-
-bool TargetCollector::IsInlineable() const {
- UNREACHABLE();
+bool FunctionDeclaration::IsInlineable() const {
return false;
}
-bool ForInStatement::IsInlineable() const {
- return false;
-}
-
-
-bool WithStatement::IsInlineable() const {
- return false;
-}
-
-
-bool SwitchStatement::IsInlineable() const {
- return false;
-}
-
-
-bool TryStatement::IsInlineable() const {
- return false;
-}
-
-
-bool TryCatchStatement::IsInlineable() const {
- return false;
-}
-
-
-bool TryFinallyStatement::IsInlineable() const {
- return false;
-}
-
-
-bool DebuggerStatement::IsInlineable() const {
- return false;
-}
-
-
-bool Throw::IsInlineable() const {
- return exception()->IsInlineable();
-}
-
-
-bool MaterializedLiteral::IsInlineable() const {
- // TODO(1322): Allow materialized literals.
- return false;
-}
-
-
-bool FunctionLiteral::IsInlineable() const {
- // TODO(1322): Allow materialized literals.
- return false;
-}
-
-
-bool ThisFunction::IsInlineable() const {
- return true;
-}
-
-
-bool SharedFunctionInfoLiteral::IsInlineable() const {
- return false;
-}
-
-
-bool ForStatement::IsInlineable() const {
- return (init() == NULL || init()->IsInlineable())
- && (cond() == NULL || cond()->IsInlineable())
- && (next() == NULL || next()->IsInlineable())
- && body()->IsInlineable();
-}
-
-
-bool WhileStatement::IsInlineable() const {
- return cond()->IsInlineable()
- && body()->IsInlineable();
-}
-
-
-bool DoWhileStatement::IsInlineable() const {
- return cond()->IsInlineable()
- && body()->IsInlineable();
-}
-
-
-bool ContinueStatement::IsInlineable() const {
- return true;
-}
-
-
-bool BreakStatement::IsInlineable() const {
- return true;
-}
-
-
-bool EmptyStatement::IsInlineable() const {
- return true;
-}
-
-
-bool Literal::IsInlineable() const {
- return true;
-}
-
-
-bool Block::IsInlineable() const {
- const int count = statements_.length();
- for (int i = 0; i < count; ++i) {
- if (!statements_[i]->IsInlineable()) return false;
- }
- return true;
-}
-
-
-bool ExpressionStatement::IsInlineable() const {
- return expression()->IsInlineable();
-}
-
-
-bool IfStatement::IsInlineable() const {
- return condition()->IsInlineable()
- && then_statement()->IsInlineable()
- && else_statement()->IsInlineable();
-}
-
-
-bool ReturnStatement::IsInlineable() const {
- return expression()->IsInlineable();
-}
-
-
-bool Conditional::IsInlineable() const {
- return condition()->IsInlineable() && then_expression()->IsInlineable() &&
- else_expression()->IsInlineable();
-}
-
-
-bool VariableProxy::IsInlineable() const {
- return var()->IsUnallocated()
- || var()->IsStackAllocated()
- || var()->IsContextSlot();
-}
-
-
-bool Assignment::IsInlineable() const {
- return target()->IsInlineable() && value()->IsInlineable();
-}
-
-
-bool Property::IsInlineable() const {
- return obj()->IsInlineable() && key()->IsInlineable();
-}
-
-
-bool Call::IsInlineable() const {
- if (!expression()->IsInlineable()) return false;
- const int count = arguments()->length();
- for (int i = 0; i < count; ++i) {
- if (!arguments()->at(i)->IsInlineable()) return false;
- }
- return true;
-}
-
-
-bool CallNew::IsInlineable() const {
- if (!expression()->IsInlineable()) return false;
- const int count = arguments()->length();
- for (int i = 0; i < count; ++i) {
- if (!arguments()->at(i)->IsInlineable()) return false;
- }
- return true;
-}
-
-
-bool CallRuntime::IsInlineable() const {
- // Don't try to inline JS runtime calls because we don't (currently) even
- // optimize them.
- if (is_jsruntime()) return false;
- // Don't inline the %_ArgumentsLength or %_Arguments because their
- // implementation will not work. There is no stack frame to get them
- // from.
- if (function()->intrinsic_type == Runtime::INLINE &&
- (name()->IsEqualTo(CStrVector("_ArgumentsLength")) ||
- name()->IsEqualTo(CStrVector("_Arguments")))) {
- return false;
- }
- const int count = arguments()->length();
- for (int i = 0; i < count; ++i) {
- if (!arguments()->at(i)->IsInlineable()) return false;
- }
- return true;
-}
-
-
-bool UnaryOperation::IsInlineable() const {
- return expression()->IsInlineable();
-}
-
-
-bool BinaryOperation::IsInlineable() const {
- return left()->IsInlineable() && right()->IsInlineable();
-}
-
-
-bool CompareOperation::IsInlineable() const {
- return left()->IsInlineable() && right()->IsInlineable();
-}
-
-
-bool CountOperation::IsInlineable() const {
- return expression()->IsInlineable();
-}
-
-
// ----------------------------------------------------------------------------
// Recording of type feedback
void Property::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
// Record type feedback from the oracle in the AST.
+ is_uninitialized_ = oracle->LoadIsUninitialized(this);
+ if (is_uninitialized_) return;
+
is_monomorphic_ = oracle->LoadIsMonomorphicNormal(this);
receiver_types_.Clear();
if (key()->IsPropertyName()) {
@@ -695,6 +466,10 @@ void CaseClause::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
TypeInfo info = oracle->SwitchType(this);
if (info.IsSmi()) {
compare_type_ = SMI_ONLY;
+ } else if (info.IsSymbol()) {
+ compare_type_ = SYMBOL_ONLY;
+ } else if (info.IsNonSymbol()) {
+ compare_type_ = STRING_ONLY;
} else if (info.IsNonPrimitive()) {
compare_type_ = OBJECT_ONLY;
} else {
@@ -703,39 +478,47 @@ void CaseClause::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
}
-static bool CanCallWithoutIC(Handle<JSFunction> target, int arity) {
- SharedFunctionInfo* info = target->shared();
- // If the number of formal parameters of the target function does
- // not match the number of arguments we're passing, we don't want to
- // deal with it. Otherwise, we can call it directly.
- return !target->NeedsArgumentsAdaption() ||
- info->formal_parameter_count() == arity;
-}
-
-
bool Call::ComputeTarget(Handle<Map> type, Handle<String> name) {
+ // If there is an interceptor, we can't compute the target for a direct call.
+ if (type->has_named_interceptor()) return false;
+
if (check_type_ == RECEIVER_MAP_CHECK) {
- // For primitive checks the holder is set up to point to the
- // corresponding prototype object, i.e. one step of the algorithm
- // below has been already performed.
- // For non-primitive checks we clear it to allow computing targets
- // for polymorphic calls.
+ // For primitive checks the holder is set up to point to the corresponding
+ // prototype object, i.e. one step of the algorithm below has been already
+ // performed. For non-primitive checks we clear it to allow computing
+ // targets for polymorphic calls.
holder_ = Handle<JSObject>::null();
}
+ LookupResult lookup(type->GetIsolate());
while (true) {
- LookupResult lookup(type->GetIsolate());
type->LookupInDescriptors(NULL, *name, &lookup);
- // If the function wasn't found directly in the map, we start
- // looking upwards through the prototype chain.
- if (!lookup.IsFound() && type->prototype()->IsJSObject()) {
- holder_ = Handle<JSObject>(JSObject::cast(type->prototype()));
- type = Handle<Map>(holder()->map());
- } else if (lookup.IsProperty() && lookup.type() == CONSTANT_FUNCTION) {
- target_ = Handle<JSFunction>(lookup.GetConstantFunctionFromMap(*type));
- return CanCallWithoutIC(target_, arguments()->length());
- } else {
- return false;
+ if (lookup.IsFound()) {
+ switch (lookup.type()) {
+ case CONSTANT_FUNCTION:
+ // We surely know the target for a constant function.
+ target_ =
+ Handle<JSFunction>(lookup.GetConstantFunctionFromMap(*type));
+ return true;
+ case NORMAL:
+ case FIELD:
+ case CALLBACKS:
+ case HANDLER:
+ case INTERCEPTOR:
+ // We don't know the target.
+ return false;
+ case MAP_TRANSITION:
+ case ELEMENTS_TRANSITION:
+ case CONSTANT_TRANSITION:
+ case NULL_DESCRIPTOR:
+ // Perhaps something interesting is up in the prototype chain...
+ break;
+ }
}
+ // If we reach the end of the prototype chain, we don't know the target.
+ if (!type->prototype()->IsJSObject()) return false;
+ // Go up the prototype chain, recording where we are currently.
+ holder_ = Handle<JSObject>(JSObject::cast(type->prototype()));
+ type = Handle<Map>(holder()->map());
}
}
@@ -744,7 +527,7 @@ bool Call::ComputeGlobalTarget(Handle<GlobalObject> global,
LookupResult* lookup) {
target_ = Handle<JSFunction>::null();
cell_ = Handle<JSGlobalPropertyCell>::null();
- ASSERT(lookup->IsProperty() &&
+ ASSERT(lookup->IsFound() &&
lookup->type() == NORMAL &&
lookup->holder() == *global);
cell_ = Handle<JSGlobalPropertyCell>(global->GetPropertyCell(lookup));
@@ -752,8 +535,7 @@ bool Call::ComputeGlobalTarget(Handle<GlobalObject> global,
Handle<JSFunction> candidate(JSFunction::cast(cell_->value()));
// If the function is in new space we assume it's more likely to
// change and thus prefer the general IC code.
- if (!HEAP->InNewSpace(*candidate) &&
- CanCallWithoutIC(candidate, arguments()->length())) {
+ if (!HEAP->InNewSpace(*candidate)) {
target_ = candidate;
return true;
}
@@ -767,11 +549,6 @@ void Call::RecordTypeFeedback(TypeFeedbackOracle* oracle,
is_monomorphic_ = oracle->CallIsMonomorphic(this);
Property* property = expression()->AsProperty();
if (property == NULL) {
- if (VariableProxy *proxy = expression()->AsVariableProxy()) {
- if (proxy->var()->is_qml_global())
- return;
- }
-
// Function call. Specialize for monomorphic calls.
if (is_monomorphic_) target_ = oracle->GetCallTarget(this);
} else {
@@ -808,6 +585,14 @@ void Call::RecordTypeFeedback(TypeFeedbackOracle* oracle,
}
+void CallNew::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
+ is_monomorphic_ = oracle->CallNewIsMonomorphic(this);
+ if (is_monomorphic_) {
+ target_ = oracle->GetCallNewTarget(this);
+ }
+}
+
+
void CompareOperation::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
TypeInfo info = oracle->CompareType(this);
if (info.IsSmi()) {
@@ -820,6 +605,13 @@ void CompareOperation::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
}
+void ObjectLiteral::Property::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
+ receiver_type_ = oracle->ObjectLiteralStoreIsMonomorphic(this)
+ ? oracle->GetObjectLiteralStoreMap(this)
+ : Handle<Map>::null();
+}
+
+
// ----------------------------------------------------------------------------
// Implementation of AstVisitor
@@ -883,8 +675,6 @@ FOR_EACH_REG_EXP_TREE_TYPE(MAKE_TYPE_CASE)
FOR_EACH_REG_EXP_TREE_TYPE(MAKE_TYPE_CASE)
#undef MAKE_TYPE_CASE
-RegExpEmpty RegExpEmpty::kInstance;
-
static Interval ListCaptureRegisters(ZoneList<RegExpTree*>* children) {
Interval result = Interval::Empty();
@@ -1202,4 +992,172 @@ CaseClause::CaseClause(Isolate* isolate,
entry_id_(AstNode::GetNextId(isolate)) {
}
+
+#define INCREASE_NODE_COUNT(NodeType) \
+ void AstConstructionVisitor::Visit##NodeType(NodeType* node) { \
+ increase_node_count(); \
+ }
+
+INCREASE_NODE_COUNT(VariableDeclaration)
+INCREASE_NODE_COUNT(FunctionDeclaration)
+INCREASE_NODE_COUNT(ModuleDeclaration)
+INCREASE_NODE_COUNT(ImportDeclaration)
+INCREASE_NODE_COUNT(ExportDeclaration)
+INCREASE_NODE_COUNT(ModuleLiteral)
+INCREASE_NODE_COUNT(ModuleVariable)
+INCREASE_NODE_COUNT(ModulePath)
+INCREASE_NODE_COUNT(ModuleUrl)
+INCREASE_NODE_COUNT(Block)
+INCREASE_NODE_COUNT(ExpressionStatement)
+INCREASE_NODE_COUNT(EmptyStatement)
+INCREASE_NODE_COUNT(IfStatement)
+INCREASE_NODE_COUNT(ContinueStatement)
+INCREASE_NODE_COUNT(BreakStatement)
+INCREASE_NODE_COUNT(ReturnStatement)
+INCREASE_NODE_COUNT(Conditional)
+INCREASE_NODE_COUNT(Literal)
+INCREASE_NODE_COUNT(ObjectLiteral)
+INCREASE_NODE_COUNT(Assignment)
+INCREASE_NODE_COUNT(Throw)
+INCREASE_NODE_COUNT(Property)
+INCREASE_NODE_COUNT(UnaryOperation)
+INCREASE_NODE_COUNT(CountOperation)
+INCREASE_NODE_COUNT(BinaryOperation)
+INCREASE_NODE_COUNT(CompareOperation)
+INCREASE_NODE_COUNT(ThisFunction)
+INCREASE_NODE_COUNT(Call)
+INCREASE_NODE_COUNT(CallNew)
+
+#undef INCREASE_NODE_COUNT
+
+
+void AstConstructionVisitor::VisitWithStatement(WithStatement* node) {
+ increase_node_count();
+ add_flag(kDontOptimize);
+ add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitSwitchStatement(SwitchStatement* node) {
+ increase_node_count();
+ add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitDoWhileStatement(DoWhileStatement* node) {
+ increase_node_count();
+ add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitWhileStatement(WhileStatement* node) {
+ increase_node_count();
+ add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitForStatement(ForStatement* node) {
+ increase_node_count();
+ add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitForInStatement(ForInStatement* node) {
+ increase_node_count();
+ add_flag(kDontSelfOptimize);
+}
+
+
+void AstConstructionVisitor::VisitTryCatchStatement(TryCatchStatement* node) {
+ increase_node_count();
+ add_flag(kDontOptimize);
+ add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitTryFinallyStatement(
+ TryFinallyStatement* node) {
+ increase_node_count();
+ add_flag(kDontOptimize);
+ add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitDebuggerStatement(DebuggerStatement* node) {
+ increase_node_count();
+ add_flag(kDontOptimize);
+ add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitFunctionLiteral(FunctionLiteral* node) {
+ increase_node_count();
+ add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitSharedFunctionInfoLiteral(
+ SharedFunctionInfoLiteral* node) {
+ increase_node_count();
+ add_flag(kDontOptimize);
+ add_flag(kDontInline);
+}
+
+
+void AstConstructionVisitor::VisitVariableProxy(VariableProxy* node) {
+ increase_node_count();
+ // In theory, we'd have to add:
+ // if(node->var()->IsLookupSlot()) { add_flag(kDontInline); }
+ // However, node->var() is usually not bound yet at VariableProxy creation
+ // time, and LOOKUP variables only result from constructs that cannot
+ // be inlined anyway.
+}
+
+
+void AstConstructionVisitor::VisitRegExpLiteral(RegExpLiteral* node) {
+ increase_node_count();
+ add_flag(kDontInline); // TODO(1322): Allow materialized literals.
+}
+
+
+void AstConstructionVisitor::VisitArrayLiteral(ArrayLiteral* node) {
+ increase_node_count();
+ add_flag(kDontInline); // TODO(1322): Allow materialized literals.
+}
+
+
+void AstConstructionVisitor::VisitCallRuntime(CallRuntime* node) {
+ increase_node_count();
+ if (node->is_jsruntime()) {
+ // Don't try to inline JS runtime calls because we don't (currently) even
+ // optimize them.
+ add_flag(kDontInline);
+ } else if (node->function()->intrinsic_type == Runtime::INLINE &&
+ (node->name()->IsEqualTo(CStrVector("_ArgumentsLength")) ||
+ node->name()->IsEqualTo(CStrVector("_Arguments")))) {
+ // Don't inline the %_ArgumentsLength or %_Arguments because their
+ // implementation will not work. There is no stack frame to get them
+ // from.
+ add_flag(kDontInline);
+ }
+}
+
+
+Handle<String> Literal::ToString() {
+ if (handle_->IsString()) return Handle<String>::cast(handle_);
+ ASSERT(handle_->IsNumber());
+ char arr[100];
+ Vector<char> buffer(arr, ARRAY_SIZE(arr));
+ const char* str;
+ if (handle_->IsSmi()) {
+ // Optimization only, the heap number case would subsume this.
+ OS::SNPrintF(buffer, "%d", Smi::cast(*handle_)->value());
+ str = arr;
+ } else {
+ str = DoubleToCString(handle_->Number(), buffer);
+ }
+ return FACTORY->NewStringFromAscii(CStrVector(str));
+}
+
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/ast.h b/src/3rdparty/v8/src/ast.h
index 7d50430..d6c47e2 100644
--- a/src/3rdparty/v8/src/ast.h
+++ b/src/3rdparty/v8/src/ast.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -28,14 +28,21 @@
#ifndef V8_AST_H_
#define V8_AST_H_
-#include "allocation.h"
-#include "execution.h"
+#include "v8.h"
+
+#include "assembler.h"
#include "factory.h"
+#include "isolate.h"
#include "jsregexp.h"
+#include "list-inl.h"
#include "runtime.h"
#include "small-pointer-list.h"
+#include "smart-array-pointer.h"
#include "token.h"
+#include "utils.h"
#include "variables.h"
+#include "interface.h"
+#include "zone-inl.h"
namespace v8 {
namespace internal {
@@ -53,6 +60,19 @@ namespace internal {
// Nodes of the abstract syntax tree. Only concrete classes are
// enumerated here.
+#define DECLARATION_NODE_LIST(V) \
+ V(VariableDeclaration) \
+ V(FunctionDeclaration) \
+ V(ModuleDeclaration) \
+ V(ImportDeclaration) \
+ V(ExportDeclaration) \
+
+#define MODULE_NODE_LIST(V) \
+ V(ModuleLiteral) \
+ V(ModuleVariable) \
+ V(ModulePath) \
+ V(ModuleUrl)
+
#define STATEMENT_NODE_LIST(V) \
V(Block) \
V(ExpressionStatement) \
@@ -93,17 +113,38 @@ namespace internal {
V(ThisFunction)
#define AST_NODE_LIST(V) \
- V(Declaration) \
+ DECLARATION_NODE_LIST(V) \
+ MODULE_NODE_LIST(V) \
STATEMENT_NODE_LIST(V) \
EXPRESSION_NODE_LIST(V)
// Forward declarations
-class BitVector;
-class DefinitionInfo;
+class AstConstructionVisitor;
+template<class> class AstNodeFactory;
+class AstVisitor;
+class Declaration;
+class Module;
+class BreakableStatement;
+class Expression;
+class IterationStatement;
class MaterializedLiteral;
+class Statement;
class TargetCollector;
class TypeFeedbackOracle;
+class RegExpAlternative;
+class RegExpAssertion;
+class RegExpAtom;
+class RegExpBackReference;
+class RegExpCapture;
+class RegExpCharacterClass;
+class RegExpCompiler;
+class RegExpDisjunction;
+class RegExpEmpty;
+class RegExpLookahead;
+class RegExpQuantifier;
+class RegExpText;
+
#define DEF_FORWARD_DECLARATION(type) class type;
AST_NODE_LIST(DEF_FORWARD_DECLARATION)
#undef DEF_FORWARD_DECLARATION
@@ -117,7 +158,31 @@ typedef ZoneList<Handle<Object> > ZoneObjectList;
#define DECLARE_NODE_TYPE(type) \
virtual void Accept(AstVisitor* v); \
- virtual AstNode::Type node_type() const { return AstNode::k##type; } \
+ virtual AstNode::Type node_type() const { return AstNode::k##type; }
+
+
+enum AstPropertiesFlag {
+ kDontInline,
+ kDontOptimize,
+ kDontSelfOptimize,
+ kDontSoftInline
+};
+
+
+class AstProperties BASE_EMBEDDED {
+ public:
+ class Flags : public EnumSet<AstPropertiesFlag, int> {};
+
+ AstProperties() : node_count_(0) { }
+
+ Flags* flags() { return &flags_; }
+ int node_count() { return node_count_; }
+ void add_node_count(int count) { node_count_ += count; }
+
+ private:
+ Flags flags_;
+ int node_count_;
+};
class AstNode: public ZoneObject {
@@ -136,14 +201,11 @@ class AstNode: public ZoneObject {
// that emit code (function declarations).
static const int kDeclarationsId = 3;
- // Override ZoneObject's new to count allocated AST nodes.
void* operator new(size_t size, Zone* zone) {
- Isolate* isolate = zone->isolate();
- isolate->set_ast_node_count(isolate->ast_node_count() + 1);
return zone->New(static_cast<int>(size));
}
- AstNode() {}
+ AstNode() { }
virtual ~AstNode() { }
@@ -157,6 +219,7 @@ class AstNode: public ZoneObject {
AST_NODE_LIST(DECLARE_NODE_FUNCTIONS)
#undef DECLARE_NODE_FUNCTIONS
+ virtual Declaration* AsDeclaration() { return NULL; }
virtual Statement* AsStatement() { return NULL; }
virtual Expression* AsExpression() { return NULL; }
virtual TargetCollector* AsTargetCollector() { return NULL; }
@@ -164,19 +227,13 @@ class AstNode: public ZoneObject {
virtual IterationStatement* AsIterationStatement() { return NULL; }
virtual MaterializedLiteral* AsMaterializedLiteral() { return NULL; }
- // True if the node is simple enough for us to inline calls containing it.
- virtual bool IsInlineable() const = 0;
-
- static int Count() { return Isolate::Current()->ast_node_count(); }
- static void ResetIds() { Isolate::Current()->set_ast_node_id(0); }
-
protected:
- static unsigned GetNextId(Isolate* isolate) {
+ static int GetNextId(Isolate* isolate) {
return ReserveIdRange(isolate, 1);
}
- static unsigned ReserveIdRange(Isolate* isolate, int n) {
- unsigned tmp = isolate->ast_node_id();
+ static int ReserveIdRange(Isolate* isolate, int n) {
+ int tmp = isolate->ast_node_id();
isolate->set_ast_node_id(tmp + n);
return tmp;
}
@@ -213,6 +270,7 @@ class SmallMapList {
void Reserve(int capacity) { list_.Reserve(capacity); }
void Clear() { list_.Clear(); }
+ void Sort() { list_.Sort(); }
bool is_empty() const { return list_.is_empty(); }
int length() const { return list_.length(); }
@@ -250,10 +308,6 @@ class Expression: public AstNode {
kTest
};
- explicit Expression(Isolate* isolate)
- : id_(GetNextId(isolate)),
- test_id_(GetNextId(isolate)) {}
-
virtual int position() const {
UNREACHABLE();
return 0;
@@ -304,9 +358,14 @@ class Expression: public AstNode {
unsigned id() const { return id_; }
unsigned test_id() const { return test_id_; }
+ protected:
+ explicit Expression(Isolate* isolate)
+ : id_(GetNextId(isolate)),
+ test_id_(GetNextId(isolate)) {}
+
private:
- unsigned id_;
- unsigned test_id_;
+ int id_;
+ int test_id_;
};
@@ -335,7 +394,14 @@ class BreakableStatement: public Statement {
int ExitId() const { return exit_id_; }
protected:
- BreakableStatement(Isolate* isolate, ZoneStringList* labels, Type type);
+ BreakableStatement(Isolate* isolate, ZoneStringList* labels, Type type)
+ : labels_(labels),
+ type_(type),
+ entry_id_(GetNextId(isolate)),
+ exit_id_(GetNextId(isolate)) {
+ ASSERT(labels == NULL || labels->length() > 0);
+ }
+
private:
ZoneStringList* labels_;
@@ -348,15 +414,8 @@ class BreakableStatement: public Statement {
class Block: public BreakableStatement {
public:
- inline Block(Isolate* isolate,
- ZoneStringList* labels,
- int capacity,
- bool is_initializer_block);
-
DECLARE_NODE_TYPE(Block)
- virtual bool IsInlineable() const;
-
void AddStatement(Statement* statement) { statements_.Add(statement); }
ZoneList<Statement*>* statements() { return &statements_; }
@@ -365,6 +424,19 @@ class Block: public BreakableStatement {
Scope* block_scope() const { return block_scope_; }
void set_block_scope(Scope* block_scope) { block_scope_ = block_scope; }
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ Block(Isolate* isolate,
+ ZoneStringList* labels,
+ int capacity,
+ bool is_initializer_block)
+ : BreakableStatement(isolate, labels, TARGET_FOR_NAMED_ONLY),
+ statements_(capacity),
+ is_initializer_block_(is_initializer_block),
+ block_scope_(NULL) {
+ }
+
private:
ZoneList<Statement*> statements_;
bool is_initializer_block_;
@@ -374,40 +446,236 @@ class Block: public BreakableStatement {
class Declaration: public AstNode {
public:
+ VariableProxy* proxy() const { return proxy_; }
+ VariableMode mode() const { return mode_; }
+ Scope* scope() const { return scope_; }
+ virtual InitializationFlag initialization() const = 0;
+ virtual bool IsInlineable() const;
+
+ virtual Declaration* AsDeclaration() { return this; }
+
+ protected:
Declaration(VariableProxy* proxy,
VariableMode mode,
- FunctionLiteral* fun,
Scope* scope)
: proxy_(proxy),
mode_(mode),
- fun_(fun),
scope_(scope) {
ASSERT(mode == VAR ||
mode == CONST ||
mode == CONST_HARMONY ||
mode == LET);
- // At the moment there are no "const functions"'s in JavaScript...
- ASSERT(fun == NULL || mode == VAR || mode == LET);
}
- DECLARE_NODE_TYPE(Declaration)
-
- VariableProxy* proxy() const { return proxy_; }
- VariableMode mode() const { return mode_; }
- FunctionLiteral* fun() const { return fun_; } // may be NULL
- virtual bool IsInlineable() const;
- Scope* scope() const { return scope_; }
-
private:
VariableProxy* proxy_;
VariableMode mode_;
- FunctionLiteral* fun_;
// Nested scope from which the declaration originated.
Scope* scope_;
};
+class VariableDeclaration: public Declaration {
+ public:
+ DECLARE_NODE_TYPE(VariableDeclaration)
+
+ virtual InitializationFlag initialization() const {
+ return mode() == VAR ? kCreatedInitialized : kNeedsInitialization;
+ }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ VariableDeclaration(VariableProxy* proxy,
+ VariableMode mode,
+ Scope* scope)
+ : Declaration(proxy, mode, scope) {
+ }
+};
+
+
+class FunctionDeclaration: public Declaration {
+ public:
+ DECLARE_NODE_TYPE(FunctionDeclaration)
+
+ FunctionLiteral* fun() const { return fun_; }
+ virtual InitializationFlag initialization() const {
+ return kCreatedInitialized;
+ }
+ virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ FunctionDeclaration(VariableProxy* proxy,
+ VariableMode mode,
+ FunctionLiteral* fun,
+ Scope* scope)
+ : Declaration(proxy, mode, scope),
+ fun_(fun) {
+ // At the moment there are no "const functions" in JavaScript...
+ ASSERT(mode == VAR || mode == LET);
+ ASSERT(fun != NULL);
+ }
+
+ private:
+ FunctionLiteral* fun_;
+};
+
+
+class ModuleDeclaration: public Declaration {
+ public:
+ DECLARE_NODE_TYPE(ModuleDeclaration)
+
+ Module* module() const { return module_; }
+ virtual InitializationFlag initialization() const {
+ return kCreatedInitialized;
+ }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ ModuleDeclaration(VariableProxy* proxy,
+ Module* module,
+ Scope* scope)
+ : Declaration(proxy, LET, scope),
+ module_(module) {
+ }
+
+ private:
+ Module* module_;
+};
+
+
+class ImportDeclaration: public Declaration {
+ public:
+ DECLARE_NODE_TYPE(ImportDeclaration)
+
+ Module* module() const { return module_; }
+ virtual InitializationFlag initialization() const {
+ return kCreatedInitialized;
+ }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ ImportDeclaration(VariableProxy* proxy,
+ Module* module,
+ Scope* scope)
+ : Declaration(proxy, LET, scope),
+ module_(module) {
+ }
+
+ private:
+ Module* module_;
+};
+
+
+class ExportDeclaration: public Declaration {
+ public:
+ DECLARE_NODE_TYPE(ExportDeclaration)
+
+ virtual InitializationFlag initialization() const {
+ return kCreatedInitialized;
+ }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ ExportDeclaration(VariableProxy* proxy,
+ Scope* scope)
+ : Declaration(proxy, LET, scope) {
+ }
+};
+
+
+class Module: public AstNode {
+ public:
+ Interface* interface() const { return interface_; }
+
+ protected:
+ Module() : interface_(Interface::NewModule()) {}
+ explicit Module(Interface* interface) : interface_(interface) {}
+
+ private:
+ Interface* interface_;
+};
+
+
+class ModuleLiteral: public Module {
+ public:
+ DECLARE_NODE_TYPE(ModuleLiteral)
+
+ Block* body() const { return body_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ ModuleLiteral(Block* body, Interface* interface)
+ : Module(interface),
+ body_(body) {
+ }
+
+ private:
+ Block* body_;
+};
+
+
+class ModuleVariable: public Module {
+ public:
+ DECLARE_NODE_TYPE(ModuleVariable)
+
+ VariableProxy* proxy() const { return proxy_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ inline explicit ModuleVariable(VariableProxy* proxy);
+
+ private:
+ VariableProxy* proxy_;
+};
+
+
+class ModulePath: public Module {
+ public:
+ DECLARE_NODE_TYPE(ModulePath)
+
+ Module* module() const { return module_; }
+ Handle<String> name() const { return name_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ ModulePath(Module* module, Handle<String> name)
+ : module_(module),
+ name_(name) {
+ }
+
+ private:
+ Module* module_;
+ Handle<String> name_;
+};
+
+
+class ModuleUrl: public Module {
+ public:
+ DECLARE_NODE_TYPE(ModuleUrl)
+
+ Handle<String> url() const { return url_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ explicit ModuleUrl(Handle<String> url) : url_(url) {
+ }
+
+ private:
+ Handle<String> url_;
+};
+
+
class IterationStatement: public BreakableStatement {
public:
// Type testing & conversion.
@@ -424,7 +692,11 @@ class IterationStatement: public BreakableStatement {
Label* continue_target() { return &continue_target_; }
protected:
- inline IterationStatement(Isolate* isolate, ZoneStringList* labels);
+ IterationStatement(Isolate* isolate, ZoneStringList* labels)
+ : BreakableStatement(isolate, labels, TARGET_FOR_ANONYMOUS),
+ body_(NULL),
+ osr_entry_id_(GetNextId(isolate)) {
+ }
void Initialize(Statement* body) {
body_ = body;
@@ -439,8 +711,6 @@ class IterationStatement: public BreakableStatement {
class DoWhileStatement: public IterationStatement {
public:
- inline DoWhileStatement(Isolate* isolate, ZoneStringList* labels);
-
DECLARE_NODE_TYPE(DoWhileStatement)
void Initialize(Expression* cond, Statement* body) {
@@ -460,7 +730,16 @@ class DoWhileStatement: public IterationStatement {
virtual int StackCheckId() const { return back_edge_id_; }
int BackEdgeId() const { return back_edge_id_; }
- virtual bool IsInlineable() const;
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ DoWhileStatement(Isolate* isolate, ZoneStringList* labels)
+ : IterationStatement(isolate, labels),
+ cond_(NULL),
+ condition_position_(-1),
+ continue_id_(GetNextId(isolate)),
+ back_edge_id_(GetNextId(isolate)) {
+ }
private:
Expression* cond_;
@@ -472,8 +751,6 @@ class DoWhileStatement: public IterationStatement {
class WhileStatement: public IterationStatement {
public:
- inline WhileStatement(Isolate* isolate, ZoneStringList* labels);
-
DECLARE_NODE_TYPE(WhileStatement)
void Initialize(Expression* cond, Statement* body) {
@@ -488,13 +765,22 @@ class WhileStatement: public IterationStatement {
void set_may_have_function_literal(bool value) {
may_have_function_literal_ = value;
}
- virtual bool IsInlineable() const;
// Bailout support.
virtual int ContinueId() const { return EntryId(); }
virtual int StackCheckId() const { return body_id_; }
int BodyId() const { return body_id_; }
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ WhileStatement(Isolate* isolate, ZoneStringList* labels)
+ : IterationStatement(isolate, labels),
+ cond_(NULL),
+ may_have_function_literal_(true),
+ body_id_(GetNextId(isolate)) {
+ }
+
private:
Expression* cond_;
// True if there is a function literal subexpression in the condition.
@@ -505,8 +791,6 @@ class WhileStatement: public IterationStatement {
class ForStatement: public IterationStatement {
public:
- inline ForStatement(Isolate* isolate, ZoneStringList* labels);
-
DECLARE_NODE_TYPE(ForStatement)
void Initialize(Statement* init,
@@ -538,7 +822,20 @@ class ForStatement: public IterationStatement {
bool is_fast_smi_loop() { return loop_variable_ != NULL; }
Variable* loop_variable() { return loop_variable_; }
void set_loop_variable(Variable* var) { loop_variable_ = var; }
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ ForStatement(Isolate* isolate, ZoneStringList* labels)
+ : IterationStatement(isolate, labels),
+ init_(NULL),
+ cond_(NULL),
+ next_(NULL),
+ may_have_function_literal_(true),
+ loop_variable_(NULL),
+ continue_id_(GetNextId(isolate)),
+ body_id_(GetNextId(isolate)) {
+ }
private:
Statement* init_;
@@ -554,8 +851,6 @@ class ForStatement: public IterationStatement {
class ForInStatement: public IterationStatement {
public:
- inline ForInStatement(Isolate* isolate, ZoneStringList* labels);
-
DECLARE_NODE_TYPE(ForInStatement)
void Initialize(Expression* each, Expression* enumerable, Statement* body) {
@@ -566,32 +861,44 @@ class ForInStatement: public IterationStatement {
Expression* each() const { return each_; }
Expression* enumerable() const { return enumerable_; }
- virtual bool IsInlineable() const;
- // Bailout support.
- int AssignmentId() const { return assignment_id_; }
virtual int ContinueId() const { return EntryId(); }
- virtual int StackCheckId() const { return EntryId(); }
+ virtual int StackCheckId() const { return body_id_; }
+ int BodyId() const { return body_id_; }
+ int PrepareId() const { return prepare_id_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ ForInStatement(Isolate* isolate, ZoneStringList* labels)
+ : IterationStatement(isolate, labels),
+ each_(NULL),
+ enumerable_(NULL),
+ body_id_(GetNextId(isolate)),
+ prepare_id_(GetNextId(isolate)) {
+ }
private:
Expression* each_;
Expression* enumerable_;
- int assignment_id_;
+ int body_id_;
+ int prepare_id_;
};
class ExpressionStatement: public Statement {
public:
- explicit ExpressionStatement(Expression* expression)
- : expression_(expression) { }
-
DECLARE_NODE_TYPE(ExpressionStatement)
- virtual bool IsInlineable() const;
-
void set_expression(Expression* e) { expression_ = e; }
Expression* expression() const { return expression_; }
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ explicit ExpressionStatement(Expression* expression)
+ : expression_(expression) { }
+
private:
Expression* expression_;
};
@@ -599,13 +906,15 @@ class ExpressionStatement: public Statement {
class ContinueStatement: public Statement {
public:
- explicit ContinueStatement(IterationStatement* target)
- : target_(target) { }
-
DECLARE_NODE_TYPE(ContinueStatement)
IterationStatement* target() const { return target_; }
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ explicit ContinueStatement(IterationStatement* target)
+ : target_(target) { }
private:
IterationStatement* target_;
@@ -614,13 +923,15 @@ class ContinueStatement: public Statement {
class BreakStatement: public Statement {
public:
- explicit BreakStatement(BreakableStatement* target)
- : target_(target) { }
-
DECLARE_NODE_TYPE(BreakStatement)
BreakableStatement* target() const { return target_; }
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ explicit BreakStatement(BreakableStatement* target)
+ : target_(target) { }
private:
BreakableStatement* target_;
@@ -629,13 +940,15 @@ class BreakStatement: public Statement {
class ReturnStatement: public Statement {
public:
- explicit ReturnStatement(Expression* expression)
- : expression_(expression) { }
-
DECLARE_NODE_TYPE(ReturnStatement)
Expression* expression() const { return expression_; }
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ explicit ReturnStatement(Expression* expression)
+ : expression_(expression) { }
private:
Expression* expression_;
@@ -644,15 +957,17 @@ class ReturnStatement: public Statement {
class WithStatement: public Statement {
public:
- WithStatement(Expression* expression, Statement* statement)
- : expression_(expression), statement_(statement) { }
-
DECLARE_NODE_TYPE(WithStatement)
Expression* expression() const { return expression_; }
Statement* statement() const { return statement_; }
- virtual bool IsInlineable() const;
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ WithStatement(Expression* expression, Statement* statement)
+ : expression_(expression),
+ statement_(statement) { }
private:
Expression* expression_;
@@ -684,6 +999,8 @@ class CaseClause: public ZoneObject {
// Type feedback information.
void RecordTypeFeedback(TypeFeedbackOracle* oracle);
bool IsSmiCompare() { return compare_type_ == SMI_ONLY; }
+ bool IsSymbolCompare() { return compare_type_ == SYMBOL_ONLY; }
+ bool IsStringCompare() { return compare_type_ == STRING_ONLY; }
bool IsObjectCompare() { return compare_type_ == OBJECT_ONLY; }
private:
@@ -691,7 +1008,13 @@ class CaseClause: public ZoneObject {
Label body_target_;
ZoneList<Statement*>* statements_;
int position_;
- enum CompareTypeFeedback { NONE, SMI_ONLY, OBJECT_ONLY };
+ enum CompareTypeFeedback {
+ NONE,
+ SMI_ONLY,
+ SYMBOL_ONLY,
+ STRING_ONLY,
+ OBJECT_ONLY
+ };
CompareTypeFeedback compare_type_;
int compare_id_;
int entry_id_;
@@ -700,8 +1023,6 @@ class CaseClause: public ZoneObject {
class SwitchStatement: public BreakableStatement {
public:
- inline SwitchStatement(Isolate* isolate, ZoneStringList* labels);
-
DECLARE_NODE_TYPE(SwitchStatement)
void Initialize(Expression* tag, ZoneList<CaseClause*>* cases) {
@@ -711,7 +1032,14 @@ class SwitchStatement: public BreakableStatement {
Expression* tag() const { return tag_; }
ZoneList<CaseClause*>* cases() const { return cases_; }
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ SwitchStatement(Isolate* isolate, ZoneStringList* labels)
+ : BreakableStatement(isolate, labels, TARGET_FOR_ANONYMOUS),
+ tag_(NULL),
+ cases_(NULL) { }
private:
Expression* tag_;
@@ -726,22 +1054,8 @@ class SwitchStatement: public BreakableStatement {
// given if-statement has a then- or an else-part containing code.
class IfStatement: public Statement {
public:
- IfStatement(Isolate* isolate,
- Expression* condition,
- Statement* then_statement,
- Statement* else_statement)
- : condition_(condition),
- then_statement_(then_statement),
- else_statement_(else_statement),
- if_id_(GetNextId(isolate)),
- then_id_(GetNextId(isolate)),
- else_id_(GetNextId(isolate)) {
- }
-
DECLARE_NODE_TYPE(IfStatement)
- virtual bool IsInlineable() const;
-
bool HasThenStatement() const { return !then_statement()->IsEmpty(); }
bool HasElseStatement() const { return !else_statement()->IsEmpty(); }
@@ -753,6 +1067,21 @@ class IfStatement: public Statement {
int ThenId() const { return then_id_; }
int ElseId() const { return else_id_; }
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ IfStatement(Isolate* isolate,
+ Expression* condition,
+ Statement* then_statement,
+ Statement* else_statement)
+ : condition_(condition),
+ then_statement_(then_statement),
+ else_statement_(else_statement),
+ if_id_(GetNextId(isolate)),
+ then_id_(GetNextId(isolate)),
+ else_id_(GetNextId(isolate)) {
+ }
+
private:
Expression* condition_;
Statement* then_statement_;
@@ -767,7 +1096,7 @@ class IfStatement: public Statement {
// stack in the compiler; this should probably be reworked.
class TargetCollector: public AstNode {
public:
- TargetCollector(): targets_(0) { }
+ TargetCollector() : targets_(0) { }
// Adds a jump target to the collector. The collector stores a pointer not
// a copy of the target to make binding work, so make sure not to pass in
@@ -779,7 +1108,6 @@ class TargetCollector: public AstNode {
virtual TargetCollector* AsTargetCollector() { return this; }
ZoneList<Label*>* targets() { return &targets_; }
- virtual bool IsInlineable() const;
private:
ZoneList<Label*> targets_;
@@ -788,18 +1116,24 @@ class TargetCollector: public AstNode {
class TryStatement: public Statement {
public:
- explicit TryStatement(Block* try_block)
- : try_block_(try_block), escaping_targets_(NULL) { }
-
void set_escaping_targets(ZoneList<Label*>* targets) {
escaping_targets_ = targets;
}
+ int index() const { return index_; }
Block* try_block() const { return try_block_; }
ZoneList<Label*>* escaping_targets() const { return escaping_targets_; }
- virtual bool IsInlineable() const;
+
+ protected:
+ TryStatement(int index, Block* try_block)
+ : index_(index),
+ try_block_(try_block),
+ escaping_targets_(NULL) { }
private:
+ // Unique (per-function) index of this handler. This is not an AST ID.
+ int index_;
+
Block* try_block_;
ZoneList<Label*>* escaping_targets_;
};
@@ -807,23 +1141,26 @@ class TryStatement: public Statement {
class TryCatchStatement: public TryStatement {
public:
- TryCatchStatement(Block* try_block,
+ DECLARE_NODE_TYPE(TryCatchStatement)
+
+ Scope* scope() { return scope_; }
+ Variable* variable() { return variable_; }
+ Block* catch_block() const { return catch_block_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ TryCatchStatement(int index,
+ Block* try_block,
Scope* scope,
Variable* variable,
Block* catch_block)
- : TryStatement(try_block),
+ : TryStatement(index, try_block),
scope_(scope),
variable_(variable),
catch_block_(catch_block) {
}
- DECLARE_NODE_TYPE(TryCatchStatement)
-
- Scope* scope() { return scope_; }
- Variable* variable() { return variable_; }
- Block* catch_block() const { return catch_block_; }
- virtual bool IsInlineable() const;
-
private:
Scope* scope_;
Variable* variable_;
@@ -833,14 +1170,16 @@ class TryCatchStatement: public TryStatement {
class TryFinallyStatement: public TryStatement {
public:
- TryFinallyStatement(Block* try_block, Block* finally_block)
- : TryStatement(try_block),
- finally_block_(finally_block) { }
-
DECLARE_NODE_TYPE(TryFinallyStatement)
Block* finally_block() const { return finally_block_; }
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ TryFinallyStatement(int index, Block* try_block, Block* finally_block)
+ : TryStatement(index, try_block),
+ finally_block_(finally_block) { }
private:
Block* finally_block_;
@@ -850,7 +1189,11 @@ class TryFinallyStatement: public TryStatement {
class DebuggerStatement: public Statement {
public:
DECLARE_NODE_TYPE(DebuggerStatement)
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ DebuggerStatement() {}
};
@@ -858,22 +1201,17 @@ class EmptyStatement: public Statement {
public:
DECLARE_NODE_TYPE(EmptyStatement)
- virtual bool IsInlineable() const;
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ EmptyStatement() {}
};
class Literal: public Expression {
public:
- Literal(Isolate* isolate, Handle<Object> handle)
- : Expression(isolate), handle_(handle) { }
-
DECLARE_NODE_TYPE(Literal)
- // Check if this literal is identical to the other literal.
- bool IsIdenticalTo(const Literal* other) const {
- return handle_.is_identical_to(other->handle_);
- }
-
virtual bool IsPropertyName() {
if (handle_->IsSymbol()) {
uint32_t ignored;
@@ -905,9 +1243,27 @@ class Literal: public Expression {
}
Handle<Object> handle() const { return handle_; }
- virtual bool IsInlineable() const;
+
+ // Support for using Literal as a HashMap key. NOTE: Currently, this works
+ // only for string and number literals!
+ uint32_t Hash() { return ToString()->Hash(); }
+
+ static bool Match(void* literal1, void* literal2) {
+ Handle<String> s1 = static_cast<Literal*>(literal1)->ToString();
+ Handle<String> s2 = static_cast<Literal*>(literal2)->ToString();
+ return s1->Equals(*s2);
+ }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ Literal(Isolate* isolate, Handle<Object> handle)
+ : Expression(isolate),
+ handle_(handle) { }
private:
+ Handle<String> ToString();
+
Handle<Object> handle_;
};
@@ -915,15 +1271,6 @@ class Literal: public Expression {
// Base class for literals that needs space in the corresponding JSFunction.
class MaterializedLiteral: public Expression {
public:
- MaterializedLiteral(Isolate* isolate,
- int literal_index,
- bool is_simple,
- int depth)
- : Expression(isolate),
- literal_index_(literal_index),
- is_simple_(is_simple),
- depth_(depth) {}
-
virtual MaterializedLiteral* AsMaterializedLiteral() { return this; }
int literal_index() { return literal_index_; }
@@ -933,7 +1280,16 @@ class MaterializedLiteral: public Expression {
bool is_simple() const { return is_simple_; }
int depth() const { return depth_; }
- virtual bool IsInlineable() const;
+
+ protected:
+ MaterializedLiteral(Isolate* isolate,
+ int literal_index,
+ bool is_simple,
+ int depth)
+ : Expression(isolate),
+ literal_index_(literal_index),
+ is_simple_(is_simple),
+ depth_(depth) {}
private:
int literal_index_;
@@ -959,39 +1315,36 @@ class ObjectLiteral: public MaterializedLiteral {
PROTOTYPE // Property is __proto__.
};
- Property(Literal* key, Expression* value);
- Property(bool is_getter, FunctionLiteral* value);
+ Property(Literal* key, Expression* value, Isolate* isolate);
Literal* key() { return key_; }
Expression* value() { return value_; }
Kind kind() { return kind_; }
+ // Type feedback information.
+ void RecordTypeFeedback(TypeFeedbackOracle* oracle);
+ bool IsMonomorphic() { return !receiver_type_.is_null(); }
+ Handle<Map> GetReceiverType() { return receiver_type_; }
+
bool IsCompileTimeValue();
void set_emit_store(bool emit_store);
bool emit_store();
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ Property(bool is_getter, FunctionLiteral* value);
+ void set_key(Literal* key) { key_ = key; }
+
private:
Literal* key_;
Expression* value_;
Kind kind_;
bool emit_store_;
+ Handle<Map> receiver_type_;
};
- ObjectLiteral(Isolate* isolate,
- Handle<FixedArray> constant_properties,
- ZoneList<Property*>* properties,
- int literal_index,
- bool is_simple,
- bool fast_elements,
- int depth,
- bool has_function)
- : MaterializedLiteral(isolate, literal_index, is_simple, depth),
- constant_properties_(constant_properties),
- properties_(properties),
- fast_elements_(fast_elements),
- has_function_(has_function) {}
-
DECLARE_NODE_TYPE(ObjectLiteral)
Handle<FixedArray> constant_properties() const {
@@ -1014,6 +1367,29 @@ class ObjectLiteral: public MaterializedLiteral {
kHasFunction = 1 << 1
};
+ struct Accessors: public ZoneObject {
+ Accessors() : getter(NULL), setter(NULL) { }
+ Expression* getter;
+ Expression* setter;
+ };
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ ObjectLiteral(Isolate* isolate,
+ Handle<FixedArray> constant_properties,
+ ZoneList<Property*>* properties,
+ int literal_index,
+ bool is_simple,
+ bool fast_elements,
+ int depth,
+ bool has_function)
+ : MaterializedLiteral(isolate, literal_index, is_simple, depth),
+ constant_properties_(constant_properties),
+ properties_(properties),
+ fast_elements_(fast_elements),
+ has_function_(has_function) {}
+
private:
Handle<FixedArray> constant_properties_;
ZoneList<Property*>* properties_;
@@ -1025,6 +1401,14 @@ class ObjectLiteral: public MaterializedLiteral {
// Node for capturing a regexp literal.
class RegExpLiteral: public MaterializedLiteral {
public:
+ DECLARE_NODE_TYPE(RegExpLiteral)
+
+ Handle<String> pattern() const { return pattern_; }
+ Handle<String> flags() const { return flags_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
RegExpLiteral(Isolate* isolate,
Handle<String> pattern,
Handle<String> flags,
@@ -1033,11 +1417,6 @@ class RegExpLiteral: public MaterializedLiteral {
pattern_(pattern),
flags_(flags) {}
- DECLARE_NODE_TYPE(RegExpLiteral)
-
- Handle<String> pattern() const { return pattern_; }
- Handle<String> flags() const { return flags_; }
-
private:
Handle<String> pattern_;
Handle<String> flags_;
@@ -1047,6 +1426,17 @@ class RegExpLiteral: public MaterializedLiteral {
// for minimizing the work when constructing it at runtime.
class ArrayLiteral: public MaterializedLiteral {
public:
+ DECLARE_NODE_TYPE(ArrayLiteral)
+
+ Handle<FixedArray> constant_elements() const { return constant_elements_; }
+ ZoneList<Expression*>* values() const { return values_; }
+
+ // Return an AST id for an element that is used in simulate instructions.
+ int GetIdForElement(int i) { return first_element_id_ + i; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
ArrayLiteral(Isolate* isolate,
Handle<FixedArray> constant_elements,
ZoneList<Expression*>* values,
@@ -1058,14 +1448,6 @@ class ArrayLiteral: public MaterializedLiteral {
values_(values),
first_element_id_(ReserveIdRange(isolate, values->length())) {}
- DECLARE_NODE_TYPE(ArrayLiteral)
-
- Handle<FixedArray> constant_elements() const { return constant_elements_; }
- ZoneList<Expression*>* values() const { return values_; }
-
- // Return an AST id for an element that is used in simulate instructions.
- int GetIdForElement(int i) { return first_element_id_ + i; }
-
private:
Handle<FixedArray> constant_elements_;
ZoneList<Expression*>* values_;
@@ -1075,68 +1457,63 @@ class ArrayLiteral: public MaterializedLiteral {
class VariableProxy: public Expression {
public:
- VariableProxy(Isolate* isolate, Variable* var);
-
DECLARE_NODE_TYPE(VariableProxy)
virtual bool IsValidLeftHandSide() {
return var_ == NULL ? true : var_->IsValidLeftHandSide();
}
- virtual bool IsInlineable() const;
-
bool IsVariable(Handle<String> n) {
return !is_this() && name().is_identical_to(n);
}
bool IsArguments() { return var_ != NULL && var_->is_arguments(); }
+ bool IsLValue() {
+ return is_lvalue_;
+ }
+
Handle<String> name() const { return name_; }
Variable* var() const { return var_; }
bool is_this() const { return is_this_; }
int position() const { return position_; }
+ Interface* interface() const { return interface_; }
+
void MarkAsTrivial() { is_trivial_ = true; }
+ void MarkAsLValue() { is_lvalue_ = true; }
// Bind this proxy to the variable var.
void BindTo(Variable* var);
protected:
- Handle<String> name_;
- Variable* var_; // resolved variable, or NULL
- bool is_this_;
- bool is_trivial_;
- int position_;
+ template<class> friend class AstNodeFactory;
+
+ VariableProxy(Isolate* isolate, Variable* var);
VariableProxy(Isolate* isolate,
Handle<String> name,
bool is_this,
- int position = RelocInfo::kNoPosition);
+ int position,
+ Interface* interface);
- friend class Scope;
+ Handle<String> name_;
+ Variable* var_; // resolved variable, or NULL
+ bool is_this_;
+ bool is_trivial_;
+ // True if this variable proxy is being used in an assignment
+ // or with a increment/decrement operator.
+ bool is_lvalue_;
+ int position_;
+ Interface* interface_;
};
class Property: public Expression {
public:
- Property(Isolate* isolate,
- Expression* obj,
- Expression* key,
- int pos)
- : Expression(isolate),
- obj_(obj),
- key_(key),
- pos_(pos),
- is_monomorphic_(false),
- is_array_length_(false),
- is_string_length_(false),
- is_string_access_(false),
- is_function_prototype_(false) { }
-
DECLARE_NODE_TYPE(Property)
virtual bool IsValidLeftHandSide() { return true; }
- virtual bool IsInlineable() const;
Expression* obj() const { return obj_; }
Expression* key() const { return key_; }
@@ -1151,6 +1528,25 @@ class Property: public Expression {
virtual bool IsMonomorphic() { return is_monomorphic_; }
virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; }
bool IsArrayLength() { return is_array_length_; }
+ bool IsUninitialized() { return is_uninitialized_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ Property(Isolate* isolate,
+ Expression* obj,
+ Expression* key,
+ int pos)
+ : Expression(isolate),
+ obj_(obj),
+ key_(key),
+ pos_(pos),
+ is_monomorphic_(false),
+ is_uninitialized_(false),
+ is_array_length_(false),
+ is_string_length_(false),
+ is_string_access_(false),
+ is_function_prototype_(false) { }
private:
Expression* obj_;
@@ -1159,6 +1555,7 @@ class Property: public Expression {
SmallMapList receiver_types_;
bool is_monomorphic_ : 1;
+ bool is_uninitialized_ : 1;
bool is_array_length_ : 1;
bool is_string_length_ : 1;
bool is_string_access_ : 1;
@@ -1168,23 +1565,8 @@ class Property: public Expression {
class Call: public Expression {
public:
- Call(Isolate* isolate,
- Expression* expression,
- ZoneList<Expression*>* arguments,
- int pos)
- : Expression(isolate),
- expression_(expression),
- arguments_(arguments),
- pos_(pos),
- is_monomorphic_(false),
- check_type_(RECEIVER_MAP_CHECK),
- return_id_(GetNextId(isolate)) {
- }
-
DECLARE_NODE_TYPE(Call)
- virtual bool IsInlineable() const;
-
Expression* expression() const { return expression_; }
ZoneList<Expression*>* arguments() const { return arguments_; }
virtual int position() const { return pos_; }
@@ -1209,6 +1591,21 @@ class Call: public Expression {
bool return_is_recorded_;
#endif
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ Call(Isolate* isolate,
+ Expression* expression,
+ ZoneList<Expression*>* arguments,
+ int pos)
+ : Expression(isolate),
+ expression_(expression),
+ arguments_(arguments),
+ pos_(pos),
+ is_monomorphic_(false),
+ check_type_(RECEIVER_MAP_CHECK),
+ return_id_(GetNextId(isolate)) { }
+
private:
Expression* expression_;
ZoneList<Expression*>* arguments_;
@@ -1227,6 +1624,22 @@ class Call: public Expression {
class CallNew: public Expression {
public:
+ DECLARE_NODE_TYPE(CallNew)
+
+ Expression* expression() const { return expression_; }
+ ZoneList<Expression*>* arguments() const { return arguments_; }
+ virtual int position() const { return pos_; }
+
+ void RecordTypeFeedback(TypeFeedbackOracle* oracle);
+ virtual bool IsMonomorphic() { return is_monomorphic_; }
+ Handle<JSFunction> target() { return target_; }
+
+ // Bailout support.
+ int ReturnId() const { return return_id_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
CallNew(Isolate* isolate,
Expression* expression,
ZoneList<Expression*>* arguments,
@@ -1234,20 +1647,19 @@ class CallNew: public Expression {
: Expression(isolate),
expression_(expression),
arguments_(arguments),
- pos_(pos) { }
-
- DECLARE_NODE_TYPE(CallNew)
-
- virtual bool IsInlineable() const;
-
- Expression* expression() const { return expression_; }
- ZoneList<Expression*>* arguments() const { return arguments_; }
- virtual int position() const { return pos_; }
+ pos_(pos),
+ is_monomorphic_(false),
+ return_id_(GetNextId(isolate)) { }
private:
Expression* expression_;
ZoneList<Expression*>* arguments_;
int pos_;
+
+ bool is_monomorphic_;
+ Handle<JSFunction> target_;
+
+ int return_id_;
};
@@ -1257,6 +1669,16 @@ class CallNew: public Expression {
// implemented in JavaScript (see "v8natives.js").
class CallRuntime: public Expression {
public:
+ DECLARE_NODE_TYPE(CallRuntime)
+
+ Handle<String> name() const { return name_; }
+ const Runtime::Function* function() const { return function_; }
+ ZoneList<Expression*>* arguments() const { return arguments_; }
+ bool is_jsruntime() const { return function_ == NULL; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
CallRuntime(Isolate* isolate,
Handle<String> name,
const Runtime::Function* function,
@@ -1266,15 +1688,6 @@ class CallRuntime: public Expression {
function_(function),
arguments_(arguments) { }
- DECLARE_NODE_TYPE(CallRuntime)
-
- virtual bool IsInlineable() const;
-
- Handle<String> name() const { return name_; }
- const Runtime::Function* function() const { return function_; }
- ZoneList<Expression*>* arguments() const { return arguments_; }
- bool is_jsruntime() const { return function_ == NULL; }
-
private:
Handle<String> name_;
const Runtime::Function* function_;
@@ -1284,6 +1697,20 @@ class CallRuntime: public Expression {
class UnaryOperation: public Expression {
public:
+ DECLARE_NODE_TYPE(UnaryOperation)
+
+ virtual bool ResultOverwriteAllowed();
+
+ Token::Value op() const { return op_; }
+ Expression* expression() const { return expression_; }
+ virtual int position() const { return pos_; }
+
+ int MaterializeTrueId() { return materialize_true_id_; }
+ int MaterializeFalseId() { return materialize_false_id_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
UnaryOperation(Isolate* isolate,
Token::Value op,
Expression* expression,
@@ -1301,19 +1728,6 @@ class UnaryOperation: public Expression {
}
}
- DECLARE_NODE_TYPE(UnaryOperation)
-
- virtual bool IsInlineable() const;
-
- virtual bool ResultOverwriteAllowed();
-
- Token::Value op() const { return op_; }
- Expression* expression() const { return expression_; }
- virtual int position() const { return pos_; }
-
- int MaterializeTrueId() { return materialize_true_id_; }
- int MaterializeFalseId() { return materialize_false_id_; }
-
private:
Token::Value op_;
Expression* expression_;
@@ -1328,22 +1742,8 @@ class UnaryOperation: public Expression {
class BinaryOperation: public Expression {
public:
- BinaryOperation(Isolate* isolate,
- Token::Value op,
- Expression* left,
- Expression* right,
- int pos)
- : Expression(isolate), op_(op), left_(left), right_(right), pos_(pos) {
- ASSERT(Token::IsBinaryOp(op));
- right_id_ = (op == Token::AND || op == Token::OR)
- ? static_cast<int>(GetNextId(isolate))
- : AstNode::kNoNumber;
- }
-
DECLARE_NODE_TYPE(BinaryOperation)
- virtual bool IsInlineable() const;
-
virtual bool ResultOverwriteAllowed();
Token::Value op() const { return op_; }
@@ -1354,6 +1754,21 @@ class BinaryOperation: public Expression {
// Bailout support.
int RightId() const { return right_id_; }
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ BinaryOperation(Isolate* isolate,
+ Token::Value op,
+ Expression* left,
+ Expression* right,
+ int pos)
+ : Expression(isolate), op_(op), left_(left), right_(right), pos_(pos) {
+ ASSERT(Token::IsBinaryOp(op));
+ right_id_ = (op == Token::AND || op == Token::OR)
+ ? GetNextId(isolate)
+ : AstNode::kNoNumber;
+ }
+
private:
Token::Value op_;
Expression* left_;
@@ -1367,19 +1782,6 @@ class BinaryOperation: public Expression {
class CountOperation: public Expression {
public:
- CountOperation(Isolate* isolate,
- Token::Value op,
- bool is_prefix,
- Expression* expr,
- int pos)
- : Expression(isolate),
- op_(op),
- is_prefix_(is_prefix),
- expression_(expr),
- pos_(pos),
- assignment_id_(GetNextId(isolate)),
- count_id_(GetNextId(isolate)) {}
-
DECLARE_NODE_TYPE(CountOperation)
bool is_prefix() const { return is_prefix_; }
@@ -1395,8 +1797,6 @@ class CountOperation: public Expression {
virtual void MarkAsStatement() { is_prefix_ = true; }
- virtual bool IsInlineable() const;
-
void RecordTypeFeedback(TypeFeedbackOracle* oracle);
virtual bool IsMonomorphic() { return is_monomorphic_; }
virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; }
@@ -1405,6 +1805,22 @@ class CountOperation: public Expression {
int AssignmentId() const { return assignment_id_; }
int CountId() const { return count_id_; }
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ CountOperation(Isolate* isolate,
+ Token::Value op,
+ bool is_prefix,
+ Expression* expr,
+ int pos)
+ : Expression(isolate),
+ op_(op),
+ is_prefix_(is_prefix),
+ expression_(expr),
+ pos_(pos),
+ assignment_id_(GetNextId(isolate)),
+ count_id_(GetNextId(isolate)) {}
+
private:
Token::Value op_;
bool is_prefix_;
@@ -1419,20 +1835,6 @@ class CountOperation: public Expression {
class CompareOperation: public Expression {
public:
- CompareOperation(Isolate* isolate,
- Token::Value op,
- Expression* left,
- Expression* right,
- int pos)
- : Expression(isolate),
- op_(op),
- left_(left),
- right_(right),
- pos_(pos),
- compare_type_(NONE) {
- ASSERT(Token::IsCompareOp(op));
- }
-
DECLARE_NODE_TYPE(CompareOperation)
Token::Value op() const { return op_; }
@@ -1440,8 +1842,6 @@ class CompareOperation: public Expression {
Expression* right() const { return right_; }
virtual int position() const { return pos_; }
- virtual bool IsInlineable() const;
-
// Type feedback information.
void RecordTypeFeedback(TypeFeedbackOracle* oracle);
bool IsSmiCompare() { return compare_type_ == SMI_ONLY; }
@@ -1452,6 +1852,23 @@ class CompareOperation: public Expression {
bool IsLiteralCompareUndefined(Expression** expr);
bool IsLiteralCompareNull(Expression** expr);
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ CompareOperation(Isolate* isolate,
+ Token::Value op,
+ Expression* left,
+ Expression* right,
+ int pos)
+ : Expression(isolate),
+ op_(op),
+ left_(left),
+ right_(right),
+ pos_(pos),
+ compare_type_(NONE) {
+ ASSERT(Token::IsCompareOp(op));
+ }
+
private:
Token::Value op_;
Expression* left_;
@@ -1465,6 +1882,21 @@ class CompareOperation: public Expression {
class Conditional: public Expression {
public:
+ DECLARE_NODE_TYPE(Conditional)
+
+ Expression* condition() const { return condition_; }
+ Expression* then_expression() const { return then_expression_; }
+ Expression* else_expression() const { return else_expression_; }
+
+ int then_expression_position() const { return then_expression_position_; }
+ int else_expression_position() const { return else_expression_position_; }
+
+ int ThenId() const { return then_id_; }
+ int ElseId() const { return else_id_; }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
Conditional(Isolate* isolate,
Expression* condition,
Expression* then_expression,
@@ -1478,22 +1910,7 @@ class Conditional: public Expression {
then_expression_position_(then_expression_position),
else_expression_position_(else_expression_position),
then_id_(GetNextId(isolate)),
- else_id_(GetNextId(isolate)) {
- }
-
- DECLARE_NODE_TYPE(Conditional)
-
- virtual bool IsInlineable() const;
-
- Expression* condition() const { return condition_; }
- Expression* then_expression() const { return then_expression_; }
- Expression* else_expression() const { return else_expression_; }
-
- int then_expression_position() const { return then_expression_position_; }
- int else_expression_position() const { return else_expression_position_; }
-
- int ThenId() const { return then_id_; }
- int ElseId() const { return else_id_; }
+ else_id_(GetNextId(isolate)) { }
private:
Expression* condition_;
@@ -1508,16 +1925,8 @@ class Conditional: public Expression {
class Assignment: public Expression {
public:
- Assignment(Isolate* isolate,
- Token::Value op,
- Expression* target,
- Expression* value,
- int pos);
-
DECLARE_NODE_TYPE(Assignment)
- virtual bool IsInlineable() const;
-
Assignment* AsSimpleAssignment() { return !is_compound() ? this : NULL; }
Token::Value binary_op() const;
@@ -1549,6 +1958,25 @@ class Assignment: public Expression {
int CompoundLoadId() const { return compound_load_id_; }
int AssignmentId() const { return assignment_id_; }
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ Assignment(Isolate* isolate,
+ Token::Value op,
+ Expression* target,
+ Expression* value,
+ int pos);
+
+ template<class Visitor>
+ void Init(Isolate* isolate, AstNodeFactory<Visitor>* factory) {
+ ASSERT(Token::IsAssignmentOp(op_));
+ if (is_compound()) {
+ binary_operation_ =
+ factory->NewBinaryOperation(binary_op(), target_, value_, pos_ + 1);
+ compound_load_id_ = GetNextId(isolate);
+ }
+ }
+
private:
Token::Value op_;
Expression* target_;
@@ -1568,14 +1996,16 @@ class Assignment: public Expression {
class Throw: public Expression {
public:
- Throw(Isolate* isolate, Expression* exception, int pos)
- : Expression(isolate), exception_(exception), pos_(pos) {}
-
DECLARE_NODE_TYPE(Throw)
Expression* exception() const { return exception_; }
virtual int position() const { return pos_; }
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ Throw(Isolate* isolate, Expression* exception, int pos)
+ : Expression(isolate), exception_(exception), pos_(pos) {}
private:
Expression* exception_;
@@ -1591,34 +2021,15 @@ class FunctionLiteral: public Expression {
DECLARATION
};
- FunctionLiteral(Isolate* isolate,
- Handle<String> name,
- Scope* scope,
- ZoneList<Statement*>* body,
- int materialized_literal_count,
- int expected_property_count,
- bool has_only_simple_this_property_assignments,
- Handle<FixedArray> this_property_assignments,
- int num_parameters,
- Type type,
- bool has_duplicate_parameters)
- : Expression(isolate),
- name_(name),
- scope_(scope),
- body_(body),
- materialized_literal_count_(materialized_literal_count),
- expected_property_count_(expected_property_count),
- has_only_simple_this_property_assignments_(
- has_only_simple_this_property_assignments),
- this_property_assignments_(this_property_assignments),
- num_parameters_(num_parameters),
- function_token_position_(RelocInfo::kNoPosition),
- inferred_name_(HEAP->empty_string()),
- is_expression_(type != DECLARATION),
- is_anonymous_(type == ANONYMOUS_EXPRESSION),
- pretenure_(false),
- has_duplicate_parameters_(has_duplicate_parameters) {
- }
+ enum ParameterFlag {
+ kNoDuplicateParameters = 0,
+ kHasDuplicateParameters = 1
+ };
+
+ enum IsFunctionFlag {
+ kGlobalOrEval,
+ kIsFunction
+ };
DECLARE_NODE_TYPE(FunctionLiteral)
@@ -1629,21 +2040,22 @@ class FunctionLiteral: public Expression {
int function_token_position() const { return function_token_position_; }
int start_position() const;
int end_position() const;
- bool is_expression() const { return is_expression_; }
- bool is_anonymous() const { return is_anonymous_; }
- bool strict_mode() const { return strict_mode_flag() == kStrictMode; }
- StrictModeFlag strict_mode_flag() const;
- bool qml_mode() const;
+ int SourceSize() const { return end_position() - start_position(); }
+ bool is_expression() const { return IsExpression::decode(bitfield_); }
+ bool is_anonymous() const { return IsAnonymous::decode(bitfield_); }
+ bool is_classic_mode() const { return language_mode() == CLASSIC_MODE; }
+ LanguageMode language_mode() const;
int materialized_literal_count() { return materialized_literal_count_; }
int expected_property_count() { return expected_property_count_; }
+ int handler_count() { return handler_count_; }
bool has_only_simple_this_property_assignments() {
- return has_only_simple_this_property_assignments_;
+ return HasOnlySimpleThisPropertyAssignments::decode(bitfield_);
}
Handle<FixedArray> this_property_assignments() {
return this_property_assignments_;
}
- int num_parameters() { return num_parameters_; }
+ int parameter_count() { return parameter_count_; }
bool AllowsLazyCompilation();
@@ -1657,45 +2069,98 @@ class FunctionLiteral: public Expression {
inferred_name_ = inferred_name;
}
- bool pretenure() { return pretenure_; }
- void set_pretenure(bool value) { pretenure_ = value; }
- virtual bool IsInlineable() const;
+ bool pretenure() { return Pretenure::decode(bitfield_); }
+ void set_pretenure() { bitfield_ |= Pretenure::encode(true); }
- bool has_duplicate_parameters() { return has_duplicate_parameters_; }
+ bool has_duplicate_parameters() {
+ return HasDuplicateParameters::decode(bitfield_);
+ }
+
+ bool is_function() { return IsFunction::decode(bitfield_) == kIsFunction; }
+
+ int ast_node_count() { return ast_properties_.node_count(); }
+ AstProperties::Flags* flags() { return ast_properties_.flags(); }
+ void set_ast_properties(AstProperties* ast_properties) {
+ ast_properties_ = *ast_properties;
+ }
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ FunctionLiteral(Isolate* isolate,
+ Handle<String> name,
+ Scope* scope,
+ ZoneList<Statement*>* body,
+ int materialized_literal_count,
+ int expected_property_count,
+ int handler_count,
+ bool has_only_simple_this_property_assignments,
+ Handle<FixedArray> this_property_assignments,
+ int parameter_count,
+ Type type,
+ ParameterFlag has_duplicate_parameters,
+ IsFunctionFlag is_function)
+ : Expression(isolate),
+ name_(name),
+ scope_(scope),
+ body_(body),
+ this_property_assignments_(this_property_assignments),
+ inferred_name_(isolate->factory()->empty_string()),
+ materialized_literal_count_(materialized_literal_count),
+ expected_property_count_(expected_property_count),
+ handler_count_(handler_count),
+ parameter_count_(parameter_count),
+ function_token_position_(RelocInfo::kNoPosition) {
+ bitfield_ =
+ HasOnlySimpleThisPropertyAssignments::encode(
+ has_only_simple_this_property_assignments) |
+ IsExpression::encode(type != DECLARATION) |
+ IsAnonymous::encode(type == ANONYMOUS_EXPRESSION) |
+ Pretenure::encode(false) |
+ HasDuplicateParameters::encode(has_duplicate_parameters) |
+ IsFunction::encode(is_function);
+ }
private:
Handle<String> name_;
Scope* scope_;
ZoneList<Statement*>* body_;
+ Handle<FixedArray> this_property_assignments_;
+ Handle<String> inferred_name_;
+ AstProperties ast_properties_;
+
int materialized_literal_count_;
int expected_property_count_;
- bool has_only_simple_this_property_assignments_;
- Handle<FixedArray> this_property_assignments_;
- int num_parameters_;
- int start_position_;
- int end_position_;
+ int handler_count_;
+ int parameter_count_;
int function_token_position_;
- Handle<String> inferred_name_;
- bool is_expression_;
- bool is_anonymous_;
- bool pretenure_;
- bool has_duplicate_parameters_;
+
+ unsigned bitfield_;
+ class HasOnlySimpleThisPropertyAssignments: public BitField<bool, 0, 1> {};
+ class IsExpression: public BitField<bool, 1, 1> {};
+ class IsAnonymous: public BitField<bool, 2, 1> {};
+ class Pretenure: public BitField<bool, 3, 1> {};
+ class HasDuplicateParameters: public BitField<ParameterFlag, 4, 1> {};
+ class IsFunction: public BitField<IsFunctionFlag, 5, 1> {};
};
class SharedFunctionInfoLiteral: public Expression {
public:
- SharedFunctionInfoLiteral(
- Isolate* isolate,
- Handle<SharedFunctionInfo> shared_function_info)
- : Expression(isolate), shared_function_info_(shared_function_info) { }
-
DECLARE_NODE_TYPE(SharedFunctionInfoLiteral)
Handle<SharedFunctionInfo> shared_function_info() const {
return shared_function_info_;
}
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ SharedFunctionInfoLiteral(
+ Isolate* isolate,
+ Handle<SharedFunctionInfo> shared_function_info)
+ : Expression(isolate),
+ shared_function_info_(shared_function_info) { }
private:
Handle<SharedFunctionInfo> shared_function_info_;
@@ -1704,11 +2169,16 @@ class SharedFunctionInfoLiteral: public Expression {
class ThisFunction: public Expression {
public:
- explicit ThisFunction(Isolate* isolate) : Expression(isolate) {}
DECLARE_NODE_TYPE(ThisFunction)
- virtual bool IsInlineable() const;
+
+ protected:
+ template<class> friend class AstNodeFactory;
+
+ explicit ThisFunction(Isolate* isolate): Expression(isolate) {}
};
+#undef DECLARE_NODE_TYPE
+
// ----------------------------------------------------------------------------
// Regular expressions
@@ -2061,13 +2531,23 @@ class RegExpEmpty: public RegExpTree {
virtual bool IsEmpty();
virtual int min_match() { return 0; }
virtual int max_match() { return 0; }
- static RegExpEmpty* GetInstance() { return &kInstance; }
- private:
- static RegExpEmpty kInstance;
+ static RegExpEmpty* GetInstance() {
+ static RegExpEmpty* instance = ::new RegExpEmpty();
+ return instance;
+ }
};
// ----------------------------------------------------------------------------
+// Out-of-line inline constructors (to side-step cyclic dependencies).
+
+inline ModuleVariable::ModuleVariable(VariableProxy* proxy)
+ : Module(proxy->interface()),
+ proxy_(proxy) {
+}
+
+
+// ----------------------------------------------------------------------------
// Basic visitor
// - leaf node visitors are abstract.
@@ -2109,6 +2589,397 @@ class AstVisitor BASE_EMBEDDED {
};
+// ----------------------------------------------------------------------------
+// Construction time visitor.
+
+class AstConstructionVisitor BASE_EMBEDDED {
+ public:
+ AstConstructionVisitor() { }
+
+ AstProperties* ast_properties() { return &properties_; }
+
+ private:
+ template<class> friend class AstNodeFactory;
+
+ // Node visitors.
+#define DEF_VISIT(type) \
+ void Visit##type(type* node);
+ AST_NODE_LIST(DEF_VISIT)
+#undef DEF_VISIT
+
+ void increase_node_count() { properties_.add_node_count(1); }
+ void add_flag(AstPropertiesFlag flag) { properties_.flags()->Add(flag); }
+
+ AstProperties properties_;
+};
+
+
+class AstNullVisitor BASE_EMBEDDED {
+ public:
+ // Node visitors.
+#define DEF_VISIT(type) \
+ void Visit##type(type* node) {}
+ AST_NODE_LIST(DEF_VISIT)
+#undef DEF_VISIT
+};
+
+
+
+// ----------------------------------------------------------------------------
+// AstNode factory
+
+template<class Visitor>
+class AstNodeFactory BASE_EMBEDDED {
+ public:
+ explicit AstNodeFactory(Isolate* isolate)
+ : isolate_(isolate),
+ zone_(isolate_->zone()) { }
+
+ Visitor* visitor() { return &visitor_; }
+
+#define VISIT_AND_RETURN(NodeType, node) \
+ visitor_.Visit##NodeType((node)); \
+ return node;
+
+ VariableDeclaration* NewVariableDeclaration(VariableProxy* proxy,
+ VariableMode mode,
+ Scope* scope) {
+ VariableDeclaration* decl =
+ new(zone_) VariableDeclaration(proxy, mode, scope);
+ VISIT_AND_RETURN(VariableDeclaration, decl)
+ }
+
+ FunctionDeclaration* NewFunctionDeclaration(VariableProxy* proxy,
+ VariableMode mode,
+ FunctionLiteral* fun,
+ Scope* scope) {
+ FunctionDeclaration* decl =
+ new(zone_) FunctionDeclaration(proxy, mode, fun, scope);
+ VISIT_AND_RETURN(FunctionDeclaration, decl)
+ }
+
+ ModuleDeclaration* NewModuleDeclaration(VariableProxy* proxy,
+ Module* module,
+ Scope* scope) {
+ ModuleDeclaration* decl =
+ new(zone_) ModuleDeclaration(proxy, module, scope);
+ VISIT_AND_RETURN(ModuleDeclaration, decl)
+ }
+
+ ImportDeclaration* NewImportDeclaration(VariableProxy* proxy,
+ Module* module,
+ Scope* scope) {
+ ImportDeclaration* decl =
+ new(zone_) ImportDeclaration(proxy, module, scope);
+ VISIT_AND_RETURN(ImportDeclaration, decl)
+ }
+
+ ExportDeclaration* NewExportDeclaration(VariableProxy* proxy,
+ Scope* scope) {
+ ExportDeclaration* decl =
+ new(zone_) ExportDeclaration(proxy, scope);
+ VISIT_AND_RETURN(ExportDeclaration, decl)
+ }
+
+ ModuleLiteral* NewModuleLiteral(Block* body, Interface* interface) {
+ ModuleLiteral* module = new(zone_) ModuleLiteral(body, interface);
+ VISIT_AND_RETURN(ModuleLiteral, module)
+ }
+
+ ModuleVariable* NewModuleVariable(VariableProxy* proxy) {
+ ModuleVariable* module = new(zone_) ModuleVariable(proxy);
+ VISIT_AND_RETURN(ModuleVariable, module)
+ }
+
+ ModulePath* NewModulePath(Module* origin, Handle<String> name) {
+ ModulePath* module = new(zone_) ModulePath(origin, name);
+ VISIT_AND_RETURN(ModulePath, module)
+ }
+
+ ModuleUrl* NewModuleUrl(Handle<String> url) {
+ ModuleUrl* module = new(zone_) ModuleUrl(url);
+ VISIT_AND_RETURN(ModuleUrl, module)
+ }
+
+ Block* NewBlock(ZoneStringList* labels,
+ int capacity,
+ bool is_initializer_block) {
+ Block* block = new(zone_) Block(
+ isolate_, labels, capacity, is_initializer_block);
+ VISIT_AND_RETURN(Block, block)
+ }
+
+#define STATEMENT_WITH_LABELS(NodeType) \
+ NodeType* New##NodeType(ZoneStringList* labels) { \
+ NodeType* stmt = new(zone_) NodeType(isolate_, labels); \
+ VISIT_AND_RETURN(NodeType, stmt); \
+ }
+ STATEMENT_WITH_LABELS(DoWhileStatement)
+ STATEMENT_WITH_LABELS(WhileStatement)
+ STATEMENT_WITH_LABELS(ForStatement)
+ STATEMENT_WITH_LABELS(ForInStatement)
+ STATEMENT_WITH_LABELS(SwitchStatement)
+#undef STATEMENT_WITH_LABELS
+
+ ExpressionStatement* NewExpressionStatement(Expression* expression) {
+ ExpressionStatement* stmt = new(zone_) ExpressionStatement(expression);
+ VISIT_AND_RETURN(ExpressionStatement, stmt)
+ }
+
+ ContinueStatement* NewContinueStatement(IterationStatement* target) {
+ ContinueStatement* stmt = new(zone_) ContinueStatement(target);
+ VISIT_AND_RETURN(ContinueStatement, stmt)
+ }
+
+ BreakStatement* NewBreakStatement(BreakableStatement* target) {
+ BreakStatement* stmt = new(zone_) BreakStatement(target);
+ VISIT_AND_RETURN(BreakStatement, stmt)
+ }
+
+ ReturnStatement* NewReturnStatement(Expression* expression) {
+ ReturnStatement* stmt = new(zone_) ReturnStatement(expression);
+ VISIT_AND_RETURN(ReturnStatement, stmt)
+ }
+
+ WithStatement* NewWithStatement(Expression* expression,
+ Statement* statement) {
+ WithStatement* stmt = new(zone_) WithStatement(expression, statement);
+ VISIT_AND_RETURN(WithStatement, stmt)
+ }
+
+ IfStatement* NewIfStatement(Expression* condition,
+ Statement* then_statement,
+ Statement* else_statement) {
+ IfStatement* stmt = new(zone_) IfStatement(
+ isolate_, condition, then_statement, else_statement);
+ VISIT_AND_RETURN(IfStatement, stmt)
+ }
+
+ TryCatchStatement* NewTryCatchStatement(int index,
+ Block* try_block,
+ Scope* scope,
+ Variable* variable,
+ Block* catch_block) {
+ TryCatchStatement* stmt = new(zone_) TryCatchStatement(
+ index, try_block, scope, variable, catch_block);
+ VISIT_AND_RETURN(TryCatchStatement, stmt)
+ }
+
+ TryFinallyStatement* NewTryFinallyStatement(int index,
+ Block* try_block,
+ Block* finally_block) {
+ TryFinallyStatement* stmt =
+ new(zone_) TryFinallyStatement(index, try_block, finally_block);
+ VISIT_AND_RETURN(TryFinallyStatement, stmt)
+ }
+
+ DebuggerStatement* NewDebuggerStatement() {
+ DebuggerStatement* stmt = new(zone_) DebuggerStatement();
+ VISIT_AND_RETURN(DebuggerStatement, stmt)
+ }
+
+ EmptyStatement* NewEmptyStatement() {
+ return new(zone_) EmptyStatement();
+ }
+
+ Literal* NewLiteral(Handle<Object> handle) {
+ Literal* lit = new(zone_) Literal(isolate_, handle);
+ VISIT_AND_RETURN(Literal, lit)
+ }
+
+ Literal* NewNumberLiteral(double number) {
+ return NewLiteral(isolate_->factory()->NewNumber(number, TENURED));
+ }
+
+ ObjectLiteral* NewObjectLiteral(
+ Handle<FixedArray> constant_properties,
+ ZoneList<ObjectLiteral::Property*>* properties,
+ int literal_index,
+ bool is_simple,
+ bool fast_elements,
+ int depth,
+ bool has_function) {
+ ObjectLiteral* lit = new(zone_) ObjectLiteral(
+ isolate_, constant_properties, properties, literal_index,
+ is_simple, fast_elements, depth, has_function);
+ VISIT_AND_RETURN(ObjectLiteral, lit)
+ }
+
+ ObjectLiteral::Property* NewObjectLiteralProperty(bool is_getter,
+ FunctionLiteral* value) {
+ ObjectLiteral::Property* prop =
+ new(zone_) ObjectLiteral::Property(is_getter, value);
+ prop->set_key(NewLiteral(value->name()));
+ return prop; // Not an AST node, will not be visited.
+ }
+
+ RegExpLiteral* NewRegExpLiteral(Handle<String> pattern,
+ Handle<String> flags,
+ int literal_index) {
+ RegExpLiteral* lit =
+ new(zone_) RegExpLiteral(isolate_, pattern, flags, literal_index);
+ VISIT_AND_RETURN(RegExpLiteral, lit);
+ }
+
+ ArrayLiteral* NewArrayLiteral(Handle<FixedArray> constant_elements,
+ ZoneList<Expression*>* values,
+ int literal_index,
+ bool is_simple,
+ int depth) {
+ ArrayLiteral* lit = new(zone_) ArrayLiteral(
+ isolate_, constant_elements, values, literal_index, is_simple, depth);
+ VISIT_AND_RETURN(ArrayLiteral, lit)
+ }
+
+ VariableProxy* NewVariableProxy(Variable* var) {
+ VariableProxy* proxy = new(zone_) VariableProxy(isolate_, var);
+ VISIT_AND_RETURN(VariableProxy, proxy)
+ }
+
+ VariableProxy* NewVariableProxy(Handle<String> name,
+ bool is_this,
+ int position = RelocInfo::kNoPosition,
+ Interface* interface =
+ Interface::NewValue()) {
+ VariableProxy* proxy =
+ new(zone_) VariableProxy(isolate_, name, is_this, position, interface);
+ VISIT_AND_RETURN(VariableProxy, proxy)
+ }
+
+ Property* NewProperty(Expression* obj, Expression* key, int pos) {
+ Property* prop = new(zone_) Property(isolate_, obj, key, pos);
+ VISIT_AND_RETURN(Property, prop)
+ }
+
+ Call* NewCall(Expression* expression,
+ ZoneList<Expression*>* arguments,
+ int pos) {
+ Call* call = new(zone_) Call(isolate_, expression, arguments, pos);
+ VISIT_AND_RETURN(Call, call)
+ }
+
+ CallNew* NewCallNew(Expression* expression,
+ ZoneList<Expression*>* arguments,
+ int pos) {
+ CallNew* call = new(zone_) CallNew(isolate_, expression, arguments, pos);
+ VISIT_AND_RETURN(CallNew, call)
+ }
+
+ CallRuntime* NewCallRuntime(Handle<String> name,
+ const Runtime::Function* function,
+ ZoneList<Expression*>* arguments) {
+ CallRuntime* call =
+ new(zone_) CallRuntime(isolate_, name, function, arguments);
+ VISIT_AND_RETURN(CallRuntime, call)
+ }
+
+ UnaryOperation* NewUnaryOperation(Token::Value op,
+ Expression* expression,
+ int pos) {
+ UnaryOperation* node =
+ new(zone_) UnaryOperation(isolate_, op, expression, pos);
+ VISIT_AND_RETURN(UnaryOperation, node)
+ }
+
+ BinaryOperation* NewBinaryOperation(Token::Value op,
+ Expression* left,
+ Expression* right,
+ int pos) {
+ BinaryOperation* node =
+ new(zone_) BinaryOperation(isolate_, op, left, right, pos);
+ VISIT_AND_RETURN(BinaryOperation, node)
+ }
+
+ CountOperation* NewCountOperation(Token::Value op,
+ bool is_prefix,
+ Expression* expr,
+ int pos) {
+ CountOperation* node =
+ new(zone_) CountOperation(isolate_, op, is_prefix, expr, pos);
+ VISIT_AND_RETURN(CountOperation, node)
+ }
+
+ CompareOperation* NewCompareOperation(Token::Value op,
+ Expression* left,
+ Expression* right,
+ int pos) {
+ CompareOperation* node =
+ new(zone_) CompareOperation(isolate_, op, left, right, pos);
+ VISIT_AND_RETURN(CompareOperation, node)
+ }
+
+ Conditional* NewConditional(Expression* condition,
+ Expression* then_expression,
+ Expression* else_expression,
+ int then_expression_position,
+ int else_expression_position) {
+ Conditional* cond = new(zone_) Conditional(
+ isolate_, condition, then_expression, else_expression,
+ then_expression_position, else_expression_position);
+ VISIT_AND_RETURN(Conditional, cond)
+ }
+
+ Assignment* NewAssignment(Token::Value op,
+ Expression* target,
+ Expression* value,
+ int pos) {
+ Assignment* assign =
+ new(zone_) Assignment(isolate_, op, target, value, pos);
+ assign->Init(isolate_, this);
+ VISIT_AND_RETURN(Assignment, assign)
+ }
+
+ Throw* NewThrow(Expression* exception, int pos) {
+ Throw* t = new(zone_) Throw(isolate_, exception, pos);
+ VISIT_AND_RETURN(Throw, t)
+ }
+
+ FunctionLiteral* NewFunctionLiteral(
+ Handle<String> name,
+ Scope* scope,
+ ZoneList<Statement*>* body,
+ int materialized_literal_count,
+ int expected_property_count,
+ int handler_count,
+ bool has_only_simple_this_property_assignments,
+ Handle<FixedArray> this_property_assignments,
+ int parameter_count,
+ FunctionLiteral::ParameterFlag has_duplicate_parameters,
+ FunctionLiteral::Type type,
+ FunctionLiteral::IsFunctionFlag is_function) {
+ FunctionLiteral* lit = new(zone_) FunctionLiteral(
+ isolate_, name, scope, body,
+ materialized_literal_count, expected_property_count, handler_count,
+ has_only_simple_this_property_assignments, this_property_assignments,
+ parameter_count, type, has_duplicate_parameters, is_function);
+ // Top-level literal doesn't count for the AST's properties.
+ if (is_function == FunctionLiteral::kIsFunction) {
+ visitor_.VisitFunctionLiteral(lit);
+ }
+ return lit;
+ }
+
+ SharedFunctionInfoLiteral* NewSharedFunctionInfoLiteral(
+ Handle<SharedFunctionInfo> shared_function_info) {
+ SharedFunctionInfoLiteral* lit =
+ new(zone_) SharedFunctionInfoLiteral(isolate_, shared_function_info);
+ VISIT_AND_RETURN(SharedFunctionInfoLiteral, lit)
+ }
+
+ ThisFunction* NewThisFunction() {
+ ThisFunction* fun = new(zone_) ThisFunction(isolate_);
+ VISIT_AND_RETURN(ThisFunction, fun)
+ }
+
+#undef VISIT_AND_RETURN
+
+ private:
+ Isolate* isolate_;
+ Zone* zone_;
+ Visitor visitor_;
+};
+
+
} } // namespace v8::internal
#endif // V8_AST_H_
diff --git a/src/3rdparty/v8/src/atomicops.h b/src/3rdparty/v8/src/atomicops.h
index 55de87c..e2057ed 100644
--- a/src/3rdparty/v8/src/atomicops.h
+++ b/src/3rdparty/v8/src/atomicops.h
@@ -157,16 +157,7 @@ Atomic64 Release_Load(volatile const Atomic64* ptr);
(defined(V8_HOST_ARCH_IA32) || defined(V8_HOST_ARCH_X64))
#include "atomicops_internals_x86_gcc.h"
#elif defined(__GNUC__) && defined(V8_HOST_ARCH_ARM)
- // We need special handling for QNX as the existing code in
- // atomicops_internals_arm_gcc.h is actually Linux-specific. This is due to
- // it using a magic hard-wired function address for LinuxKernelCmpxchgFunc.
- // The QNX implementation uses the equivalent system call for that platform
- // but is not source compatible.
- #if defined(__QNXNTO__)
- #include "atomicops_internals_arm_qnx.h"
- #else
- #include "atomicops_internals_arm_gcc.h"
- #endif
+#include "atomicops_internals_arm_gcc.h"
#elif defined(__GNUC__) && defined(V8_HOST_ARCH_MIPS)
#include "atomicops_internals_mips_gcc.h"
#else
diff --git a/src/3rdparty/v8/src/atomicops_internals_arm_qnx.h b/src/3rdparty/v8/src/atomicops_internals_arm_qnx.h
deleted file mode 100644
index 39c9850..0000000
--- a/src/3rdparty/v8/src/atomicops_internals_arm_qnx.h
+++ /dev/null
@@ -1,117 +0,0 @@
-// Copyright 2012 Research in Motion. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-
-#ifndef V8_ATOMICOPS_INTERNALS_ARM_QNX_H_
-#define V8_ATOMICOPS_INTERNALS_ARM_QNX_H_
-
-#include <arm/cpuinline.h>
-#include <arm/smpxchg.h>
-
-namespace v8 {
-namespace internal {
-
-inline void MemoryBarrier() {
- __cpu_membarrier();
-}
-
-inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
- Atomic32 old_value,
- Atomic32 new_value) {
- return _smp_cmpxchg(reinterpret_cast<volatile unsigned*>(ptr), old_value, new_value);
-}
-
-inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
- Atomic32 new_value) {
- return _smp_xchg(reinterpret_cast<volatile unsigned*>(ptr), new_value);
-}
-
-inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
- Atomic32 increment) {
- for (;;) {
- // Atomic exchange the old value with an incremented one.
- Atomic32 old_value = *ptr;
- Atomic32 new_value = old_value + increment;
- if (_smp_cmpxchg(reinterpret_cast<volatile unsigned*>(ptr), old_value, new_value)) {
- // The exchange took place as expected.
- return new_value;
- }
- // Otherwise, *ptr changed mid-loop and we need to retry.
- }
-}
-
-inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
- Atomic32 increment) {
- MemoryBarrier();
- return NoBarrier_AtomicIncrement(ptr, increment);
-}
-
-inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
- Atomic32 old_value,
- Atomic32 new_value) {
- return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
-}
-
-inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
- Atomic32 old_value,
- Atomic32 new_value) {
- return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
-}
-
-inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
- *ptr = value;
-}
-
-inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
- *ptr = value;
- MemoryBarrier();
-}
-
-inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
- MemoryBarrier();
- *ptr = value;
-}
-
-inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
- return *ptr;
-}
-
-inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
- Atomic32 value = *ptr;
- MemoryBarrier();
- return value;
-}
-
-inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
- MemoryBarrier();
- return *ptr;
-}
-
-} } // namespace v8::internal
-
-#endif // V8_ATOMICOPS_INTERNALS_ARM_QNX_H_
-
diff --git a/src/3rdparty/v8/src/atomicops_internals_x86_macosx.h b/src/3rdparty/v8/src/atomicops_internals_x86_macosx.h
index 2bac006..bfb02b3 100644
--- a/src/3rdparty/v8/src/atomicops_internals_x86_macosx.h
+++ b/src/3rdparty/v8/src/atomicops_internals_x86_macosx.h
@@ -35,7 +35,7 @@
namespace v8 {
namespace internal {
-inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr,
+inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
Atomic32 old_value,
Atomic32 new_value) {
Atomic32 prev_value;
@@ -49,7 +49,7 @@ inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr,
return prev_value;
}
-inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr,
+inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
Atomic32 new_value) {
Atomic32 old_value;
do {
@@ -59,12 +59,12 @@ inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr,
return old_value;
}
-inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr,
+inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
Atomic32 increment) {
return OSAtomicAdd32(increment, const_cast<Atomic32*>(ptr));
}
-inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr,
+inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
Atomic32 increment) {
return OSAtomicAdd32Barrier(increment, const_cast<Atomic32*>(ptr));
}
@@ -73,7 +73,7 @@ inline void MemoryBarrier() {
OSMemoryBarrier();
}
-inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr,
+inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
Atomic32 old_value,
Atomic32 new_value) {
Atomic32 prev_value;
@@ -87,7 +87,7 @@ inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr,
return prev_value;
}
-inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr,
+inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
Atomic32 old_value,
Atomic32 new_value) {
return Acquire_CompareAndSwap(ptr, old_value, new_value);
@@ -97,12 +97,12 @@ inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
*ptr = value;
}
-inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) {
+inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
*ptr = value;
MemoryBarrier();
}
-inline void Release_Store(volatile Atomic32 *ptr, Atomic32 value) {
+inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
MemoryBarrier();
*ptr = value;
}
@@ -111,13 +111,13 @@ inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
return *ptr;
}
-inline Atomic32 Acquire_Load(volatile const Atomic32 *ptr) {
+inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
Atomic32 value = *ptr;
MemoryBarrier();
return value;
}
-inline Atomic32 Release_Load(volatile const Atomic32 *ptr) {
+inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
MemoryBarrier();
return *ptr;
}
@@ -126,7 +126,7 @@ inline Atomic32 Release_Load(volatile const Atomic32 *ptr) {
// 64-bit implementation on 64-bit platform
-inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr,
+inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
Atomic64 old_value,
Atomic64 new_value) {
Atomic64 prev_value;
@@ -140,7 +140,7 @@ inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr,
return prev_value;
}
-inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr,
+inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr,
Atomic64 new_value) {
Atomic64 old_value;
do {
@@ -150,17 +150,17 @@ inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr,
return old_value;
}
-inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64 *ptr,
+inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr,
Atomic64 increment) {
return OSAtomicAdd64(increment, const_cast<Atomic64*>(ptr));
}
-inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64 *ptr,
+inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr,
Atomic64 increment) {
return OSAtomicAdd64Barrier(increment, const_cast<Atomic64*>(ptr));
}
-inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr,
+inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
Atomic64 old_value,
Atomic64 new_value) {
Atomic64 prev_value;
@@ -174,7 +174,7 @@ inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr,
return prev_value;
}
-inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr,
+inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr,
Atomic64 old_value,
Atomic64 new_value) {
// The lib kern interface does not distinguish between
@@ -186,12 +186,12 @@ inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
*ptr = value;
}
-inline void Acquire_Store(volatile Atomic64 *ptr, Atomic64 value) {
+inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
*ptr = value;
MemoryBarrier();
}
-inline void Release_Store(volatile Atomic64 *ptr, Atomic64 value) {
+inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
MemoryBarrier();
*ptr = value;
}
@@ -200,13 +200,13 @@ inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
return *ptr;
}
-inline Atomic64 Acquire_Load(volatile const Atomic64 *ptr) {
+inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
Atomic64 value = *ptr;
MemoryBarrier();
return value;
}
-inline Atomic64 Release_Load(volatile const Atomic64 *ptr) {
+inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
MemoryBarrier();
return *ptr;
}
@@ -264,7 +264,7 @@ inline AtomicWord Release_CompareAndSwap(volatile AtomicWord* ptr,
old_value, new_value);
}
-inline void NoBarrier_Store(volatile AtomicWord *ptr, AtomicWord value) {
+inline void NoBarrier_Store(volatile AtomicWord* ptr, AtomicWord value) {
NoBarrier_Store(
reinterpret_cast<volatile AtomicWordCastType*>(ptr), value);
}
@@ -279,7 +279,7 @@ inline void Release_Store(volatile AtomicWord* ptr, AtomicWord value) {
reinterpret_cast<volatile AtomicWordCastType*>(ptr), value);
}
-inline AtomicWord NoBarrier_Load(volatile const AtomicWord *ptr) {
+inline AtomicWord NoBarrier_Load(volatile const AtomicWord* ptr) {
return NoBarrier_Load(
reinterpret_cast<volatile const AtomicWordCastType*>(ptr));
}
diff --git a/src/3rdparty/v8/src/bignum-dtoa.h b/src/3rdparty/v8/src/bignum-dtoa.h
index ea1acbb..93ec1f7 100644
--- a/src/3rdparty/v8/src/bignum-dtoa.h
+++ b/src/3rdparty/v8/src/bignum-dtoa.h
@@ -44,7 +44,7 @@ enum BignumDtoaMode {
BIGNUM_DTOA_PRECISION
};
-// Converts the given double 'v' to ascii.
+// Converts the given double 'v' to ASCII.
// The result should be interpreted as buffer * 10^(point-length).
// The buffer will be null-terminated.
//
diff --git a/src/3rdparty/v8/src/bootstrapper.cc b/src/3rdparty/v8/src/bootstrapper.cc
index b6ffbc2..0e95b4b 100644
--- a/src/3rdparty/v8/src/bootstrapper.cc
+++ b/src/3rdparty/v8/src/bootstrapper.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -76,22 +76,15 @@ Handle<String> Bootstrapper::NativesSourceLookup(int index) {
Factory* factory = isolate->factory();
Heap* heap = isolate->heap();
if (heap->natives_source_cache()->get(index)->IsUndefined()) {
- if (!Snapshot::IsEnabled() || FLAG_new_snapshot) {
- // We can use external strings for the natives.
- Vector<const char> source = Natives::GetRawScriptSource(index);
- NativesExternalStringResource* resource =
- new NativesExternalStringResource(this,
- source.start(),
- source.length());
- Handle<String> source_code =
- factory->NewExternalStringFromAscii(resource);
- heap->natives_source_cache()->set(index, *source_code);
- } else {
- // Old snapshot code can't cope with external strings at all.
- Handle<String> source_code =
- factory->NewStringFromAscii(Natives::GetRawScriptSource(index));
- heap->natives_source_cache()->set(index, *source_code);
- }
+ // We can use external strings for the natives.
+ Vector<const char> source = Natives::GetRawScriptSource(index);
+ NativesExternalStringResource* resource =
+ new NativesExternalStringResource(this,
+ source.start(),
+ source.length());
+ Handle<String> source_code =
+ factory->NewExternalStringFromAscii(resource);
+ heap->natives_source_cache()->set(index, *source_code);
}
Handle<Object> cached_source(heap->natives_source_cache()->get(index));
return Handle<String>::cast(cached_source);
@@ -179,6 +172,10 @@ class Genesis BASE_EMBEDDED {
Handle<JSFunction> GetThrowTypeErrorFunction();
void CreateStrictModeFunctionMaps(Handle<JSFunction> empty);
+
+ // Make the "arguments" and "caller" properties throw a TypeError on access.
+ void PoisonArgumentsAndCaller(Handle<Map> map);
+
// Creates the global objects using the global and the template passed in
// through the API. We call this regardless of whether we are building a
// context from scratch or using a deserialized one from the partial snapshot
@@ -199,7 +196,7 @@ class Genesis BASE_EMBEDDED {
// detached from the other objects in the snapshot.
void HookUpInnerGlobal(Handle<GlobalObject> inner_global);
// New context initialization. Used for creating a context from scratch.
- void InitializeGlobal(Handle<GlobalObject> inner_global,
+ bool InitializeGlobal(Handle<GlobalObject> inner_global,
Handle<JSFunction> empty_function);
void InitializeExperimentalGlobal();
// Installs the contents of the native .js files on the global objects.
@@ -211,12 +208,30 @@ class Genesis BASE_EMBEDDED {
void InstallBuiltinFunctionIds();
void InstallJSFunctionResultCaches();
void InitializeNormalizedMapCaches();
+
+ enum ExtensionTraversalState {
+ UNVISITED, VISITED, INSTALLED
+ };
+
+ class ExtensionStates {
+ public:
+ ExtensionStates();
+ ExtensionTraversalState get_state(RegisteredExtension* extension);
+ void set_state(RegisteredExtension* extension,
+ ExtensionTraversalState state);
+ private:
+ HashMap map_;
+ DISALLOW_COPY_AND_ASSIGN(ExtensionStates);
+ };
+
// Used both for deserialized and from-scratch contexts to add the extensions
// provided.
static bool InstallExtensions(Handle<Context> global_context,
v8::ExtensionConfiguration* extensions);
- static bool InstallExtension(const char* name);
- static bool InstallExtension(v8::RegisteredExtension* current);
+ static bool InstallExtension(const char* name,
+ ExtensionStates* extension_states);
+ static bool InstallExtension(v8::RegisteredExtension* current,
+ ExtensionStates* extension_states);
static void InstallSpecialObjects(Handle<Context> global_context);
bool InstallJSBuiltins(Handle<JSBuiltinsObject> builtins);
bool ConfigureApiObject(Handle<JSObject> object,
@@ -244,14 +259,10 @@ class Genesis BASE_EMBEDDED {
Handle<Map> CreateStrictModeFunctionMap(
PrototypePropertyMode prototype_mode,
- Handle<JSFunction> empty_function,
- Handle<FixedArray> arguments_callbacks,
- Handle<FixedArray> caller_callbacks);
+ Handle<JSFunction> empty_function);
Handle<DescriptorArray> ComputeStrictFunctionInstanceDescriptor(
- PrototypePropertyMode propertyMode,
- Handle<FixedArray> arguments,
- Handle<FixedArray> caller);
+ PrototypePropertyMode propertyMode);
static bool CompileBuiltin(Isolate* isolate, int index);
static bool CompileExperimentalBuiltin(Isolate* isolate, int index);
@@ -280,7 +291,7 @@ class Genesis BASE_EMBEDDED {
void Bootstrapper::Iterate(ObjectVisitor* v) {
extensions_cache_.Iterate(v);
- v->Synchronize("Extensions");
+ v->Synchronize(VisitorSynchronization::kExtensions);
}
@@ -359,7 +370,9 @@ static Handle<JSFunction> InstallFunction(Handle<JSObject> target,
} else {
attributes = DONT_ENUM;
}
- SetLocalPropertyNoThrow(target, symbol, function, attributes);
+ CHECK_NOT_EMPTY_HANDLE(isolate,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ target, symbol, function, attributes));
if (is_ecma_native) {
function->shared()->set_instance_class_name(*symbol);
}
@@ -370,44 +383,40 @@ static Handle<JSFunction> InstallFunction(Handle<JSObject> target,
Handle<DescriptorArray> Genesis::ComputeFunctionInstanceDescriptor(
PrototypePropertyMode prototypeMode) {
- Handle<DescriptorArray> descriptors =
- factory()->NewDescriptorArray(prototypeMode == DONT_ADD_PROTOTYPE
- ? 4
- : 5);
- PropertyAttributes attributes =
- static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
+ int size = (prototypeMode == DONT_ADD_PROTOTYPE) ? 4 : 5;
+ Handle<DescriptorArray> descriptors(factory()->NewDescriptorArray(size));
+ PropertyAttributes attribs = static_cast<PropertyAttributes>(
+ DONT_ENUM | DONT_DELETE | READ_ONLY);
DescriptorArray::WhitenessWitness witness(*descriptors);
{ // Add length.
- Handle<Foreign> foreign = factory()->NewForeign(&Accessors::FunctionLength);
- CallbacksDescriptor d(*factory()->length_symbol(), *foreign, attributes);
+ Handle<Foreign> f(factory()->NewForeign(&Accessors::FunctionLength));
+ CallbacksDescriptor d(*factory()->length_symbol(), *f, attribs);
descriptors->Set(0, &d, witness);
}
{ // Add name.
- Handle<Foreign> foreign = factory()->NewForeign(&Accessors::FunctionName);
- CallbacksDescriptor d(*factory()->name_symbol(), *foreign, attributes);
+ Handle<Foreign> f(factory()->NewForeign(&Accessors::FunctionName));
+ CallbacksDescriptor d(*factory()->name_symbol(), *f, attribs);
descriptors->Set(1, &d, witness);
}
{ // Add arguments.
- Handle<Foreign> foreign =
- factory()->NewForeign(&Accessors::FunctionArguments);
- CallbacksDescriptor d(*factory()->arguments_symbol(), *foreign, attributes);
+ Handle<Foreign> f(factory()->NewForeign(&Accessors::FunctionArguments));
+ CallbacksDescriptor d(*factory()->arguments_symbol(), *f, attribs);
descriptors->Set(2, &d, witness);
}
{ // Add caller.
- Handle<Foreign> foreign = factory()->NewForeign(&Accessors::FunctionCaller);
- CallbacksDescriptor d(*factory()->caller_symbol(), *foreign, attributes);
+ Handle<Foreign> f(factory()->NewForeign(&Accessors::FunctionCaller));
+ CallbacksDescriptor d(*factory()->caller_symbol(), *f, attribs);
descriptors->Set(3, &d, witness);
}
if (prototypeMode != DONT_ADD_PROTOTYPE) {
// Add prototype.
if (prototypeMode == ADD_WRITEABLE_PROTOTYPE) {
- attributes = static_cast<PropertyAttributes>(attributes & ~READ_ONLY);
+ attribs = static_cast<PropertyAttributes>(attribs & ~READ_ONLY);
}
- Handle<Foreign> foreign =
- factory()->NewForeign(&Accessors::FunctionPrototype);
- CallbacksDescriptor d(*factory()->prototype_symbol(), *foreign, attributes);
+ Handle<Foreign> f(factory()->NewForeign(&Accessors::FunctionPrototype));
+ CallbacksDescriptor d(*factory()->prototype_symbol(), *f, attribs);
descriptors->Set(4, &d, witness);
}
descriptors->Sort(witness);
@@ -483,7 +492,7 @@ Handle<JSFunction> Genesis::CreateEmptyFunction(Isolate* isolate) {
// 262 15.3.4.
Handle<String> symbol = factory->LookupAsciiSymbol("Empty");
Handle<JSFunction> empty_function =
- factory->NewFunctionWithoutPrototype(symbol, kNonStrictMode);
+ factory->NewFunctionWithoutPrototype(symbol, CLASSIC_MODE);
// --- E m p t y ---
Handle<Code> code =
@@ -518,47 +527,42 @@ Handle<JSFunction> Genesis::CreateEmptyFunction(Isolate* isolate) {
Handle<DescriptorArray> Genesis::ComputeStrictFunctionInstanceDescriptor(
- PrototypePropertyMode prototypeMode,
- Handle<FixedArray> arguments,
- Handle<FixedArray> caller) {
- Handle<DescriptorArray> descriptors =
- factory()->NewDescriptorArray(prototypeMode == DONT_ADD_PROTOTYPE
- ? 4
- : 5);
- PropertyAttributes attributes = static_cast<PropertyAttributes>(
+ PrototypePropertyMode prototypeMode) {
+ int size = (prototypeMode == DONT_ADD_PROTOTYPE) ? 4 : 5;
+ Handle<DescriptorArray> descriptors(factory()->NewDescriptorArray(size));
+ PropertyAttributes attribs = static_cast<PropertyAttributes>(
DONT_ENUM | DONT_DELETE);
DescriptorArray::WhitenessWitness witness(*descriptors);
- { // length
- Handle<Foreign> foreign = factory()->NewForeign(&Accessors::FunctionLength);
- CallbacksDescriptor d(*factory()->length_symbol(), *foreign, attributes);
+ { // Add length.
+ Handle<Foreign> f(factory()->NewForeign(&Accessors::FunctionLength));
+ CallbacksDescriptor d(*factory()->length_symbol(), *f, attribs);
descriptors->Set(0, &d, witness);
}
- { // name
- Handle<Foreign> foreign = factory()->NewForeign(&Accessors::FunctionName);
- CallbacksDescriptor d(*factory()->name_symbol(), *foreign, attributes);
+ { // Add name.
+ Handle<Foreign> f(factory()->NewForeign(&Accessors::FunctionName));
+ CallbacksDescriptor d(*factory()->name_symbol(), *f, attribs);
descriptors->Set(1, &d, witness);
}
- { // arguments
- CallbacksDescriptor d(*factory()->arguments_symbol(),
- *arguments,
- attributes);
+ { // Add arguments.
+ Handle<AccessorPair> arguments(factory()->NewAccessorPair());
+ CallbacksDescriptor d(*factory()->arguments_symbol(), *arguments, attribs);
descriptors->Set(2, &d, witness);
}
- { // caller
- CallbacksDescriptor d(*factory()->caller_symbol(), *caller, attributes);
+ { // Add caller.
+ Handle<AccessorPair> caller(factory()->NewAccessorPair());
+ CallbacksDescriptor d(*factory()->caller_symbol(), *caller, attribs);
descriptors->Set(3, &d, witness);
}
- // prototype
if (prototypeMode != DONT_ADD_PROTOTYPE) {
+ // Add prototype.
if (prototypeMode != ADD_WRITEABLE_PROTOTYPE) {
- attributes = static_cast<PropertyAttributes>(attributes | READ_ONLY);
+ attribs = static_cast<PropertyAttributes>(attribs | READ_ONLY);
}
- Handle<Foreign> foreign =
- factory()->NewForeign(&Accessors::FunctionPrototype);
- CallbacksDescriptor d(*factory()->prototype_symbol(), *foreign, attributes);
+ Handle<Foreign> f(factory()->NewForeign(&Accessors::FunctionPrototype));
+ CallbacksDescriptor d(*factory()->prototype_symbol(), *f, attribs);
descriptors->Set(4, &d, witness);
}
@@ -572,7 +576,7 @@ Handle<JSFunction> Genesis::GetThrowTypeErrorFunction() {
if (throw_type_error_function.is_null()) {
Handle<String> name = factory()->LookupAsciiSymbol("ThrowTypeError");
throw_type_error_function =
- factory()->NewFunctionWithoutPrototype(name, kNonStrictMode);
+ factory()->NewFunctionWithoutPrototype(name, CLASSIC_MODE);
Handle<Code> code(isolate()->builtins()->builtin(
Builtins::kStrictModePoisonPill));
throw_type_error_function->set_map(
@@ -581,7 +585,7 @@ Handle<JSFunction> Genesis::GetThrowTypeErrorFunction() {
throw_type_error_function->shared()->set_code(*code);
throw_type_error_function->shared()->DontAdaptArguments();
- PreventExtensions(throw_type_error_function);
+ JSObject::PreventExtensions(throw_type_error_function);
}
return throw_type_error_function;
}
@@ -589,14 +593,10 @@ Handle<JSFunction> Genesis::GetThrowTypeErrorFunction() {
Handle<Map> Genesis::CreateStrictModeFunctionMap(
PrototypePropertyMode prototype_mode,
- Handle<JSFunction> empty_function,
- Handle<FixedArray> arguments_callbacks,
- Handle<FixedArray> caller_callbacks) {
+ Handle<JSFunction> empty_function) {
Handle<Map> map = factory()->NewMap(JS_FUNCTION_TYPE, JSFunction::kSize);
Handle<DescriptorArray> descriptors =
- ComputeStrictFunctionInstanceDescriptor(prototype_mode,
- arguments_callbacks,
- caller_callbacks);
+ ComputeStrictFunctionInstanceDescriptor(prototype_mode);
map->set_instance_descriptors(*descriptors);
map->set_function_with_prototype(prototype_mode != DONT_ADD_PROTOTYPE);
map->set_prototype(*empty_function);
@@ -605,23 +605,15 @@ Handle<Map> Genesis::CreateStrictModeFunctionMap(
void Genesis::CreateStrictModeFunctionMaps(Handle<JSFunction> empty) {
- // Create the callbacks arrays for ThrowTypeError functions.
- // The get/set callacks are filled in after the maps are created below.
- Factory* factory = empty->GetIsolate()->factory();
- Handle<FixedArray> arguments = factory->NewFixedArray(2, TENURED);
- Handle<FixedArray> caller = factory->NewFixedArray(2, TENURED);
-
// Allocate map for the strict mode function instances.
Handle<Map> strict_mode_function_instance_map =
- CreateStrictModeFunctionMap(
- ADD_WRITEABLE_PROTOTYPE, empty, arguments, caller);
+ CreateStrictModeFunctionMap(ADD_WRITEABLE_PROTOTYPE, empty);
global_context()->set_strict_mode_function_instance_map(
*strict_mode_function_instance_map);
// Allocate map for the prototype-less strict mode instances.
Handle<Map> strict_mode_function_without_prototype_map =
- CreateStrictModeFunctionMap(
- DONT_ADD_PROTOTYPE, empty, arguments, caller);
+ CreateStrictModeFunctionMap(DONT_ADD_PROTOTYPE, empty);
global_context()->set_strict_mode_function_without_prototype_map(
*strict_mode_function_without_prototype_map);
@@ -629,26 +621,38 @@ void Genesis::CreateStrictModeFunctionMaps(Handle<JSFunction> empty) {
// only for processing of builtins.
// Later the map is replaced with writable prototype map, allocated below.
Handle<Map> strict_mode_function_map =
- CreateStrictModeFunctionMap(
- ADD_READONLY_PROTOTYPE, empty, arguments, caller);
+ CreateStrictModeFunctionMap(ADD_READONLY_PROTOTYPE, empty);
global_context()->set_strict_mode_function_map(
*strict_mode_function_map);
// The final map for the strict mode functions. Writeable prototype.
// This map is installed in MakeFunctionInstancePrototypeWritable.
strict_mode_function_instance_map_writable_prototype_ =
- CreateStrictModeFunctionMap(
- ADD_WRITEABLE_PROTOTYPE, empty, arguments, caller);
-
- // Create the ThrowTypeError function instance.
- Handle<JSFunction> throw_function =
- GetThrowTypeErrorFunction();
-
- // Complete the callback fixed arrays.
- arguments->set(0, *throw_function);
- arguments->set(1, *throw_function);
- caller->set(0, *throw_function);
- caller->set(1, *throw_function);
+ CreateStrictModeFunctionMap(ADD_WRITEABLE_PROTOTYPE, empty);
+
+ // Complete the callbacks.
+ PoisonArgumentsAndCaller(strict_mode_function_instance_map);
+ PoisonArgumentsAndCaller(strict_mode_function_without_prototype_map);
+ PoisonArgumentsAndCaller(strict_mode_function_map);
+ PoisonArgumentsAndCaller(
+ strict_mode_function_instance_map_writable_prototype_);
+}
+
+
+static void SetAccessors(Handle<Map> map,
+ Handle<String> name,
+ Handle<JSFunction> func) {
+ DescriptorArray* descs = map->instance_descriptors();
+ int number = descs->Search(*name);
+ AccessorPair* accessors = AccessorPair::cast(descs->GetValue(number));
+ accessors->set_getter(*func);
+ accessors->set_setter(*func);
+}
+
+
+void Genesis::PoisonArgumentsAndCaller(Handle<Map> map) {
+ SetAccessors(map, factory()->arguments_symbol(), GetThrowTypeErrorFunction());
+ SetAccessors(map, factory()->caller_symbol(), GetThrowTypeErrorFunction());
}
@@ -734,11 +738,10 @@ Handle<JSGlobalProxy> Genesis::CreateNewGlobals(
Handle<JSObject> prototype =
Handle<JSObject>(
JSObject::cast(js_global_function->instance_prototype()));
- SetLocalPropertyNoThrow(
- prototype,
- factory()->constructor_symbol(),
- isolate()->object_function(),
- NONE);
+ CHECK_NOT_EMPTY_HANDLE(isolate(),
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ prototype, factory()->constructor_symbol(),
+ isolate()->object_function(), NONE));
} else {
Handle<FunctionTemplateInfo> js_global_constructor(
FunctionTemplateInfo::cast(js_global_template->constructor()));
@@ -808,7 +811,6 @@ void Genesis::HookUpInnerGlobal(Handle<GlobalObject> inner_global) {
Handle<JSBuiltinsObject> builtins_global(global_context_->builtins());
global_context_->set_extension(*inner_global);
global_context_->set_global(*inner_global);
- global_context_->set_qml_global(*inner_global);
global_context_->set_security_token(*inner_global);
static const PropertyAttributes attributes =
static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
@@ -816,7 +818,7 @@ void Genesis::HookUpInnerGlobal(Handle<GlobalObject> inner_global) {
factory()->LookupAsciiSymbol("global"),
inner_global,
attributes);
- // Setup the reference from the global object to the builtins object.
+ // Set up the reference from the global object to the builtins object.
JSGlobalObject::cast(*inner_global)->set_builtins(*builtins_global);
TransferNamedProperties(inner_global_from_snapshot, inner_global);
TransferIndexedProperties(inner_global_from_snapshot, inner_global);
@@ -825,7 +827,7 @@ void Genesis::HookUpInnerGlobal(Handle<GlobalObject> inner_global) {
// This is only called if we are not using snapshots. The equivalent
// work in the snapshot case is done in HookUpInnerGlobal.
-void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
+bool Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
Handle<JSFunction> empty_function) {
// --- G l o b a l C o n t e x t ---
// Use the empty function as closure (no scope info).
@@ -834,7 +836,6 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
// Set extension and global object.
global_context()->set_extension(*inner_global);
global_context()->set_global(*inner_global);
- global_context()->set_qml_global(*inner_global);
// Security setup: Set the security token of the global object to
// its the inner global. This makes the security check between two
// different contexts fail by default even in case of global
@@ -846,8 +847,10 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
Heap* heap = isolate->heap();
Handle<String> object_name = Handle<String>(heap->Object_symbol());
- SetLocalPropertyNoThrow(inner_global, object_name,
- isolate->object_function(), DONT_ENUM);
+ CHECK_NOT_EMPTY_HANDLE(isolate,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ inner_global, object_name,
+ isolate->object_function(), DONT_ENUM));
Handle<JSObject> global = Handle<JSObject>(global_context()->global());
@@ -874,15 +877,12 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
factory->NewForeign(&Accessors::ArrayLength),
static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE));
- // Cache the fast JavaScript array map
- global_context()->set_js_array_map(array_function->initial_map());
- global_context()->js_array_map()->set_instance_descriptors(
- *array_descriptors);
// array_function is used internally. JS code creating array object should
// search for the 'Array' property on the global object and use that one
// as the constructor. 'Array' property on a global object can be
// overwritten by JS code.
global_context()->set_array_function(*array_function);
+ array_function->initial_map()->set_instance_descriptors(*array_descriptors);
}
{ // --- N u m b e r ---
@@ -927,7 +927,7 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
{ // --- D a t e ---
// Builtin functions for Date.prototype.
Handle<JSFunction> date_fun =
- InstallFunction(global, "Date", JS_VALUE_TYPE, JSValue::kSize,
+ InstallFunction(global, "Date", JS_DATE_TYPE, JSDate::kSize,
isolate->initial_object_prototype(),
Builtins::kIllegal, true);
@@ -1029,14 +1029,18 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
{ // -- J S O N
Handle<String> name = factory->NewStringFromAscii(CStrVector("JSON"));
- Handle<JSFunction> cons = factory->NewFunction(
- name,
- factory->the_hole_value());
- cons->SetInstancePrototype(global_context()->initial_object_prototype());
+ Handle<JSFunction> cons = factory->NewFunction(name,
+ factory->the_hole_value());
+ { MaybeObject* result = cons->SetInstancePrototype(
+ global_context()->initial_object_prototype());
+ if (result->IsFailure()) return false;
+ }
cons->SetInstanceClassName(*name);
Handle<JSObject> json_object = factory->NewJSObject(cons, TENURED);
ASSERT(json_object->IsJSObject());
- SetLocalPropertyNoThrow(global, name, json_object, DONT_ENUM);
+ CHECK_NOT_EMPTY_HANDLE(isolate,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ global, name, json_object, DONT_ENUM));
global_context()->set_json_object(*json_object);
}
@@ -1066,21 +1070,23 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
global_context()->set_arguments_boilerplate(*result);
// Note: length must be added as the first property and
// callee must be added as the second property.
- SetLocalPropertyNoThrow(result, factory->length_symbol(),
- factory->undefined_value(),
- DONT_ENUM);
- SetLocalPropertyNoThrow(result, factory->callee_symbol(),
- factory->undefined_value(),
- DONT_ENUM);
+ CHECK_NOT_EMPTY_HANDLE(isolate,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ result, factory->length_symbol(),
+ factory->undefined_value(), DONT_ENUM));
+ CHECK_NOT_EMPTY_HANDLE(isolate,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ result, factory->callee_symbol(),
+ factory->undefined_value(), DONT_ENUM));
#ifdef DEBUG
LookupResult lookup(isolate);
result->LocalLookup(heap->callee_symbol(), &lookup);
- ASSERT(lookup.IsProperty() && (lookup.type() == FIELD));
+ ASSERT(lookup.IsFound() && (lookup.type() == FIELD));
ASSERT(lookup.GetFieldIndex() == Heap::kArgumentsCalleeIndex);
result->LocalLookup(heap->length_symbol(), &lookup);
- ASSERT(lookup.IsProperty() && (lookup.type() == FIELD));
+ ASSERT(lookup.IsFound() && (lookup.type() == FIELD));
ASSERT(lookup.GetFieldIndex() == Heap::kArgumentsLengthIndex);
ASSERT(result->map()->inobject_properties() > Heap::kArgumentsCalleeIndex);
@@ -1119,17 +1125,17 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
// Create the ThrowTypeError functions.
- Handle<FixedArray> callee = factory->NewFixedArray(2, TENURED);
- Handle<FixedArray> caller = factory->NewFixedArray(2, TENURED);
+ Handle<AccessorPair> callee = factory->NewAccessorPair();
+ Handle<AccessorPair> caller = factory->NewAccessorPair();
Handle<JSFunction> throw_function =
GetThrowTypeErrorFunction();
// Install the ThrowTypeError functions.
- callee->set(0, *throw_function);
- callee->set(1, *throw_function);
- caller->set(0, *throw_function);
- caller->set(1, *throw_function);
+ callee->set_getter(*throw_function);
+ callee->set_setter(*throw_function);
+ caller->set_getter(*throw_function);
+ caller->set_setter(*throw_function);
// Create the descriptor array for the arguments object.
Handle<DescriptorArray> descriptors = factory->NewDescriptorArray(3);
@@ -1166,14 +1172,15 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
global_context()->set_strict_mode_arguments_boilerplate(*result);
// Add length property only for strict mode boilerplate.
- SetLocalPropertyNoThrow(result, factory->length_symbol(),
- factory->undefined_value(),
- DONT_ENUM);
+ CHECK_NOT_EMPTY_HANDLE(isolate,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ result, factory->length_symbol(),
+ factory->undefined_value(), DONT_ENUM));
#ifdef DEBUG
LookupResult lookup(isolate);
result->LocalLookup(heap->length_symbol(), &lookup);
- ASSERT(lookup.IsProperty() && (lookup.type() == FIELD));
+ ASSERT(lookup.IsFound() && (lookup.type() == FIELD));
ASSERT(lookup.GetFieldIndex() == Heap::kArgumentsLengthIndex);
ASSERT(result->map()->inobject_properties() > Heap::kArgumentsLengthIndex);
@@ -1238,6 +1245,7 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
global_context()->set_random_seed(*zeroed_byte_array);
memset(zeroed_byte_array->GetDataStartAddress(), 0, kRandomStateSize);
}
+ return true;
}
@@ -1293,6 +1301,12 @@ bool Genesis::CompileNative(Vector<const char> name, Handle<String> source) {
#ifdef ENABLE_DEBUGGER_SUPPORT
isolate->debugger()->set_compiling_natives(true);
#endif
+ // During genesis, the boilerplate for stack overflow won't work until the
+ // environment has been at least partially initialized. Add a stack check
+ // before entering JS code to catch overflow early.
+ StackLimitCheck check(Isolate::Current());
+ if (check.HasOverflowed()) return false;
+
bool result = CompileScriptCached(name,
source,
NULL,
@@ -1336,7 +1350,7 @@ bool Genesis::CompileScriptCached(Vector<const char> name,
if (cache != NULL) cache->Add(name, function_info);
}
- // Setup the function context. Conceptually, we should clone the
+ // Set up the function context. Conceptually, we should clone the
// function before overwriting the context but since we're in a
// single-threaded environment it is not strictly necessary.
ASSERT(top_context->IsGlobalContext());
@@ -1423,7 +1437,7 @@ bool Genesis::InstallNatives() {
builtins->set_global_context(*global_context());
builtins->set_global_receiver(*builtins);
- // Setup the 'global' properties of the builtins object. The
+ // Set up the 'global' properties of the builtins object. The
// 'global' property that refers to the global object is the only
// way to get from code running in the builtins context to the
// global object.
@@ -1431,9 +1445,11 @@ bool Genesis::InstallNatives() {
static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
Handle<String> global_symbol = factory()->LookupAsciiSymbol("global");
Handle<Object> global_obj(global_context()->global());
- SetLocalPropertyNoThrow(builtins, global_symbol, global_obj, attributes);
+ CHECK_NOT_EMPTY_HANDLE(isolate(),
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ builtins, global_symbol, global_obj, attributes));
- // Setup the reference from the global object to the builtins object.
+ // Set up the reference from the global object to the builtins object.
JSGlobalObject::cast(global_context()->global())->set_builtins(*builtins);
// Create a bridge function that has context in the global context.
@@ -1596,16 +1612,13 @@ bool Genesis::InstallNatives() {
// doesn't inherit from Object.prototype.
// To be used only for internal work by builtins. Instances
// must not be leaked to user code.
- // Only works correctly when called as a constructor. The normal
- // Array code uses Array.prototype as prototype when called as
- // a function.
Handle<JSFunction> array_function =
InstallFunction(builtins,
"InternalArray",
JS_ARRAY_TYPE,
JSArray::kSize,
isolate()->initial_object_prototype(),
- Builtins::kArrayCode,
+ Builtins::kInternalArrayCode,
true);
Handle<JSObject> prototype =
factory()->NewJSObject(isolate()->object_function(), TENURED);
@@ -1623,7 +1636,7 @@ bool Genesis::InstallNatives() {
MaybeObject* maybe_map =
array_function->initial_map()->CopyDropTransitions();
Map* new_map;
- if (!maybe_map->To<Map>(&new_map)) return maybe_map;
+ if (!maybe_map->To<Map>(&new_map)) return false;
new_map->set_elements_kind(FAST_ELEMENTS);
array_function->set_initial_map(new_map);
@@ -1637,6 +1650,8 @@ bool Genesis::InstallNatives() {
array_function->initial_map()->set_instance_descriptors(
*array_descriptors);
+
+ global_context()->set_internal_array_function(*array_function);
}
if (FLAG_disable_native_files) {
@@ -1658,7 +1673,7 @@ bool Genesis::InstallNatives() {
InstallNativeFunctions();
// Store the map for the string prototype after the natives has been compiled
- // and the String function has been setup.
+ // and the String function has been set up.
Handle<JSFunction> string_function(global_context()->string_function());
ASSERT(JSObject::cast(
string_function->initial_map()->prototype())->HasFastProperties());
@@ -1720,17 +1735,17 @@ bool Genesis::InstallNatives() {
initial_map->set_prototype(*array_prototype);
// Update map with length accessor from Array and add "index" and "input".
- Handle<Map> array_map(global_context()->js_array_map());
- Handle<DescriptorArray> array_descriptors(
- array_map->instance_descriptors());
- ASSERT_EQ(1, array_descriptors->number_of_descriptors());
-
Handle<DescriptorArray> reresult_descriptors =
factory()->NewDescriptorArray(3);
-
DescriptorArray::WhitenessWitness witness(*reresult_descriptors);
- reresult_descriptors->CopyFrom(0, *array_descriptors, 0, witness);
+ JSFunction* array_function = global_context()->array_function();
+ Handle<DescriptorArray> array_descriptors(
+ array_function->initial_map()->instance_descriptors());
+ int index = array_descriptors->SearchWithCache(heap()->length_symbol());
+ MaybeObject* copy_result =
+ reresult_descriptors->CopyFrom(0, *array_descriptors, index, witness);
+ if (copy_result->IsFailure()) return false;
int enum_index = 0;
{
@@ -1895,25 +1910,28 @@ bool Bootstrapper::InstallExtensions(Handle<Context> global_context,
void Genesis::InstallSpecialObjects(Handle<Context> global_context) {
- Factory* factory = global_context->GetIsolate()->factory();
+ Isolate* isolate = global_context->GetIsolate();
+ Factory* factory = isolate->factory();
HandleScope scope;
- Handle<JSGlobalObject> js_global(
- JSGlobalObject::cast(global_context->global()));
+ Handle<JSGlobalObject> global(JSGlobalObject::cast(global_context->global()));
// Expose the natives in global if a name for it is specified.
if (FLAG_expose_natives_as != NULL && strlen(FLAG_expose_natives_as) != 0) {
- Handle<String> natives_string =
- factory->LookupAsciiSymbol(FLAG_expose_natives_as);
- SetLocalPropertyNoThrow(js_global, natives_string,
- Handle<JSObject>(js_global->builtins()), DONT_ENUM);
+ Handle<String> natives = factory->LookupAsciiSymbol(FLAG_expose_natives_as);
+ CHECK_NOT_EMPTY_HANDLE(isolate,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ global, natives,
+ Handle<JSObject>(global->builtins()),
+ DONT_ENUM));
}
- Handle<Object> Error = GetProperty(js_global, "Error");
+ Handle<Object> Error = GetProperty(global, "Error");
if (Error->IsJSObject()) {
Handle<String> name = factory->LookupAsciiSymbol("stackTraceLimit");
- SetLocalPropertyNoThrow(Handle<JSObject>::cast(Error),
- name,
- Handle<Smi>(Smi::FromInt(FLAG_stack_trace_limit)),
- NONE);
+ Handle<Smi> stack_trace_limit(Smi::FromInt(FLAG_stack_trace_limit));
+ CHECK_NOT_EMPTY_HANDLE(isolate,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ Handle<JSObject>::cast(Error), name,
+ stack_trace_limit, NONE));
}
#ifdef ENABLE_DEBUGGER_SUPPORT
@@ -1932,11 +1950,39 @@ void Genesis::InstallSpecialObjects(Handle<Context> global_context) {
Handle<String> debug_string =
factory->LookupAsciiSymbol(FLAG_expose_debug_as);
Handle<Object> global_proxy(debug->debug_context()->global_proxy());
- SetLocalPropertyNoThrow(js_global, debug_string, global_proxy, DONT_ENUM);
+ CHECK_NOT_EMPTY_HANDLE(isolate,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ global, debug_string, global_proxy, DONT_ENUM));
}
#endif
}
+static uint32_t Hash(RegisteredExtension* extension) {
+ return v8::internal::ComputePointerHash(extension);
+}
+
+static bool MatchRegisteredExtensions(void* key1, void* key2) {
+ return key1 == key2;
+}
+
+Genesis::ExtensionStates::ExtensionStates()
+ : map_(MatchRegisteredExtensions, 8) { }
+
+Genesis::ExtensionTraversalState Genesis::ExtensionStates::get_state(
+ RegisteredExtension* extension) {
+ i::HashMap::Entry* entry = map_.Lookup(extension, Hash(extension), false);
+ if (entry == NULL) {
+ return UNVISITED;
+ }
+ return static_cast<ExtensionTraversalState>(
+ reinterpret_cast<intptr_t>(entry->value));
+}
+
+void Genesis::ExtensionStates::set_state(RegisteredExtension* extension,
+ ExtensionTraversalState state) {
+ map_.Lookup(extension, Hash(extension), true)->value =
+ reinterpret_cast<void*>(static_cast<intptr_t>(state));
+}
bool Genesis::InstallExtensions(Handle<Context> global_context,
v8::ExtensionConfiguration* extensions) {
@@ -1944,29 +1990,27 @@ bool Genesis::InstallExtensions(Handle<Context> global_context,
// effort. (The external API reads 'ignore'-- does that mean
// we can break the interface?)
- // Clear coloring of extension list
- v8::RegisteredExtension* current = v8::RegisteredExtension::first_extension();
- while (current != NULL) {
- current->set_state(v8::UNVISITED);
- current = current->next();
- }
+
+ ExtensionStates extension_states; // All extensions have state UNVISITED.
// Install auto extensions.
- current = v8::RegisteredExtension::first_extension();
+ v8::RegisteredExtension* current = v8::RegisteredExtension::first_extension();
while (current != NULL) {
if (current->extension()->auto_enable())
- InstallExtension(current);
+ InstallExtension(current, &extension_states);
current = current->next();
}
- if (FLAG_expose_gc) InstallExtension("v8/gc");
- if (FLAG_expose_externalize_string) InstallExtension("v8/externalize");
+ if (FLAG_expose_gc) InstallExtension("v8/gc", &extension_states);
+ if (FLAG_expose_externalize_string) {
+ InstallExtension("v8/externalize", &extension_states);
+ }
if (extensions == NULL) return true;
// Install required extensions
int count = v8::ImplementationUtilities::GetNameCount(extensions);
const char** names = v8::ImplementationUtilities::GetNames(extensions);
for (int i = 0; i < count; i++) {
- if (!InstallExtension(names[i]))
+ if (!InstallExtension(names[i], &extension_states))
return false;
}
@@ -1976,7 +2020,8 @@ bool Genesis::InstallExtensions(Handle<Context> global_context,
// Installs a named extension. This methods is unoptimized and does
// not scale well if we want to support a large number of extensions.
-bool Genesis::InstallExtension(const char* name) {
+bool Genesis::InstallExtension(const char* name,
+ ExtensionStates* extension_states) {
v8::RegisteredExtension* current = v8::RegisteredExtension::first_extension();
// Loop until we find the relevant extension
while (current != NULL) {
@@ -1989,27 +2034,29 @@ bool Genesis::InstallExtension(const char* name) {
"v8::Context::New()", "Cannot find required extension");
return false;
}
- return InstallExtension(current);
+ return InstallExtension(current, extension_states);
}
-bool Genesis::InstallExtension(v8::RegisteredExtension* current) {
+bool Genesis::InstallExtension(v8::RegisteredExtension* current,
+ ExtensionStates* extension_states) {
HandleScope scope;
- if (current->state() == v8::INSTALLED) return true;
+ if (extension_states->get_state(current) == INSTALLED) return true;
// The current node has already been visited so there must be a
// cycle in the dependency graph; fail.
- if (current->state() == v8::VISITED) {
+ if (extension_states->get_state(current) == VISITED) {
v8::Utils::ReportApiFailure(
"v8::Context::New()", "Circular extension dependency");
return false;
}
- ASSERT(current->state() == v8::UNVISITED);
- current->set_state(v8::VISITED);
+ ASSERT(extension_states->get_state(current) == UNVISITED);
+ extension_states->set_state(current, VISITED);
v8::Extension* extension = current->extension();
// Install the extension's dependencies
for (int i = 0; i < extension->dependency_count(); i++) {
- if (!InstallExtension(extension->dependencies()[i])) return false;
+ if (!InstallExtension(extension->dependencies()[i], extension_states))
+ return false;
}
Isolate* isolate = Isolate::Current();
Handle<String> source_code =
@@ -2031,7 +2078,8 @@ bool Genesis::InstallExtension(v8::RegisteredExtension* current) {
current->extension()->name());
isolate->clear_pending_exception();
}
- current->set_state(v8::INSTALLED);
+ extension_states->set_state(current, INSTALLED);
+ isolate->NotifyExtensionInstalled();
return result;
}
@@ -2118,7 +2166,9 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from,
Handle<String> key = Handle<String>(descs->GetKey(i));
int index = descs->GetFieldIndex(i);
Handle<Object> value = Handle<Object>(from->FastPropertyAt(index));
- SetLocalPropertyNoThrow(to, key, value, details.attributes());
+ CHECK_NOT_EMPTY_HANDLE(to->GetIsolate(),
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ to, key, value, details.attributes()));
break;
}
case CONSTANT_FUNCTION: {
@@ -2126,7 +2176,9 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from,
Handle<String> key = Handle<String>(descs->GetKey(i));
Handle<JSFunction> fun =
Handle<JSFunction>(descs->GetConstantFunction(i));
- SetLocalPropertyNoThrow(to, key, fun, details.attributes());
+ CHECK_NOT_EMPTY_HANDLE(to->GetIsolate(),
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ to, key, fun, details.attributes()));
break;
}
case CALLBACKS: {
@@ -2141,7 +2193,7 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from,
Handle<Object> callbacks(descs->GetCallbacksObject(i));
PropertyDetails d =
PropertyDetails(details.attributes(), CALLBACKS, details.index());
- SetNormalizedProperty(to, key, callbacks, d);
+ JSObject::SetNormalizedProperty(to, key, callbacks, d);
break;
}
case MAP_TRANSITION:
@@ -2178,7 +2230,9 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from,
value = Handle<Object>(JSGlobalPropertyCell::cast(*value)->value());
}
PropertyDetails details = properties->DetailsAt(i);
- SetLocalPropertyNoThrow(to, key, value, details.attributes());
+ CHECK_NOT_EMPTY_HANDLE(to->GetIsolate(),
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ to, key, value, details.attributes()));
}
}
}
@@ -2241,6 +2295,12 @@ Genesis::Genesis(Isolate* isolate,
HandleScope scope;
SaveContext saved_context(isolate);
+ // During genesis, the boilerplate for stack overflow won't work until the
+ // environment has been at least partially initialized. Add a stack check
+ // before entering JS code to catch overflow early.
+ StackLimitCheck check(Isolate::Current());
+ if (check.HasOverflowed()) return;
+
Handle<Context> new_context = Snapshot::NewContextFromSnapshot();
if (!new_context.is_null()) {
global_context_ =
@@ -2267,7 +2327,7 @@ Genesis::Genesis(Isolate* isolate,
Handle<JSGlobalProxy> global_proxy =
CreateNewGlobals(global_template, global_object, &inner_global);
HookUpGlobalProxy(inner_global, global_proxy);
- InitializeGlobal(inner_global, empty_function);
+ if (!InitializeGlobal(inner_global, empty_function)) return;
InstallJSFunctionResultCaches();
InitializeNormalizedMapCaches();
if (!InstallNatives()) return;
diff --git a/src/3rdparty/v8/src/bootstrapper.h b/src/3rdparty/v8/src/bootstrapper.h
index abf61b9..101c2e1 100644
--- a/src/3rdparty/v8/src/bootstrapper.h
+++ b/src/3rdparty/v8/src/bootstrapper.h
@@ -88,7 +88,7 @@ class SourceCodeCache BASE_EMBEDDED {
// context.
class Bootstrapper {
public:
- // Requires: Heap::Setup has been called.
+ // Requires: Heap::SetUp has been called.
void Initialize(bool create_heap_objects);
void TearDown();
diff --git a/src/3rdparty/v8/src/builtins.cc b/src/3rdparty/v8/src/builtins.cc
index e758b9a..0f493e6 100644
--- a/src/3rdparty/v8/src/builtins.cc
+++ b/src/3rdparty/v8/src/builtins.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -33,6 +33,7 @@
#include "builtins.h"
#include "gdb-jit.h"
#include "ic-inl.h"
+#include "heap-profiler.h"
#include "mark-compact.h"
#include "vm-state-inl.h"
@@ -184,40 +185,46 @@ BUILTIN(EmptyFunction) {
}
-BUILTIN(ArrayCodeGeneric) {
+static MaybeObject* ArrayCodeGenericCommon(Arguments* args,
+ Isolate* isolate,
+ JSFunction* constructor) {
Heap* heap = isolate->heap();
isolate->counters()->array_function_runtime()->Increment();
JSArray* array;
if (CalledAsConstructor(isolate)) {
- array = JSArray::cast(*args.receiver());
+ array = JSArray::cast((*args)[0]);
+ // Initialize elements and length in case later allocations fail so that the
+ // array object is initialized in a valid state.
+ array->set_length(Smi::FromInt(0));
+ array->set_elements(heap->empty_fixed_array());
+ if (!FLAG_smi_only_arrays) {
+ Context* global_context = isolate->context()->global_context();
+ if (array->GetElementsKind() == FAST_SMI_ONLY_ELEMENTS &&
+ !global_context->object_js_array_map()->IsUndefined()) {
+ array->set_map(Map::cast(global_context->object_js_array_map()));
+ }
+ }
} else {
// Allocate the JS Array
- JSFunction* constructor =
- isolate->context()->global_context()->array_function();
- Object* obj;
- { MaybeObject* maybe_obj = heap->AllocateJSObject(constructor);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
- array = JSArray::cast(obj);
+ MaybeObject* maybe_obj = heap->AllocateJSObject(constructor);
+ if (!maybe_obj->To(&array)) return maybe_obj;
}
- // 'array' now contains the JSArray we should initialize.
- ASSERT(array->HasFastTypeElements());
-
// Optimize the case where there is one argument and the argument is a
// small smi.
- if (args.length() == 2) {
- Object* obj = args[1];
+ if (args->length() == 2) {
+ Object* obj = (*args)[1];
if (obj->IsSmi()) {
int len = Smi::cast(obj)->value();
if (len >= 0 && len < JSObject::kInitialMaxFastElementArray) {
- Object* obj;
+ Object* fixed_array;
{ MaybeObject* maybe_obj = heap->AllocateFixedArrayWithHoles(len);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+ if (!maybe_obj->ToObject(&fixed_array)) return maybe_obj;
}
- MaybeObject* maybe_obj = array->SetContent(FixedArray::cast(obj));
- if (maybe_obj->IsFailure()) return maybe_obj;
+ // We do not use SetContent to skip the unnecessary elements type check.
+ array->set_elements(FixedArray::cast(fixed_array));
+ array->set_length(Smi::cast(obj));
return array;
}
}
@@ -225,83 +232,82 @@ BUILTIN(ArrayCodeGeneric) {
{ MaybeObject* maybe_obj = array->Initialize(0);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
- return array->SetElementsLength(args[1]);
+ return array->SetElementsLength((*args)[1]);
}
// Optimize the case where there are no parameters passed.
- if (args.length() == 1) {
+ if (args->length() == 1) {
return array->Initialize(JSArray::kPreallocatedArrayElements);
}
- // Take the arguments as elements.
- int number_of_elements = args.length() - 1;
- Smi* len = Smi::FromInt(number_of_elements);
- Object* obj;
- { MaybeObject* maybe_obj = heap->AllocateFixedArrayWithHoles(len->value());
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
-
// Set length and elements on the array.
+ int number_of_elements = args->length() - 1;
MaybeObject* maybe_object =
- array->EnsureCanContainElements(FixedArray::cast(obj));
+ array->EnsureCanContainElements(args, 1, number_of_elements,
+ ALLOW_CONVERTED_DOUBLE_ELEMENTS);
if (maybe_object->IsFailure()) return maybe_object;
- AssertNoAllocation no_gc;
- FixedArray* elms = FixedArray::cast(obj);
- WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
- // Fill in the content
- for (int index = 0; index < number_of_elements; index++) {
- elms->set(index, args[index+1], mode);
+ // Allocate an appropriately typed elements array.
+ MaybeObject* maybe_elms;
+ ElementsKind elements_kind = array->GetElementsKind();
+ if (elements_kind == FAST_DOUBLE_ELEMENTS) {
+ maybe_elms = heap->AllocateUninitializedFixedDoubleArray(
+ number_of_elements);
+ } else {
+ maybe_elms = heap->AllocateFixedArrayWithHoles(number_of_elements);
}
+ FixedArrayBase* elms;
+ if (!maybe_elms->To<FixedArrayBase>(&elms)) return maybe_elms;
- array->set_elements(FixedArray::cast(obj));
- array->set_length(len);
+ // Fill in the content
+ switch (array->GetElementsKind()) {
+ case FAST_SMI_ONLY_ELEMENTS: {
+ FixedArray* smi_elms = FixedArray::cast(elms);
+ for (int index = 0; index < number_of_elements; index++) {
+ smi_elms->set(index, (*args)[index+1], SKIP_WRITE_BARRIER);
+ }
+ break;
+ }
+ case FAST_ELEMENTS: {
+ AssertNoAllocation no_gc;
+ WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
+ FixedArray* object_elms = FixedArray::cast(elms);
+ for (int index = 0; index < number_of_elements; index++) {
+ object_elms->set(index, (*args)[index+1], mode);
+ }
+ break;
+ }
+ case FAST_DOUBLE_ELEMENTS: {
+ FixedDoubleArray* double_elms = FixedDoubleArray::cast(elms);
+ for (int index = 0; index < number_of_elements; index++) {
+ double_elms->set(index, (*args)[index+1]->Number());
+ }
+ break;
+ }
+ default:
+ UNREACHABLE();
+ break;
+ }
+ array->set_elements(elms);
+ array->set_length(Smi::FromInt(number_of_elements));
return array;
}
-MUST_USE_RESULT static MaybeObject* AllocateJSArray(Heap* heap) {
- JSFunction* array_function =
- heap->isolate()->context()->global_context()->array_function();
- Object* result;
- { MaybeObject* maybe_result = heap->AllocateJSObject(array_function);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- return result;
-}
-
-
-MUST_USE_RESULT static MaybeObject* AllocateEmptyJSArray(Heap* heap) {
- Object* result;
- { MaybeObject* maybe_result = AllocateJSArray(heap);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- JSArray* result_array = JSArray::cast(result);
- result_array->set_length(Smi::FromInt(0));
- result_array->set_elements(heap->empty_fixed_array());
- return result_array;
+BUILTIN(InternalArrayCodeGeneric) {
+ return ArrayCodeGenericCommon(
+ &args,
+ isolate,
+ isolate->context()->global_context()->internal_array_function());
}
-static void CopyElements(Heap* heap,
- AssertNoAllocation* no_gc,
- FixedArray* dst,
- int dst_index,
- FixedArray* src,
- int src_index,
- int len) {
- ASSERT(dst != src); // Use MoveElements instead.
- ASSERT(dst->map() != HEAP->fixed_cow_array_map());
- ASSERT(len > 0);
- CopyWords(dst->data_start() + dst_index,
- src->data_start() + src_index,
- len);
- WriteBarrierMode mode = dst->GetWriteBarrierMode(*no_gc);
- if (mode == UPDATE_WRITE_BARRIER) {
- heap->RecordWrites(dst->address(), dst->OffsetOfElementAt(dst_index), len);
- }
- heap->incremental_marking()->RecordWrites(dst);
+BUILTIN(ArrayCodeGeneric) {
+ return ArrayCodeGenericCommon(
+ &args,
+ isolate,
+ isolate->context()->global_context()->array_function());
}
@@ -312,6 +318,7 @@ static void MoveElements(Heap* heap,
FixedArray* src,
int src_index,
int len) {
+ if (len == 0) return;
ASSERT(dst->map() != HEAP->fixed_cow_array_map());
memmove(dst->data_start() + dst_index,
src->data_start() + src_index,
@@ -371,9 +378,11 @@ static FixedArray* LeftTrimFixedArray(Heap* heap,
int size_delta = to_trim * kPointerSize;
if (heap->marking()->TransferMark(elms->address(),
elms->address() + size_delta)) {
- MemoryChunk::IncrementLiveBytes(elms->address(), -size_delta);
+ MemoryChunk::IncrementLiveBytesFromMutator(elms->address(), -size_delta);
}
+ HEAP_PROFILE(heap, ObjectMoveEvent(elms->address(),
+ elms->address() + size_delta));
return FixedArray::cast(HeapObject::FromAddress(
elms->address() + to_trim * kPointerSize));
}
@@ -385,9 +394,6 @@ static bool ArrayPrototypeHasNoElements(Heap* heap,
// This method depends on non writability of Object and Array prototype
// fields.
if (array_proto->elements() != heap->empty_fixed_array()) return false;
- // Hidden prototype
- array_proto = JSObject::cast(array_proto->GetPrototype());
- ASSERT(array_proto->elements() == heap->empty_fixed_array());
// Object.prototype
Object* proto = array_proto->GetPrototype();
if (proto == heap->null_value()) return false;
@@ -427,7 +433,8 @@ static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
MaybeObject* maybe_array = array->EnsureCanContainElements(
args,
first_added_arg,
- args_length - first_added_arg);
+ args_length - first_added_arg,
+ DONT_ALLOW_DOUBLE_ELEMENTS);
if (maybe_array->IsFailure()) return maybe_array;
return array->elements();
}
@@ -504,10 +511,8 @@ BUILTIN(ArrayPush) {
}
FixedArray* new_elms = FixedArray::cast(obj);
- AssertNoAllocation no_gc;
- if (len > 0) {
- CopyElements(heap, &no_gc, new_elms, 0, elms, 0, len);
- }
+ CopyObjectToObjectElements(elms, FAST_ELEMENTS, 0,
+ new_elms, FAST_ELEMENTS, 0, len);
FillWithHoles(heap, new_elms, new_length, capacity);
elms = new_elms;
@@ -630,7 +635,8 @@ BUILTIN(ArrayUnshift) {
ASSERT(to_add <= (Smi::kMaxValue - len));
MaybeObject* maybe_object =
- array->EnsureCanContainElements(&args, 1, to_add);
+ array->EnsureCanContainElements(&args, 1, to_add,
+ DONT_ALLOW_DOUBLE_ELEMENTS);
if (maybe_object->IsFailure()) return maybe_object;
if (new_length > elms->length()) {
@@ -641,10 +647,8 @@ BUILTIN(ArrayUnshift) {
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
FixedArray* new_elms = FixedArray::cast(obj);
- AssertNoAllocation no_gc;
- if (len > 0) {
- CopyElements(heap, &no_gc, new_elms, to_add, elms, 0, len);
- }
+ CopyObjectToObjectElements(elms, FAST_ELEMENTS, 0,
+ new_elms, FAST_ELEMENTS, to_add, len);
FillWithHoles(heap, new_elms, new_length, capacity);
elms = new_elms;
array->set_elements(elms);
@@ -742,36 +746,22 @@ BUILTIN(ArraySlice) {
int final = (relative_end < 0) ? Max(len + relative_end, 0)
: Min(relative_end, len);
- // Calculate the length of result array.
- int result_len = final - k;
- if (result_len <= 0) {
- return AllocateEmptyJSArray(heap);
- }
+ ElementsKind elements_kind = JSObject::cast(receiver)->GetElementsKind();
- Object* result;
- { MaybeObject* maybe_result = AllocateJSArray(heap);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- JSArray* result_array = JSArray::cast(result);
+ // Calculate the length of result array.
+ int result_len = Max(final - k, 0);
- { MaybeObject* maybe_result =
- heap->AllocateUninitializedFixedArray(result_len);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- FixedArray* result_elms = FixedArray::cast(result);
+ MaybeObject* maybe_array =
+ heap->AllocateJSArrayAndStorage(elements_kind,
+ result_len,
+ result_len);
+ JSArray* result_array;
+ if (!maybe_array->To(&result_array)) return maybe_array;
- MaybeObject* maybe_object =
- result_array->EnsureCanContainElements(result_elms);
- if (maybe_object->IsFailure()) return maybe_object;
-
- AssertNoAllocation no_gc;
- CopyElements(heap, &no_gc, result_elms, 0, elms, k, result_len);
+ CopyObjectToObjectElements(elms, FAST_ELEMENTS, k,
+ FixedArray::cast(result_array->elements()),
+ FAST_ELEMENTS, 0, result_len);
- // Set elements.
- result_array->set_elements(result_elms);
-
- // Set the length.
- result_array->set_length(Smi::FromInt(result_len));
return result_array;
}
@@ -832,39 +822,19 @@ BUILTIN(ArraySplice) {
}
JSArray* result_array = NULL;
- if (actual_delete_count == 0) {
- Object* result;
- { MaybeObject* maybe_result = AllocateEmptyJSArray(heap);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- result_array = JSArray::cast(result);
- } else {
- // Allocate result array.
- Object* result;
- { MaybeObject* maybe_result = AllocateJSArray(heap);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- result_array = JSArray::cast(result);
-
- { MaybeObject* maybe_result =
- heap->AllocateUninitializedFixedArray(actual_delete_count);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- FixedArray* result_elms = FixedArray::cast(result);
+ ElementsKind elements_kind =
+ JSObject::cast(receiver)->GetElementsKind();
+ MaybeObject* maybe_array =
+ heap->AllocateJSArrayAndStorage(elements_kind,
+ actual_delete_count,
+ actual_delete_count);
+ if (!maybe_array->To(&result_array)) return maybe_array;
- AssertNoAllocation no_gc;
+ {
// Fill newly created array.
- CopyElements(heap,
- &no_gc,
- result_elms, 0,
- elms, actual_start,
- actual_delete_count);
-
- // Set elements.
- result_array->set_elements(result_elms);
-
- // Set the length.
- result_array->set_length(Smi::FromInt(actual_delete_count));
+ CopyObjectToObjectElements(elms, FAST_ELEMENTS, actual_start,
+ FixedArray::cast(result_array->elements()),
+ FAST_ELEMENTS, 0, actual_delete_count);
}
int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0;
@@ -879,7 +849,7 @@ BUILTIN(ArraySplice) {
if (trim_array) {
const int delta = actual_delete_count - item_count;
- if (actual_start > 0) {
+ {
AssertNoAllocation no_gc;
MoveElements(heap, &no_gc, elms, delta, elms, 0, actual_start);
}
@@ -911,18 +881,17 @@ BUILTIN(ArraySplice) {
}
FixedArray* new_elms = FixedArray::cast(obj);
- AssertNoAllocation no_gc;
- // Copy the part before actual_start as is.
- if (actual_start > 0) {
- CopyElements(heap, &no_gc, new_elms, 0, elms, 0, actual_start);
- }
- const int to_copy = len - actual_delete_count - actual_start;
- if (to_copy > 0) {
- CopyElements(heap, &no_gc,
- new_elms, actual_start + item_count,
- elms, actual_start + actual_delete_count,
- to_copy);
+ {
+ // Copy the part before actual_start as is.
+ CopyObjectToObjectElements(elms, FAST_ELEMENTS, 0,
+ new_elms, FAST_ELEMENTS, 0, actual_start);
+ const int to_copy = len - actual_delete_count - actual_start;
+ CopyObjectToObjectElements(elms, FAST_ELEMENTS,
+ actual_start + actual_delete_count,
+ new_elms, FAST_ELEMENTS,
+ actual_start + item_count, to_copy);
}
+
FillWithHoles(heap, new_elms, new_length, capacity);
elms = new_elms;
@@ -966,6 +935,7 @@ BUILTIN(ArrayConcat) {
// and calculating total length.
int n_arguments = args.length();
int result_len = 0;
+ ElementsKind elements_kind = FAST_SMI_ONLY_ELEMENTS;
for (int i = 0; i < n_arguments; i++) {
Object* arg = args[i];
if (!arg->IsJSArray() || !JSArray::cast(arg)->HasFastTypeElements()
@@ -985,54 +955,35 @@ BUILTIN(ArrayConcat) {
if (result_len > FixedArray::kMaxLength) {
return CallJsBuiltin(isolate, "ArrayConcat", args);
}
- }
- if (result_len == 0) {
- return AllocateEmptyJSArray(heap);
+ if (!JSArray::cast(arg)->HasFastSmiOnlyElements()) {
+ elements_kind = FAST_ELEMENTS;
+ }
}
// Allocate result.
- Object* result;
- { MaybeObject* maybe_result = AllocateJSArray(heap);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- JSArray* result_array = JSArray::cast(result);
-
- { MaybeObject* maybe_result =
- heap->AllocateUninitializedFixedArray(result_len);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- FixedArray* result_elms = FixedArray::cast(result);
-
- // Ensure element type transitions happen before copying elements in.
- if (result_array->HasFastSmiOnlyElements()) {
- for (int i = 0; i < n_arguments; i++) {
- JSArray* array = JSArray::cast(args[i]);
- if (!array->HasFastSmiOnlyElements()) {
- result_array->EnsureCanContainNonSmiElements();
- break;
- }
- }
- }
+ JSArray* result_array;
+ MaybeObject* maybe_array =
+ heap->AllocateJSArrayAndStorage(elements_kind,
+ result_len,
+ result_len);
+ if (!maybe_array->To(&result_array)) return maybe_array;
+ if (result_len == 0) return result_array;
// Copy data.
- AssertNoAllocation no_gc;
int start_pos = 0;
+ FixedArray* result_elms(FixedArray::cast(result_array->elements()));
for (int i = 0; i < n_arguments; i++) {
JSArray* array = JSArray::cast(args[i]);
int len = Smi::cast(array->length())->value();
- if (len > 0) {
- FixedArray* elms = FixedArray::cast(array->elements());
- CopyElements(heap, &no_gc, result_elms, start_pos, elms, 0, len);
- start_pos += len;
- }
+ FixedArray* elms = FixedArray::cast(array->elements());
+ CopyObjectToObjectElements(elms, FAST_ELEMENTS, 0,
+ result_elms, FAST_ELEMENTS,
+ start_pos, len);
+ start_pos += len;
}
ASSERT(start_pos == result_len);
- // Set the length and elements.
- result_array->set_length(Smi::FromInt(result_len));
- result_array->set_elements(result_elms);
-
return result_array;
}
@@ -1536,18 +1487,30 @@ static void Generate_KeyedStoreIC_DebugBreak(MacroAssembler* masm) {
}
-static void Generate_ConstructCall_DebugBreak(MacroAssembler* masm) {
- Debug::GenerateConstructCallDebugBreak(masm);
+static void Generate_Return_DebugBreak(MacroAssembler* masm) {
+ Debug::GenerateReturnDebugBreak(masm);
}
-static void Generate_Return_DebugBreak(MacroAssembler* masm) {
- Debug::GenerateReturnDebugBreak(masm);
+static void Generate_CallFunctionStub_DebugBreak(MacroAssembler* masm) {
+ Debug::GenerateCallFunctionStubDebugBreak(masm);
+}
+
+
+static void Generate_CallFunctionStub_Recording_DebugBreak(
+ MacroAssembler* masm) {
+ Debug::GenerateCallFunctionStubRecordDebugBreak(masm);
}
-static void Generate_StubNoRegisters_DebugBreak(MacroAssembler* masm) {
- Debug::GenerateStubNoRegistersDebugBreak(masm);
+static void Generate_CallConstructStub_DebugBreak(MacroAssembler* masm) {
+ Debug::GenerateCallConstructStubDebugBreak(masm);
+}
+
+
+static void Generate_CallConstructStub_Recording_DebugBreak(
+ MacroAssembler* masm) {
+ Debug::GenerateCallConstructStubRecordDebugBreak(masm);
}
@@ -1604,30 +1567,30 @@ struct BuiltinDesc {
BuiltinExtraArguments extra_args;
};
+#define BUILTIN_FUNCTION_TABLE_INIT { V8_ONCE_INIT, {} }
+
class BuiltinFunctionTable {
public:
- BuiltinFunctionTable() {
- Builtins::InitBuiltinFunctionTable();
+ BuiltinDesc* functions() {
+ CallOnce(&once_, &Builtins::InitBuiltinFunctionTable);
+ return functions_;
}
- static const BuiltinDesc* functions() { return functions_; }
-
- private:
- static BuiltinDesc functions_[Builtins::builtin_count + 1];
+ OnceType once_;
+ BuiltinDesc functions_[Builtins::builtin_count + 1];
friend class Builtins;
};
-BuiltinDesc BuiltinFunctionTable::functions_[Builtins::builtin_count + 1];
-
-static const BuiltinFunctionTable builtin_function_table_init;
+static BuiltinFunctionTable builtin_function_table =
+ BUILTIN_FUNCTION_TABLE_INIT;
// Define array of pointers to generators and C builtin functions.
// We do this in a sort of roundabout way so that we can do the initialization
// within the lexical scope of Builtins:: and within a context where
// Code::Flags names a non-abstract type.
void Builtins::InitBuiltinFunctionTable() {
- BuiltinDesc* functions = BuiltinFunctionTable::functions_;
+ BuiltinDesc* functions = builtin_function_table.functions_;
functions[builtin_count].generator = NULL;
functions[builtin_count].c_code = NULL;
functions[builtin_count].s_name = NULL;
@@ -1663,7 +1626,7 @@ void Builtins::InitBuiltinFunctionTable() {
#undef DEF_FUNCTION_PTR_A
}
-void Builtins::Setup(bool create_heap_objects) {
+void Builtins::SetUp(bool create_heap_objects) {
ASSERT(!initialized_);
Isolate* isolate = Isolate::Current();
Heap* heap = isolate->heap();
@@ -1671,7 +1634,7 @@ void Builtins::Setup(bool create_heap_objects) {
// Create a scope for the handles in the builtins.
HandleScope scope(isolate);
- const BuiltinDesc* functions = BuiltinFunctionTable::functions();
+ const BuiltinDesc* functions = builtin_function_table.functions();
// For now we generate builtin adaptor code into a stack-allocated
// buffer, before copying it into individual code objects. Be careful
diff --git a/src/3rdparty/v8/src/builtins.h b/src/3rdparty/v8/src/builtins.h
index 24059e7..f079139 100644
--- a/src/3rdparty/v8/src/builtins.h
+++ b/src/3rdparty/v8/src/builtins.h
@@ -44,6 +44,7 @@ enum BuiltinExtraArguments {
\
V(EmptyFunction, NO_EXTRA_ARGUMENTS) \
\
+ V(InternalArrayCodeGeneric, NO_EXTRA_ARGUMENTS) \
V(ArrayCodeGeneric, NO_EXTRA_ARGUMENTS) \
\
V(ArrayPush, NO_EXTRA_ARGUMENTS) \
@@ -66,8 +67,6 @@ enum BuiltinExtraArguments {
#define BUILTIN_LIST_A(V) \
V(ArgumentsAdaptorTrampoline, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
- V(JSConstructCall, BUILTIN, UNINITIALIZED, \
- Code::kNoExtraICState) \
V(JSConstructStubCountdown, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED, \
@@ -178,6 +177,8 @@ enum BuiltinExtraArguments {
V(FunctionApply, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
\
+ V(InternalArrayCode, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
V(ArrayCode, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
V(ArrayConstructCode, BUILTIN, UNINITIALIZED, \
@@ -192,27 +193,31 @@ enum BuiltinExtraArguments {
#ifdef ENABLE_DEBUGGER_SUPPORT
// Define list of builtins used by the debugger implemented in assembly.
-#define BUILTIN_LIST_DEBUG_A(V) \
- V(Return_DebugBreak, BUILTIN, DEBUG_BREAK, \
- Code::kNoExtraICState) \
- V(ConstructCall_DebugBreak, BUILTIN, DEBUG_BREAK, \
- Code::kNoExtraICState) \
- V(StubNoRegisters_DebugBreak, BUILTIN, DEBUG_BREAK, \
- Code::kNoExtraICState) \
- V(LoadIC_DebugBreak, LOAD_IC, DEBUG_BREAK, \
- Code::kNoExtraICState) \
- V(KeyedLoadIC_DebugBreak, KEYED_LOAD_IC, DEBUG_BREAK, \
- Code::kNoExtraICState) \
- V(StoreIC_DebugBreak, STORE_IC, DEBUG_BREAK, \
- Code::kNoExtraICState) \
- V(KeyedStoreIC_DebugBreak, KEYED_STORE_IC, DEBUG_BREAK, \
- Code::kNoExtraICState) \
- V(Slot_DebugBreak, BUILTIN, DEBUG_BREAK, \
- Code::kNoExtraICState) \
- V(PlainReturn_LiveEdit, BUILTIN, DEBUG_BREAK, \
- Code::kNoExtraICState) \
- V(FrameDropper_LiveEdit, BUILTIN, DEBUG_BREAK, \
- Code::kNoExtraICState)
+#define BUILTIN_LIST_DEBUG_A(V) \
+ V(Return_DebugBreak, BUILTIN, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(CallFunctionStub_DebugBreak, BUILTIN, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(CallFunctionStub_Recording_DebugBreak, BUILTIN, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(CallConstructStub_DebugBreak, BUILTIN, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(CallConstructStub_Recording_DebugBreak, BUILTIN, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(LoadIC_DebugBreak, LOAD_IC, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(KeyedLoadIC_DebugBreak, KEYED_LOAD_IC, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(StoreIC_DebugBreak, STORE_IC, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(KeyedStoreIC_DebugBreak, KEYED_STORE_IC, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(Slot_DebugBreak, BUILTIN, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(PlainReturn_LiveEdit, BUILTIN, DEBUG_BREAK, \
+ Code::kNoExtraICState) \
+ V(FrameDropper_LiveEdit, BUILTIN, DEBUG_BREAK, \
+ Code::kNoExtraICState)
#else
#define BUILTIN_LIST_DEBUG_A(V)
#endif
@@ -262,7 +267,7 @@ class Builtins {
// Generate all builtin code objects. Should be called once during
// isolate initialization.
- void Setup(bool create_heap_objects);
+ void SetUp(bool create_heap_objects);
void TearDown();
// Garbage collection support.
@@ -343,7 +348,6 @@ class Builtins {
static void Generate_Adaptor(MacroAssembler* masm,
CFunctionId id,
BuiltinExtraArguments extra_args);
- static void Generate_JSConstructCall(MacroAssembler* masm);
static void Generate_JSConstructStubCountdown(MacroAssembler* masm);
static void Generate_JSConstructStubGeneric(MacroAssembler* masm);
static void Generate_JSConstructStubApi(MacroAssembler* masm);
@@ -359,6 +363,7 @@ class Builtins {
static void Generate_FunctionCall(MacroAssembler* masm);
static void Generate_FunctionApply(MacroAssembler* masm);
+ static void Generate_InternalArrayCode(MacroAssembler* masm);
static void Generate_ArrayCode(MacroAssembler* masm);
static void Generate_ArrayConstructCode(MacroAssembler* masm);
diff --git a/src/3rdparty/v8/src/bytecodes-irregexp.h b/src/3rdparty/v8/src/bytecodes-irregexp.h
index 93218ea..c7cc66e 100644
--- a/src/3rdparty/v8/src/bytecodes-irregexp.h
+++ b/src/3rdparty/v8/src/bytecodes-irregexp.h
@@ -1,4 +1,4 @@
-// Copyright 2008-2009 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -33,12 +33,12 @@ namespace v8 {
namespace internal {
-static const int BYTECODE_MASK = 0xff;
+const int BYTECODE_MASK = 0xff;
// The first argument is packed in with the byte code in one word, but so it
// has 24 bits, but it can be positive and negative so only use 23 bits for
// positive values.
-static const unsigned int MAX_FIRST_ARG = 0x7fffffu;
-static const int BYTECODE_SHIFT = 8;
+const unsigned int MAX_FIRST_ARG = 0x7fffffu;
+const int BYTECODE_SHIFT = 8;
#define BYTECODE_ITERATOR(V) \
V(BREAK, 0, 4) /* bc8 */ \
@@ -72,24 +72,23 @@ V(AND_CHECK_4_CHARS, 27, 16) /* bc8 pad24 uint32 uint32 addr32 */ \
V(AND_CHECK_CHAR, 28, 12) /* bc8 pad8 uint16 uint32 addr32 */ \
V(AND_CHECK_NOT_4_CHARS, 29, 16) /* bc8 pad24 uint32 uint32 addr32 */ \
V(AND_CHECK_NOT_CHAR, 30, 12) /* bc8 pad8 uint16 uint32 addr32 */ \
-V(MINUS_AND_CHECK_NOT_CHAR, 31, 12) /* bc8 pad8 uc16 uc16 addr32 */ \
-V(CHECK_LT, 32, 8) /* bc8 pad8 uc16 addr32 */ \
-V(CHECK_GT, 33, 8) /* bc8 pad8 uc16 addr32 */ \
-V(CHECK_NOT_BACK_REF, 34, 8) /* bc8 reg_idx24 addr32 */ \
-V(CHECK_NOT_BACK_REF_NO_CASE, 35, 8) /* bc8 reg_idx24 addr32 */ \
-V(CHECK_NOT_REGS_EQUAL, 36, 12) /* bc8 regidx24 reg_idx32 addr32 */ \
-V(LOOKUP_MAP1, 37, 12) /* bc8 pad8 start16 bit_map_addr32 addr32 */ \
-V(LOOKUP_MAP2, 38, 96) /* bc8 pad8 start16 half_nibble_map_addr32* */ \
-V(LOOKUP_MAP8, 39, 96) /* bc8 pad8 start16 byte_map addr32* */ \
-V(LOOKUP_HI_MAP8, 40, 96) /* bc8 start24 byte_map_addr32 addr32* */ \
-V(CHECK_REGISTER_LT, 41, 12) /* bc8 reg_idx24 value32 addr32 */ \
-V(CHECK_REGISTER_GE, 42, 12) /* bc8 reg_idx24 value32 addr32 */ \
-V(CHECK_REGISTER_EQ_POS, 43, 8) /* bc8 reg_idx24 addr32 */ \
-V(CHECK_AT_START, 44, 8) /* bc8 pad24 addr32 */ \
-V(CHECK_NOT_AT_START, 45, 8) /* bc8 pad24 addr32 */ \
-V(CHECK_GREEDY, 46, 8) /* bc8 pad24 addr32 */ \
-V(ADVANCE_CP_AND_GOTO, 47, 8) /* bc8 offset24 addr32 */ \
-V(SET_CURRENT_POSITION_FROM_END, 48, 4) /* bc8 idx24 */
+V(MINUS_AND_CHECK_NOT_CHAR, 31, 12) /* bc8 pad8 uc16 uc16 uc16 addr32 */ \
+V(CHECK_CHAR_IN_RANGE, 32, 12) /* bc8 pad24 uc16 uc16 addr32 */ \
+V(CHECK_CHAR_NOT_IN_RANGE, 33, 12) /* bc8 pad24 uc16 uc16 addr32 */ \
+V(CHECK_BIT_IN_TABLE, 34, 24) /* bc8 pad24 addr32 bits128 */ \
+V(CHECK_LT, 35, 8) /* bc8 pad8 uc16 addr32 */ \
+V(CHECK_GT, 36, 8) /* bc8 pad8 uc16 addr32 */ \
+V(CHECK_NOT_BACK_REF, 37, 8) /* bc8 reg_idx24 addr32 */ \
+V(CHECK_NOT_BACK_REF_NO_CASE, 38, 8) /* bc8 reg_idx24 addr32 */ \
+V(CHECK_NOT_REGS_EQUAL, 39, 12) /* bc8 regidx24 reg_idx32 addr32 */ \
+V(CHECK_REGISTER_LT, 40, 12) /* bc8 reg_idx24 value32 addr32 */ \
+V(CHECK_REGISTER_GE, 41, 12) /* bc8 reg_idx24 value32 addr32 */ \
+V(CHECK_REGISTER_EQ_POS, 42, 8) /* bc8 reg_idx24 addr32 */ \
+V(CHECK_AT_START, 43, 8) /* bc8 pad24 addr32 */ \
+V(CHECK_NOT_AT_START, 44, 8) /* bc8 pad24 addr32 */ \
+V(CHECK_GREEDY, 45, 8) /* bc8 pad24 addr32 */ \
+V(ADVANCE_CP_AND_GOTO, 46, 8) /* bc8 offset24 addr32 */ \
+V(SET_CURRENT_POSITION_FROM_END, 47, 4) /* bc8 idx24 */
#define DECLARE_BYTECODES(name, code, length) \
static const int BC_##name = code;
diff --git a/src/3rdparty/v8/src/char-predicates-inl.h b/src/3rdparty/v8/src/char-predicates-inl.h
index 0dfc80d..1a89ef3 100644
--- a/src/3rdparty/v8/src/char-predicates-inl.h
+++ b/src/3rdparty/v8/src/char-predicates-inl.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -52,7 +52,7 @@ inline bool IsLineFeed(uc32 c) {
}
-static inline bool IsInRange(int value, int lower_limit, int higher_limit) {
+inline bool IsInRange(int value, int lower_limit, int higher_limit) {
ASSERT(lower_limit <= higher_limit);
return static_cast<unsigned int>(value - lower_limit) <=
static_cast<unsigned int>(higher_limit - lower_limit);
diff --git a/src/3rdparty/v8/src/char-predicates.h b/src/3rdparty/v8/src/char-predicates.h
index 5a901a2..b97191f 100644
--- a/src/3rdparty/v8/src/char-predicates.h
+++ b/src/3rdparty/v8/src/char-predicates.h
@@ -57,6 +57,8 @@ struct IdentifierPart {
static inline bool Is(uc32 c) {
return IdentifierStart::Is(c)
|| unibrow::Number::Is(c)
+ || c == 0x200C // U+200C is Zero-Width Non-Joiner.
+ || c == 0x200D // U+200D is Zero-Width Joiner.
|| unibrow::CombiningMark::Is(c)
|| unibrow::ConnectorPunctuation::Is(c);
}
diff --git a/src/3rdparty/v8/src/checks.h b/src/3rdparty/v8/src/checks.h
index 832f778..608aa14 100644
--- a/src/3rdparty/v8/src/checks.h
+++ b/src/3rdparty/v8/src/checks.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -51,28 +51,20 @@ extern "C" void V8_Fatal(const char* file, int line, const char* format, ...);
#endif
-// Used by the CHECK macro -- should not be called directly.
-static inline void CheckHelper(const char* file,
- int line,
- const char* source,
- bool condition) {
- if (!condition)
- V8_Fatal(file, line, "CHECK(%s) failed", source);
-}
-
-
// The CHECK macro checks that the given condition is true; if not, it
// prints a message to stderr and aborts.
-#define CHECK(condition) do { \
- if (!(condition)) CheckHelper(__FILE__, __LINE__, #condition, false); \
+#define CHECK(condition) do { \
+ if (!(condition)) { \
+ V8_Fatal(__FILE__, __LINE__, "CHECK(%s) failed", #condition); \
+ } \
} while (0)
// Helper function used by the CHECK_EQ function when given int
// arguments. Should not be called directly.
-static inline void CheckEqualsHelper(const char* file, int line,
- const char* expected_source, int expected,
- const char* value_source, int value) {
+inline void CheckEqualsHelper(const char* file, int line,
+ const char* expected_source, int expected,
+ const char* value_source, int value) {
if (expected != value) {
V8_Fatal(file, line,
"CHECK_EQ(%s, %s) failed\n# Expected: %i\n# Found: %i",
@@ -83,11 +75,11 @@ static inline void CheckEqualsHelper(const char* file, int line,
// Helper function used by the CHECK_EQ function when given int64_t
// arguments. Should not be called directly.
-static inline void CheckEqualsHelper(const char* file, int line,
- const char* expected_source,
- int64_t expected,
- const char* value_source,
- int64_t value) {
+inline void CheckEqualsHelper(const char* file, int line,
+ const char* expected_source,
+ int64_t expected,
+ const char* value_source,
+ int64_t value) {
if (expected != value) {
// Print int64_t values in hex, as two int32s,
// to avoid platform-dependencies.
@@ -105,12 +97,12 @@ static inline void CheckEqualsHelper(const char* file, int line,
// Helper function used by the CHECK_NE function when given int
// arguments. Should not be called directly.
-static inline void CheckNonEqualsHelper(const char* file,
- int line,
- const char* unexpected_source,
- int unexpected,
- const char* value_source,
- int value) {
+inline void CheckNonEqualsHelper(const char* file,
+ int line,
+ const char* unexpected_source,
+ int unexpected,
+ const char* value_source,
+ int value) {
if (unexpected == value) {
V8_Fatal(file, line, "CHECK_NE(%s, %s) failed\n# Value: %i",
unexpected_source, value_source, value);
@@ -120,12 +112,12 @@ static inline void CheckNonEqualsHelper(const char* file,
// Helper function used by the CHECK function when given string
// arguments. Should not be called directly.
-static inline void CheckEqualsHelper(const char* file,
- int line,
- const char* expected_source,
- const char* expected,
- const char* value_source,
- const char* value) {
+inline void CheckEqualsHelper(const char* file,
+ int line,
+ const char* expected_source,
+ const char* expected,
+ const char* value_source,
+ const char* value) {
if ((expected == NULL && value != NULL) ||
(expected != NULL && value == NULL) ||
(expected != NULL && value != NULL && strcmp(expected, value) != 0)) {
@@ -136,12 +128,12 @@ static inline void CheckEqualsHelper(const char* file,
}
-static inline void CheckNonEqualsHelper(const char* file,
- int line,
- const char* expected_source,
- const char* expected,
- const char* value_source,
- const char* value) {
+inline void CheckNonEqualsHelper(const char* file,
+ int line,
+ const char* expected_source,
+ const char* expected,
+ const char* value_source,
+ const char* value) {
if (expected == value ||
(expected != NULL && value != NULL && strcmp(expected, value) == 0)) {
V8_Fatal(file, line, "CHECK_NE(%s, %s) failed\n# Value: %s",
@@ -152,12 +144,12 @@ static inline void CheckNonEqualsHelper(const char* file,
// Helper function used by the CHECK function when given pointer
// arguments. Should not be called directly.
-static inline void CheckEqualsHelper(const char* file,
- int line,
- const char* expected_source,
- const void* expected,
- const char* value_source,
- const void* value) {
+inline void CheckEqualsHelper(const char* file,
+ int line,
+ const char* expected_source,
+ const void* expected,
+ const char* value_source,
+ const void* value) {
if (expected != value) {
V8_Fatal(file, line,
"CHECK_EQ(%s, %s) failed\n# Expected: %p\n# Found: %p",
@@ -167,12 +159,12 @@ static inline void CheckEqualsHelper(const char* file,
}
-static inline void CheckNonEqualsHelper(const char* file,
- int line,
- const char* expected_source,
- const void* expected,
- const char* value_source,
- const void* value) {
+inline void CheckNonEqualsHelper(const char* file,
+ int line,
+ const char* expected_source,
+ const void* expected,
+ const char* value_source,
+ const void* value) {
if (expected == value) {
V8_Fatal(file, line, "CHECK_NE(%s, %s) failed\n# Value: %p",
expected_source, value_source, value);
@@ -182,12 +174,12 @@ static inline void CheckNonEqualsHelper(const char* file,
// Helper function used by the CHECK function when given floating
// point arguments. Should not be called directly.
-static inline void CheckEqualsHelper(const char* file,
- int line,
- const char* expected_source,
- double expected,
- const char* value_source,
- double value) {
+inline void CheckEqualsHelper(const char* file,
+ int line,
+ const char* expected_source,
+ double expected,
+ const char* value_source,
+ double value) {
// Force values to 64 bit memory to truncate 80 bit precision on IA32.
volatile double* exp = new double[1];
*exp = expected;
@@ -203,12 +195,12 @@ static inline void CheckEqualsHelper(const char* file,
}
-static inline void CheckNonEqualsHelper(const char* file,
- int line,
- const char* expected_source,
- double expected,
- const char* value_source,
- double value) {
+inline void CheckNonEqualsHelper(const char* file,
+ int line,
+ const char* expected_source,
+ double expected,
+ const char* value_source,
+ double value) {
// Force values to 64 bit memory to truncate 80 bit precision on IA32.
volatile double* exp = new double[1];
*exp = expected;
@@ -265,16 +257,16 @@ extern bool FLAG_enable_slow_asserts;
// The ASSERT macro is equivalent to CHECK except that it only
// generates code in debug builds.
#ifdef DEBUG
-#define ASSERT_RESULT(expr) CHECK(expr)
-#define ASSERT(condition) CHECK(condition)
-#define ASSERT_EQ(v1, v2) CHECK_EQ(v1, v2)
-#define ASSERT_NE(v1, v2) CHECK_NE(v1, v2)
-#define ASSERT_GE(v1, v2) CHECK_GE(v1, v2)
-#define ASSERT_LT(v1, v2) CHECK_LT(v1, v2)
-#define ASSERT_LE(v1, v2) CHECK_LE(v1, v2)
-#define SLOW_ASSERT(condition) if (FLAG_enable_slow_asserts) CHECK(condition)
+#define ASSERT_RESULT(expr) CHECK(expr)
+#define ASSERT(condition) CHECK(condition)
+#define ASSERT_EQ(v1, v2) CHECK_EQ(v1, v2)
+#define ASSERT_NE(v1, v2) CHECK_NE(v1, v2)
+#define ASSERT_GE(v1, v2) CHECK_GE(v1, v2)
+#define ASSERT_LT(v1, v2) CHECK_LT(v1, v2)
+#define ASSERT_LE(v1, v2) CHECK_LE(v1, v2)
+#define SLOW_ASSERT(condition) CHECK(!FLAG_enable_slow_asserts || (condition))
#else
-#define ASSERT_RESULT(expr) (expr)
+#define ASSERT_RESULT(expr) (expr)
#define ASSERT(condition) ((void) 0)
#define ASSERT_EQ(v1, v2) ((void) 0)
#define ASSERT_NE(v1, v2) ((void) 0)
diff --git a/src/3rdparty/v8/src/code-stubs.cc b/src/3rdparty/v8/src/code-stubs.cc
index b437436..11016c8 100644
--- a/src/3rdparty/v8/src/code-stubs.cc
+++ b/src/3rdparty/v8/src/code-stubs.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -40,7 +40,7 @@ namespace internal {
bool CodeStub::FindCodeInCache(Code** code_out) {
Heap* heap = Isolate::Current()->heap();
int index = heap->code_stubs()->FindEntry(GetKey());
- if (index != NumberDictionary::kNotFound) {
+ if (index != UnseededNumberDictionary::kNotFound) {
*code_out = Code::cast(heap->code_stubs()->ValueAt(index));
return true;
}
@@ -101,7 +101,14 @@ Handle<Code> CodeStub::GetCode() {
Factory* factory = isolate->factory();
Heap* heap = isolate->heap();
Code* code;
- if (!FindCodeInCache(&code)) {
+ if (UseSpecialCache()
+ ? FindCodeInSpecialCache(&code)
+ : FindCodeInCache(&code)) {
+ ASSERT(IsPregenerated() == code->is_pregenerated());
+ return Handle<Code>(code);
+ }
+
+ {
HandleScope scope(isolate);
// Generate the new code.
@@ -119,67 +126,28 @@ Handle<Code> CodeStub::GetCode() {
Handle<Code> new_object = factory->NewCode(
desc, flags, masm.CodeObject(), NeedsImmovableCode());
RecordCodeGeneration(*new_object, &masm);
- FinishCode(*new_object);
-
- // Update the dictionary and the root in Heap.
- Handle<NumberDictionary> dict =
- factory->DictionaryAtNumberPut(
- Handle<NumberDictionary>(heap->code_stubs()),
- GetKey(),
- new_object);
- heap->public_set_code_stubs(*dict);
+ FinishCode(new_object);
+
+ if (UseSpecialCache()) {
+ AddToSpecialCache(new_object);
+ } else {
+ // Update the dictionary and the root in Heap.
+ Handle<UnseededNumberDictionary> dict =
+ factory->DictionaryAtNumberPut(
+ Handle<UnseededNumberDictionary>(heap->code_stubs()),
+ GetKey(),
+ new_object);
+ heap->public_set_code_stubs(*dict);
+ }
code = *new_object;
- Activate(code);
- } else {
- CHECK(IsPregenerated() == code->is_pregenerated());
}
+ Activate(code);
ASSERT(!NeedsImmovableCode() || heap->lo_space()->Contains(code));
return Handle<Code>(code, isolate);
}
-MaybeObject* CodeStub::TryGetCode() {
- Code* code;
- if (!FindCodeInCache(&code)) {
- // Generate the new code.
- MacroAssembler masm(Isolate::Current(), NULL, 256);
- GenerateCode(&masm);
- Heap* heap = masm.isolate()->heap();
-
- // Create the code object.
- CodeDesc desc;
- masm.GetCode(&desc);
-
- // Try to copy the generated code into a heap object.
- Code::Flags flags = Code::ComputeFlags(
- static_cast<Code::Kind>(GetCodeKind()),
- GetICState());
- Object* new_object;
- { MaybeObject* maybe_new_object =
- heap->CreateCode(desc, flags, masm.CodeObject());
- if (!maybe_new_object->ToObject(&new_object)) return maybe_new_object;
- }
- code = Code::cast(new_object);
- RecordCodeGeneration(code, &masm);
- FinishCode(code);
-
- // Try to update the code cache but do not fail if unable.
- MaybeObject* maybe_new_object =
- heap->code_stubs()->AtNumberPut(GetKey(), code);
- if (maybe_new_object->ToObject(&new_object)) {
- heap->public_set_code_stubs(NumberDictionary::cast(new_object));
- } else if (MustBeInStubCache()) {
- return maybe_new_object;
- }
-
- Activate(code);
- }
-
- return code;
-}
-
-
const char* CodeStub::MajorName(CodeStub::Major major_key,
bool allow_unknown_keys) {
switch (major_key) {
@@ -200,6 +168,32 @@ void CodeStub::PrintName(StringStream* stream) {
}
+void ICCompareStub::AddToSpecialCache(Handle<Code> new_object) {
+ ASSERT(*known_map_ != NULL);
+ Isolate* isolate = new_object->GetIsolate();
+ Factory* factory = isolate->factory();
+ return Map::UpdateCodeCache(known_map_,
+ factory->compare_ic_symbol(),
+ new_object);
+}
+
+
+bool ICCompareStub::FindCodeInSpecialCache(Code** code_out) {
+ Isolate* isolate = known_map_->GetIsolate();
+ Factory* factory = isolate->factory();
+ Code::Flags flags = Code::ComputeFlags(
+ static_cast<Code::Kind>(GetCodeKind()),
+ UNINITIALIZED);
+ Handle<Object> probe(
+ known_map_->FindInCodeCache(*factory->compare_ic_symbol(), flags));
+ if (probe->IsCode()) {
+ *code_out = Code::cast(*probe);
+ return true;
+ }
+ return false;
+}
+
+
int ICCompareStub::MinorKey() {
return OpField::encode(op_ - Token::EQ) | StateField::encode(state_);
}
@@ -225,6 +219,10 @@ void ICCompareStub::Generate(MacroAssembler* masm) {
case CompareIC::OBJECTS:
GenerateObjects(masm);
break;
+ case CompareIC::KNOWN_OBJECTS:
+ ASSERT(*known_map_ != NULL);
+ GenerateKnownObjects(masm);
+ break;
default:
UNREACHABLE();
}
@@ -254,6 +252,14 @@ void InstanceofStub::PrintName(StringStream* stream) {
}
+void JSEntryStub::FinishCode(Handle<Code> code) {
+ Handle<FixedArray> handler_table =
+ code->GetIsolate()->factory()->NewFixedArray(1, TENURED);
+ handler_table->set(0, Smi::FromInt(handler_offset_));
+ code->set_handler_table(*handler_table);
+}
+
+
void KeyedLoadElementStub::Generate(MacroAssembler* masm) {
switch (elements_kind_) {
case FAST_ELEMENTS:
@@ -290,12 +296,14 @@ void KeyedStoreElementStub::Generate(MacroAssembler* masm) {
case FAST_SMI_ONLY_ELEMENTS: {
KeyedStoreStubCompiler::GenerateStoreFastElement(masm,
is_js_array_,
- elements_kind_);
+ elements_kind_,
+ grow_mode_);
}
break;
case FAST_DOUBLE_ELEMENTS:
KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(masm,
- is_js_array_);
+ is_js_array_,
+ grow_mode_);
break;
case EXTERNAL_BYTE_ELEMENTS:
case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
@@ -336,6 +344,12 @@ void CallFunctionStub::PrintName(StringStream* stream) {
}
+void CallConstructStub::PrintName(StringStream* stream) {
+ stream->Add("CallConstructStub");
+ if (RecordCallTarget()) stream->Add("_Recording");
+}
+
+
void ToBooleanStub::PrintName(StringStream* stream) {
stream->Add("ToBooleanStub_");
types_.Print(stream);
@@ -428,10 +442,13 @@ void ElementsTransitionAndStoreStub::Generate(MacroAssembler* masm) {
}
KeyedStoreStubCompiler::GenerateStoreFastElement(masm,
is_jsarray_,
- FAST_ELEMENTS);
+ FAST_ELEMENTS,
+ grow_mode_);
} else if (from_ == FAST_SMI_ONLY_ELEMENTS && to_ == FAST_DOUBLE_ELEMENTS) {
ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
- KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(masm, is_jsarray_);
+ KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(masm,
+ is_jsarray_,
+ grow_mode_);
} else {
UNREACHABLE();
}
diff --git a/src/3rdparty/v8/src/code-stubs.h b/src/3rdparty/v8/src/code-stubs.h
index 1c5747a..b67e961 100644
--- a/src/3rdparty/v8/src/code-stubs.h
+++ b/src/3rdparty/v8/src/code-stubs.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -38,6 +38,7 @@ namespace internal {
// List of code stubs used on all platforms.
#define CODE_STUB_LIST_ALL_PLATFORMS(V) \
V(CallFunction) \
+ V(CallConstruct) \
V(UnaryOp) \
V(BinaryOp) \
V(StringAdd) \
@@ -54,10 +55,12 @@ namespace internal {
V(ConvertToDouble) \
V(WriteInt32ToHeapNumber) \
V(StackCheck) \
+ V(Interrupt) \
V(FastNewClosure) \
V(FastNewContext) \
V(FastNewBlockContext) \
V(FastCloneShallowArray) \
+ V(FastCloneShallowObject) \
V(ToBoolean) \
V(ToNumber) \
V(ArgumentsAccess) \
@@ -118,11 +121,6 @@ class CodeStub BASE_EMBEDDED {
// Retrieve the code for the stub. Generate the code if needed.
Handle<Code> GetCode();
- // Retrieve the code for the stub if already generated. Do not
- // generate the code if not already generated and instead return a
- // retry after GC Failure object.
- MUST_USE_RESULT MaybeObject* TryGetCode();
-
static Major MajorKeyFromKey(uint32_t key) {
return static_cast<Major>(MajorKeyBits::decode(key));
}
@@ -160,14 +158,14 @@ class CodeStub BASE_EMBEDDED {
// result in a traversable stack.
virtual bool SometimesSetsUpAFrame() { return true; }
+ // Lookup the code in the (possibly custom) cache.
+ bool FindCodeInCache(Code** code_out);
+
protected:
static const int kMajorBits = 6;
static const int kMinorBits = kBitsPerInt - kSmiTagSize - kMajorBits;
private:
- // Lookup the code in the (possibly custom) cache.
- bool FindCodeInCache(Code** code_out);
-
// Nonvirtual wrapper around the stub-specific Generate function. Call
// this function to set up the macro assembler and generate the code.
void GenerateCode(MacroAssembler* masm);
@@ -180,11 +178,7 @@ class CodeStub BASE_EMBEDDED {
void RecordCodeGeneration(Code* code, MacroAssembler* masm);
// Finish the code object after it has been generated.
- virtual void FinishCode(Code* code) { }
-
- // Returns true if TryGetCode should fail if it failed
- // to register newly generated stub in the stub cache.
- virtual bool MustBeInStubCache() { return false; }
+ virtual void FinishCode(Handle<Code> code) { }
// Activate newly generated stub. Is called after
// registering stub in the stub cache.
@@ -202,6 +196,17 @@ class CodeStub BASE_EMBEDDED {
return UNINITIALIZED;
}
+ // Add the code to a specialized cache, specific to an individual
+ // stub type. Please note, this method must add the code object to a
+ // roots object, otherwise we will remove the code during GC.
+ virtual void AddToSpecialCache(Handle<Code> new_object) { }
+
+ // Find code in a specialized cache, work is delegated to the specific stub.
+ virtual bool FindCodeInSpecialCache(Code** code_out) { return false; }
+
+ // If a stub uses a special cache override this.
+ virtual bool UseSpecialCache() { return false; }
+
// Returns a name for logging/debugging purposes.
SmartArrayPointer<const char> GetName();
virtual void PrintName(StringStream* stream);
@@ -293,6 +298,18 @@ class StackCheckStub : public CodeStub {
};
+class InterruptStub : public CodeStub {
+ public:
+ InterruptStub() { }
+
+ void Generate(MacroAssembler* masm);
+
+ private:
+ Major MajorKey() { return Interrupt; }
+ int MinorKey() { return 0; }
+};
+
+
class ToNumberStub: public CodeStub {
public:
ToNumberStub() { }
@@ -307,16 +324,17 @@ class ToNumberStub: public CodeStub {
class FastNewClosureStub : public CodeStub {
public:
- explicit FastNewClosureStub(StrictModeFlag strict_mode)
- : strict_mode_(strict_mode) { }
+ explicit FastNewClosureStub(LanguageMode language_mode)
+ : language_mode_(language_mode) { }
void Generate(MacroAssembler* masm);
private:
Major MajorKey() { return FastNewClosure; }
- int MinorKey() { return strict_mode_; }
+ int MinorKey() { return language_mode_ == CLASSIC_MODE
+ ? kNonStrictMode : kStrictMode; }
- StrictModeFlag strict_mode_;
+ LanguageMode language_mode_;
};
@@ -325,7 +343,7 @@ class FastNewContextStub : public CodeStub {
static const int kMaximumSlots = 64;
explicit FastNewContextStub(int slots) : slots_(slots) {
- ASSERT(slots_ >= 0 && slots_ <= kMaximumSlots);
+ ASSERT(slots_ > 0 && slots_ <= kMaximumSlots);
}
void Generate(MacroAssembler* masm);
@@ -343,7 +361,7 @@ class FastNewBlockContextStub : public CodeStub {
static const int kMaximumSlots = 64;
explicit FastNewBlockContextStub(int slots) : slots_(slots) {
- ASSERT(slots_ >= 0 && slots_ <= kMaximumSlots);
+ ASSERT(slots_ > 0 && slots_ <= kMaximumSlots);
}
void Generate(MacroAssembler* masm);
@@ -364,14 +382,15 @@ class FastCloneShallowArrayStub : public CodeStub {
enum Mode {
CLONE_ELEMENTS,
CLONE_DOUBLE_ELEMENTS,
- COPY_ON_WRITE_ELEMENTS
+ COPY_ON_WRITE_ELEMENTS,
+ CLONE_ANY_ELEMENTS
};
FastCloneShallowArrayStub(Mode mode, int length)
: mode_(mode),
length_((mode == COPY_ON_WRITE_ELEMENTS) ? 0 : length) {
- ASSERT(length_ >= 0);
- ASSERT(length_ <= kMaximumClonedLength);
+ ASSERT_GE(length_, 0);
+ ASSERT_LE(length_, kMaximumClonedLength);
}
void Generate(MacroAssembler* masm);
@@ -382,12 +401,32 @@ class FastCloneShallowArrayStub : public CodeStub {
Major MajorKey() { return FastCloneShallowArray; }
int MinorKey() {
- ASSERT(mode_ == 0 || mode_ == 1 || mode_ == 2);
- return length_ * 3 + mode_;
+ ASSERT(mode_ == 0 || mode_ == 1 || mode_ == 2 || mode_ == 3);
+ return length_ * 4 + mode_;
}
};
+class FastCloneShallowObjectStub : public CodeStub {
+ public:
+ // Maximum number of properties in copied object.
+ static const int kMaximumClonedProperties = 6;
+
+ explicit FastCloneShallowObjectStub(int length) : length_(length) {
+ ASSERT_GE(length_, 0);
+ ASSERT_LE(length_, kMaximumClonedProperties);
+ }
+
+ void Generate(MacroAssembler* masm);
+
+ private:
+ int length_;
+
+ Major MajorKey() { return FastCloneShallowObject; }
+ int MinorKey() { return length_; }
+};
+
+
class InstanceofStub: public CodeStub {
public:
enum Flags {
@@ -428,12 +467,17 @@ class InstanceofStub: public CodeStub {
class MathPowStub: public CodeStub {
public:
- MathPowStub() {}
+ enum ExponentType { INTEGER, DOUBLE, TAGGED, ON_STACK};
+
+ explicit MathPowStub(ExponentType exponent_type)
+ : exponent_type_(exponent_type) { }
virtual void Generate(MacroAssembler* masm);
private:
virtual CodeStub::Major MajorKey() { return MathPow; }
- virtual int MinorKey() { return 0; }
+ virtual int MinorKey() { return exponent_type_; }
+
+ ExponentType exponent_type_;
};
@@ -446,11 +490,15 @@ class ICCompareStub: public CodeStub {
virtual void Generate(MacroAssembler* masm);
+ void set_known_map(Handle<Map> map) { known_map_ = map; }
+
private:
class OpField: public BitField<int, 0, 3> { };
class StateField: public BitField<int, 3, 5> { };
- virtual void FinishCode(Code* code) { code->set_compare_state(state_); }
+ virtual void FinishCode(Handle<Code> code) {
+ code->set_compare_state(state_);
+ }
virtual CodeStub::Major MajorKey() { return CompareIC; }
virtual int MinorKey();
@@ -463,12 +511,18 @@ class ICCompareStub: public CodeStub {
void GenerateStrings(MacroAssembler* masm);
void GenerateObjects(MacroAssembler* masm);
void GenerateMiss(MacroAssembler* masm);
+ void GenerateKnownObjects(MacroAssembler* masm);
bool strict() const { return op_ == Token::EQ_STRICT; }
Condition GetCondition() const { return CompareIC::ComputeCondition(op_); }
+ virtual void AddToSpecialCache(Handle<Code> new_object);
+ virtual bool FindCodeInSpecialCache(Code** code_out);
+ virtual bool UseSpecialCache() { return state_ == CompareIC::KNOWN_OBJECTS; }
+
Token::Value op_;
CompareIC::State state_;
+ Handle<Map> known_map_;
};
@@ -553,7 +607,7 @@ class CompareStub: public CodeStub {
int MinorKey();
virtual int GetCodeKind() { return Code::COMPARE_IC; }
- virtual void FinishCode(Code* code) {
+ virtual void FinishCode(Handle<Code> code) {
code->set_compare_state(CompareIC::GENERIC);
}
@@ -591,9 +645,6 @@ class CEntryStub : public CodeStub {
Label* throw_out_of_memory_exception,
bool do_gc,
bool always_allocate_scope);
- void GenerateThrowTOS(MacroAssembler* masm);
- void GenerateThrowUncatchable(MacroAssembler* masm,
- UncatchableExceptionType type);
// Number of pointers/values returned.
const int result_size_;
@@ -618,6 +669,10 @@ class JSEntryStub : public CodeStub {
private:
Major MajorKey() { return JSEntry; }
int MinorKey() { return 0; }
+
+ virtual void FinishCode(Handle<Code> code);
+
+ int handler_offset_;
};
@@ -694,32 +749,14 @@ class CallFunctionStub: public CodeStub {
void Generate(MacroAssembler* masm);
- virtual void FinishCode(Code* code);
-
- static void Clear(Heap* heap, Address address);
-
- static Object* GetCachedValue(Address address);
+ virtual void FinishCode(Handle<Code> code) {
+ code->set_has_function_cache(RecordCallTarget());
+ }
static int ExtractArgcFromMinorKey(int minor_key) {
return ArgcBits::decode(minor_key);
}
- // The object that indicates an uninitialized cache.
- static Handle<Object> UninitializedSentinel(Isolate* isolate) {
- return isolate->factory()->the_hole_value();
- }
-
- // A raw version of the uninitialized sentinel that's safe to read during
- // garbage collection (e.g., for patching the cache).
- static Object* RawUninitializedSentinel(Heap* heap) {
- return heap->raw_unchecked_the_hole_value();
- }
-
- // The object that indicates a megamorphic state.
- static Handle<Object> MegamorphicSentinel(Isolate* isolate) {
- return isolate->factory()->undefined_value();
- }
-
private:
int argc_;
CallFunctionFlags flags_;
@@ -746,6 +783,30 @@ class CallFunctionStub: public CodeStub {
};
+class CallConstructStub: public CodeStub {
+ public:
+ explicit CallConstructStub(CallFunctionFlags flags) : flags_(flags) {}
+
+ void Generate(MacroAssembler* masm);
+
+ virtual void FinishCode(Handle<Code> code) {
+ code->set_has_function_cache(RecordCallTarget());
+ }
+
+ private:
+ CallFunctionFlags flags_;
+
+ virtual void PrintName(StringStream* stream);
+
+ Major MajorKey() { return CallConstruct; }
+ int MinorKey() { return flags_; }
+
+ bool RecordCallTarget() {
+ return (flags_ & RECORD_CALL_TARGET) != 0;
+ }
+};
+
+
enum StringIndexFlags {
// Accepts smis or heap numbers.
STRING_INDEX_IS_NUMBER,
@@ -771,7 +832,6 @@ class StringCharCodeAtGenerator {
public:
StringCharCodeAtGenerator(Register object,
Register index,
- Register scratch,
Register result,
Label* receiver_not_string,
Label* index_not_number,
@@ -779,15 +839,11 @@ class StringCharCodeAtGenerator {
StringIndexFlags index_flags)
: object_(object),
index_(index),
- scratch_(scratch),
result_(result),
receiver_not_string_(receiver_not_string),
index_not_number_(index_not_number),
index_out_of_range_(index_out_of_range),
index_flags_(index_flags) {
- ASSERT(!scratch_.is(object_));
- ASSERT(!scratch_.is(index_));
- ASSERT(!scratch_.is(result_));
ASSERT(!result_.is(object_));
ASSERT(!result_.is(index_));
}
@@ -805,7 +861,6 @@ class StringCharCodeAtGenerator {
private:
Register object_;
Register index_;
- Register scratch_;
Register result_;
Label* receiver_not_string_;
@@ -868,8 +923,7 @@ class StringCharAtGenerator {
public:
StringCharAtGenerator(Register object,
Register index,
- Register scratch1,
- Register scratch2,
+ Register scratch,
Register result,
Label* receiver_not_string,
Label* index_not_number,
@@ -877,13 +931,12 @@ class StringCharAtGenerator {
StringIndexFlags index_flags)
: char_code_at_generator_(object,
index,
- scratch1,
- scratch2,
+ scratch,
receiver_not_string,
index_not_number,
index_out_of_range,
index_flags),
- char_from_code_generator_(scratch2, result) {}
+ char_from_code_generator_(scratch, result) {}
// Generates the fast case code. On the fallthrough path |result|
// register contains the result.
@@ -942,20 +995,29 @@ class KeyedLoadElementStub : public CodeStub {
class KeyedStoreElementStub : public CodeStub {
public:
KeyedStoreElementStub(bool is_js_array,
- ElementsKind elements_kind)
- : is_js_array_(is_js_array),
- elements_kind_(elements_kind) { }
+ ElementsKind elements_kind,
+ KeyedAccessGrowMode grow_mode)
+ : is_js_array_(is_js_array),
+ elements_kind_(elements_kind),
+ grow_mode_(grow_mode) { }
Major MajorKey() { return KeyedStoreElement; }
int MinorKey() {
- return (is_js_array_ ? 0 : kElementsKindCount) + elements_kind_;
+ return ElementsKindBits::encode(elements_kind_) |
+ IsJSArrayBits::encode(is_js_array_) |
+ GrowModeBits::encode(grow_mode_);
}
void Generate(MacroAssembler* masm);
private:
+ class ElementsKindBits: public BitField<ElementsKind, 0, 8> {};
+ class GrowModeBits: public BitField<KeyedAccessGrowMode, 8, 1> {};
+ class IsJSArrayBits: public BitField<bool, 9, 1> {};
+
bool is_js_array_;
ElementsKind elements_kind_;
+ KeyedAccessGrowMode grow_mode_;
DISALLOW_COPY_AND_ASSIGN(KeyedStoreElementStub);
};
@@ -1013,7 +1075,7 @@ class ToBooleanStub: public CodeStub {
Major MajorKey() { return ToBoolean; }
int MinorKey() { return (tos_.code() << NUMBER_OF_TYPES) | types_.ToByte(); }
- virtual void FinishCode(Code* code) {
+ virtual void FinishCode(Handle<Code> code) {
code->set_to_boolean_state(types_.ToByte());
}
@@ -1033,24 +1095,28 @@ class ElementsTransitionAndStoreStub : public CodeStub {
ElementsTransitionAndStoreStub(ElementsKind from,
ElementsKind to,
bool is_jsarray,
- StrictModeFlag strict_mode)
+ StrictModeFlag strict_mode,
+ KeyedAccessGrowMode grow_mode)
: from_(from),
to_(to),
is_jsarray_(is_jsarray),
- strict_mode_(strict_mode) {}
+ strict_mode_(strict_mode),
+ grow_mode_(grow_mode) {}
private:
- class FromBits: public BitField<ElementsKind, 0, 8> {};
- class ToBits: public BitField<ElementsKind, 8, 8> {};
- class IsJSArrayBits: public BitField<bool, 16, 8> {};
- class StrictModeBits: public BitField<StrictModeFlag, 24, 8> {};
+ class FromBits: public BitField<ElementsKind, 0, 8> {};
+ class ToBits: public BitField<ElementsKind, 8, 8> {};
+ class IsJSArrayBits: public BitField<bool, 16, 1> {};
+ class StrictModeBits: public BitField<StrictModeFlag, 17, 1> {};
+ class GrowModeBits: public BitField<KeyedAccessGrowMode, 18, 1> {};
Major MajorKey() { return ElementsTransitionAndStore; }
int MinorKey() {
return FromBits::encode(from_) |
ToBits::encode(to_) |
IsJSArrayBits::encode(is_jsarray_) |
- StrictModeBits::encode(strict_mode_);
+ StrictModeBits::encode(strict_mode_) |
+ GrowModeBits::encode(grow_mode_);
}
void Generate(MacroAssembler* masm);
@@ -1059,6 +1125,7 @@ class ElementsTransitionAndStoreStub : public CodeStub {
ElementsKind to_;
bool is_jsarray_;
StrictModeFlag strict_mode_;
+ KeyedAccessGrowMode grow_mode_;
DISALLOW_COPY_AND_ASSIGN(ElementsTransitionAndStoreStub);
};
diff --git a/src/3rdparty/v8/src/codegen.cc b/src/3rdparty/v8/src/codegen.cc
index ceea7b9..0163580 100644
--- a/src/3rdparty/v8/src/codegen.cc
+++ b/src/3rdparty/v8/src/codegen.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -62,28 +62,15 @@ void CodeGenerator::MakeCodePrologue(CompilationInfo* info) {
#ifdef DEBUG
bool print_source = false;
bool print_ast = false;
- bool print_json_ast = false;
const char* ftype;
if (Isolate::Current()->bootstrapper()->IsActive()) {
print_source = FLAG_print_builtin_source;
print_ast = FLAG_print_builtin_ast;
- print_json_ast = FLAG_print_builtin_json_ast;
ftype = "builtin";
} else {
print_source = FLAG_print_source;
print_ast = FLAG_print_ast;
- print_json_ast = FLAG_print_json_ast;
- Vector<const char> filter = CStrVector(FLAG_hydrogen_filter);
- if (print_source && !filter.is_empty()) {
- print_source = info->function()->name()->IsEqualTo(filter);
- }
- if (print_ast && !filter.is_empty()) {
- print_ast = info->function()->name()->IsEqualTo(filter);
- }
- if (print_json_ast && !filter.is_empty()) {
- print_json_ast = info->function()->name()->IsEqualTo(filter);
- }
ftype = "user-defined";
}
@@ -102,11 +89,6 @@ void CodeGenerator::MakeCodePrologue(CompilationInfo* info) {
PrintF("--- AST ---\n%s\n",
AstPrinter().PrintProgram(info->function()));
}
-
- if (print_json_ast) {
- JsonAstBuilder builder;
- PrintF("%s", builder.BuildProgram(info->function()));
- }
#endif // DEBUG
}
@@ -135,11 +117,9 @@ void CodeGenerator::PrintCode(Handle<Code> code, CompilationInfo* info) {
bool print_code = Isolate::Current()->bootstrapper()->IsActive()
? FLAG_print_builtin_code
: (FLAG_print_code || (info->IsOptimizing() && FLAG_print_opt_code));
- Vector<const char> filter = CStrVector(FLAG_hydrogen_filter);
- FunctionLiteral* function = info->function();
- bool match = filter.is_empty() || function->debug_name()->IsEqualTo(filter);
- if (print_code && match) {
+ if (print_code) {
// Print the source code if available.
+ FunctionLiteral* function = info->function();
Handle<Script> script = info->script();
if (!script->IsUndefined() && !script->source()->IsUndefined()) {
PrintF("--- Raw source ---\n");
diff --git a/src/3rdparty/v8/src/codegen.h b/src/3rdparty/v8/src/codegen.h
index 5360d3e..50d70f2 100644
--- a/src/3rdparty/v8/src/codegen.h
+++ b/src/3rdparty/v8/src/codegen.h
@@ -84,6 +84,15 @@ enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };
namespace v8 {
namespace internal {
+// Results of the library implementation of transcendental functions may differ
+// from the one we use in our generated code. Therefore we use the same
+// generated code both in runtime and compiled code.
+typedef double (*UnaryMathFunction)(double x);
+
+UnaryMathFunction CreateTranscendentalFunction(TranscendentalCache::Type type);
+UnaryMathFunction CreateSqrtFunction();
+
+
class ElementsTransitionGenerator : public AllStatic {
public:
static void GenerateSmiOnlyToObject(MacroAssembler* masm);
diff --git a/src/3rdparty/v8/src/collection.js b/src/3rdparty/v8/src/collection.js
index 4e45885..75fe3d5 100644
--- a/src/3rdparty/v8/src/collection.js
+++ b/src/3rdparty/v8/src/collection.js
@@ -25,13 +25,19 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"use strict";
-const $Set = global.Set;
-const $Map = global.Map;
-const $WeakMap = global.WeakMap;
+var $Set = global.Set;
+var $Map = global.Map;
+var $WeakMap = global.WeakMap;
//-------------------------------------------------------------------
+// Global sentinel to be used instead of undefined keys, which are not
+// supported internally but required for Harmony sets and maps.
+var undefined_sentinel = {};
+
+
function SetConstructor() {
if (%_IsConstructCall()) {
%SetInitialize(this);
@@ -42,16 +48,37 @@ function SetConstructor() {
function SetAdd(key) {
+ if (!IS_SET(this)) {
+ throw MakeTypeError('incompatible_method_receiver',
+ ['Set.prototype.add', this]);
+ }
+ if (IS_UNDEFINED(key)) {
+ key = undefined_sentinel;
+ }
return %SetAdd(this, key);
}
function SetHas(key) {
+ if (!IS_SET(this)) {
+ throw MakeTypeError('incompatible_method_receiver',
+ ['Set.prototype.has', this]);
+ }
+ if (IS_UNDEFINED(key)) {
+ key = undefined_sentinel;
+ }
return %SetHas(this, key);
}
function SetDelete(key) {
+ if (!IS_SET(this)) {
+ throw MakeTypeError('incompatible_method_receiver',
+ ['Set.prototype.delete', this]);
+ }
+ if (IS_UNDEFINED(key)) {
+ key = undefined_sentinel;
+ }
return %SetDelete(this, key);
}
@@ -66,21 +93,49 @@ function MapConstructor() {
function MapGet(key) {
+ if (!IS_MAP(this)) {
+ throw MakeTypeError('incompatible_method_receiver',
+ ['Map.prototype.get', this]);
+ }
+ if (IS_UNDEFINED(key)) {
+ key = undefined_sentinel;
+ }
return %MapGet(this, key);
}
function MapSet(key, value) {
+ if (!IS_MAP(this)) {
+ throw MakeTypeError('incompatible_method_receiver',
+ ['Map.prototype.set', this]);
+ }
+ if (IS_UNDEFINED(key)) {
+ key = undefined_sentinel;
+ }
return %MapSet(this, key, value);
}
function MapHas(key) {
+ if (!IS_MAP(this)) {
+ throw MakeTypeError('incompatible_method_receiver',
+ ['Map.prototype.has', this]);
+ }
+ if (IS_UNDEFINED(key)) {
+ key = undefined_sentinel;
+ }
return !IS_UNDEFINED(%MapGet(this, key));
}
function MapDelete(key) {
+ if (!IS_MAP(this)) {
+ throw MakeTypeError('incompatible_method_receiver',
+ ['Map.prototype.delete', this]);
+ }
+ if (IS_UNDEFINED(key)) {
+ key = undefined_sentinel;
+ }
if (!IS_UNDEFINED(%MapGet(this, key))) {
%MapSet(this, key, void 0);
return true;
@@ -100,6 +155,10 @@ function WeakMapConstructor() {
function WeakMapGet(key) {
+ if (!IS_WEAKMAP(this)) {
+ throw MakeTypeError('incompatible_method_receiver',
+ ['WeakMap.prototype.get', this]);
+ }
if (!IS_SPEC_OBJECT(key)) {
throw %MakeTypeError('invalid_weakmap_key', [this, key]);
}
@@ -108,6 +167,10 @@ function WeakMapGet(key) {
function WeakMapSet(key, value) {
+ if (!IS_WEAKMAP(this)) {
+ throw MakeTypeError('incompatible_method_receiver',
+ ['WeakMap.prototype.set', this]);
+ }
if (!IS_SPEC_OBJECT(key)) {
throw %MakeTypeError('invalid_weakmap_key', [this, key]);
}
@@ -116,6 +179,10 @@ function WeakMapSet(key, value) {
function WeakMapHas(key) {
+ if (!IS_WEAKMAP(this)) {
+ throw MakeTypeError('incompatible_method_receiver',
+ ['WeakMap.prototype.has', this]);
+ }
if (!IS_SPEC_OBJECT(key)) {
throw %MakeTypeError('invalid_weakmap_key', [this, key]);
}
@@ -124,6 +191,10 @@ function WeakMapHas(key) {
function WeakMapDelete(key) {
+ if (!IS_WEAKMAP(this)) {
+ throw MakeTypeError('incompatible_method_receiver',
+ ['WeakMap.prototype.delete', this]);
+ }
if (!IS_SPEC_OBJECT(key)) {
throw %MakeTypeError('invalid_weakmap_key', [this, key]);
}
@@ -149,14 +220,14 @@ function WeakMapDelete(key) {
%SetProperty($Map.prototype, "constructor", $Map, DONT_ENUM);
// Set up the non-enumerable functions on the Set prototype object.
- InstallFunctionsOnHiddenPrototype($Set.prototype, DONT_ENUM, $Array(
+ InstallFunctions($Set.prototype, DONT_ENUM, $Array(
"add", SetAdd,
"has", SetHas,
"delete", SetDelete
));
// Set up the non-enumerable functions on the Map prototype object.
- InstallFunctionsOnHiddenPrototype($Map.prototype, DONT_ENUM, $Array(
+ InstallFunctions($Map.prototype, DONT_ENUM, $Array(
"get", MapGet,
"set", MapSet,
"has", MapHas,
@@ -170,7 +241,7 @@ function WeakMapDelete(key) {
%SetProperty($WeakMap.prototype, "constructor", $WeakMap, DONT_ENUM);
// Set up the non-enumerable functions on the WeakMap prototype object.
- InstallFunctionsOnHiddenPrototype($WeakMap.prototype, DONT_ENUM, $Array(
+ InstallFunctions($WeakMap.prototype, DONT_ENUM, $Array(
"get", WeakMapGet,
"set", WeakMapSet,
"has", WeakMapHas,
diff --git a/src/3rdparty/v8/src/compilation-cache.cc b/src/3rdparty/v8/src/compilation-cache.cc
index 28e833a..82cc223 100644
--- a/src/3rdparty/v8/src/compilation-cache.cc
+++ b/src/3rdparty/v8/src/compilation-cache.cc
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -27,6 +27,7 @@
#include "v8.h"
+#include "assembler.h"
#include "compilation-cache.h"
#include "serialize.h"
@@ -250,7 +251,8 @@ void CompilationCacheScript::Put(Handle<String> source,
Handle<SharedFunctionInfo> CompilationCacheEval::Lookup(
Handle<String> source,
Handle<Context> context,
- StrictModeFlag strict_mode) {
+ LanguageMode language_mode,
+ int scope_position) {
// Make sure not to leak the table into the surrounding handle
// scope. Otherwise, we risk keeping old tables around even after
// having cleared the cache.
@@ -259,7 +261,8 @@ Handle<SharedFunctionInfo> CompilationCacheEval::Lookup(
{ HandleScope scope(isolate());
for (generation = 0; generation < generations(); generation++) {
Handle<CompilationCacheTable> table = GetTable(generation);
- result = table->LookupEval(*source, *context, strict_mode);
+ result = table->LookupEval(
+ *source, *context, language_mode, scope_position);
if (result->IsSharedFunctionInfo()) {
break;
}
@@ -269,7 +272,7 @@ Handle<SharedFunctionInfo> CompilationCacheEval::Lookup(
Handle<SharedFunctionInfo>
function_info(SharedFunctionInfo::cast(result), isolate());
if (generation != 0) {
- Put(source, context, function_info);
+ Put(source, context, function_info, scope_position);
}
isolate()->counters()->compilation_cache_hits()->Increment();
return function_info;
@@ -283,27 +286,31 @@ Handle<SharedFunctionInfo> CompilationCacheEval::Lookup(
MaybeObject* CompilationCacheEval::TryTablePut(
Handle<String> source,
Handle<Context> context,
- Handle<SharedFunctionInfo> function_info) {
+ Handle<SharedFunctionInfo> function_info,
+ int scope_position) {
Handle<CompilationCacheTable> table = GetFirstTable();
- return table->PutEval(*source, *context, *function_info);
+ return table->PutEval(*source, *context, *function_info, scope_position);
}
Handle<CompilationCacheTable> CompilationCacheEval::TablePut(
Handle<String> source,
Handle<Context> context,
- Handle<SharedFunctionInfo> function_info) {
+ Handle<SharedFunctionInfo> function_info,
+ int scope_position) {
CALL_HEAP_FUNCTION(isolate(),
- TryTablePut(source, context, function_info),
+ TryTablePut(
+ source, context, function_info, scope_position),
CompilationCacheTable);
}
void CompilationCacheEval::Put(Handle<String> source,
Handle<Context> context,
- Handle<SharedFunctionInfo> function_info) {
+ Handle<SharedFunctionInfo> function_info,
+ int scope_position) {
HandleScope scope(isolate());
- SetFirstTable(TablePut(source, context, function_info));
+ SetFirstTable(TablePut(source, context, function_info, scope_position));
}
@@ -389,16 +396,20 @@ Handle<SharedFunctionInfo> CompilationCache::LookupEval(
Handle<String> source,
Handle<Context> context,
bool is_global,
- StrictModeFlag strict_mode) {
+ LanguageMode language_mode,
+ int scope_position) {
if (!IsEnabled()) {
return Handle<SharedFunctionInfo>::null();
}
Handle<SharedFunctionInfo> result;
if (is_global) {
- result = eval_global_.Lookup(source, context, strict_mode);
+ result = eval_global_.Lookup(
+ source, context, language_mode, scope_position);
} else {
- result = eval_contextual_.Lookup(source, context, strict_mode);
+ ASSERT(scope_position != RelocInfo::kNoPosition);
+ result = eval_contextual_.Lookup(
+ source, context, language_mode, scope_position);
}
return result;
}
@@ -427,16 +438,18 @@ void CompilationCache::PutScript(Handle<String> source,
void CompilationCache::PutEval(Handle<String> source,
Handle<Context> context,
bool is_global,
- Handle<SharedFunctionInfo> function_info) {
+ Handle<SharedFunctionInfo> function_info,
+ int scope_position) {
if (!IsEnabled()) {
return;
}
HandleScope scope(isolate());
if (is_global) {
- eval_global_.Put(source, context, function_info);
+ eval_global_.Put(source, context, function_info, scope_position);
} else {
- eval_contextual_.Put(source, context, function_info);
+ ASSERT(scope_position != RelocInfo::kNoPosition);
+ eval_contextual_.Put(source, context, function_info, scope_position);
}
}
diff --git a/src/3rdparty/v8/src/compilation-cache.h b/src/3rdparty/v8/src/compilation-cache.h
index 4339d22..2f2fbad 100644
--- a/src/3rdparty/v8/src/compilation-cache.h
+++ b/src/3rdparty/v8/src/compilation-cache.h
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -31,8 +31,6 @@
namespace v8 {
namespace internal {
-class HashMap;
-
// The compilation cache consists of several generational sub-caches which uses
// this class as a base class. A sub-cache contains a compilation cache tables
// for each generation of the sub-cache. Since the same source code string has
@@ -123,7 +121,19 @@ class CompilationCacheScript : public CompilationSubCache {
};
-// Sub-cache for eval scripts.
+// Sub-cache for eval scripts. Two caches for eval are used. One for eval calls
+// in global contexts and one for eval calls in other contexts. The cache
+// considers the following pieces of information when checking for matching
+// entries:
+// 1. The source string.
+// 2. The shared function info of the calling function.
+// 3. Whether the source should be compiled as strict code or as non-strict
+// code.
+// Note: Currently there are clients of CompileEval that always compile
+// non-strict code even if the calling function is a strict mode function.
+// More specifically these are the CompileString, DebugEvaluate and
+// DebugEvaluateGlobal runtime functions.
+// 4. The start position of the calling scope.
class CompilationCacheEval: public CompilationSubCache {
public:
CompilationCacheEval(Isolate* isolate, int generations)
@@ -131,23 +141,27 @@ class CompilationCacheEval: public CompilationSubCache {
Handle<SharedFunctionInfo> Lookup(Handle<String> source,
Handle<Context> context,
- StrictModeFlag strict_mode);
+ LanguageMode language_mode,
+ int scope_position);
void Put(Handle<String> source,
Handle<Context> context,
- Handle<SharedFunctionInfo> function_info);
+ Handle<SharedFunctionInfo> function_info,
+ int scope_position);
private:
MUST_USE_RESULT MaybeObject* TryTablePut(
Handle<String> source,
Handle<Context> context,
- Handle<SharedFunctionInfo> function_info);
+ Handle<SharedFunctionInfo> function_info,
+ int scope_position);
// Note: Returns a new hash table if operation results in expansion.
Handle<CompilationCacheTable> TablePut(
Handle<String> source,
Handle<Context> context,
- Handle<SharedFunctionInfo> function_info);
+ Handle<SharedFunctionInfo> function_info,
+ int scope_position);
DISALLOW_IMPLICIT_CONSTRUCTORS(CompilationCacheEval);
};
@@ -198,7 +212,8 @@ class CompilationCache {
Handle<SharedFunctionInfo> LookupEval(Handle<String> source,
Handle<Context> context,
bool is_global,
- StrictModeFlag strict_mode);
+ LanguageMode language_mode,
+ int scope_position);
// Returns the regexp data associated with the given regexp if it
// is in cache, otherwise an empty handle.
@@ -215,7 +230,8 @@ class CompilationCache {
void PutEval(Handle<String> source,
Handle<Context> context,
bool is_global,
- Handle<SharedFunctionInfo> function_info);
+ Handle<SharedFunctionInfo> function_info,
+ int scope_position);
// Associate the (source, flags) pair to the given regexp data.
// This may overwrite an existing mapping.
diff --git a/src/3rdparty/v8/src/compiler.cc b/src/3rdparty/v8/src/compiler.cc
index cabca74..c9c2480 100644
--- a/src/3rdparty/v8/src/compiler.cc
+++ b/src/3rdparty/v8/src/compiler.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -53,22 +53,25 @@ namespace internal {
CompilationInfo::CompilationInfo(Handle<Script> script)
: isolate_(script->GetIsolate()),
- flags_(0),
+ flags_(LanguageModeField::encode(CLASSIC_MODE)),
function_(NULL),
scope_(NULL),
+ global_scope_(NULL),
script_(script),
extension_(NULL),
pre_parse_data_(NULL),
osr_ast_id_(AstNode::kNoNumber) {
- Initialize(NONOPT);
+ Initialize(BASE);
}
CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info)
: isolate_(shared_info->GetIsolate()),
- flags_(IsLazy::encode(true)),
+ flags_(LanguageModeField::encode(CLASSIC_MODE) |
+ IsLazy::encode(true)),
function_(NULL),
scope_(NULL),
+ global_scope_(NULL),
shared_info_(shared_info),
script_(Handle<Script>(Script::cast(shared_info->script()))),
extension_(NULL),
@@ -80,9 +83,11 @@ CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info)
CompilationInfo::CompilationInfo(Handle<JSFunction> closure)
: isolate_(closure->GetIsolate()),
- flags_(IsLazy::encode(true)),
+ flags_(LanguageModeField::encode(CLASSIC_MODE) |
+ IsLazy::encode(true)),
function_(NULL),
scope_(NULL),
+ global_scope_(NULL),
closure_(closure),
shared_info_(Handle<SharedFunctionInfo>(closure->shared())),
script_(Handle<Script>(Script::cast(shared_info_->script()))),
@@ -105,6 +110,19 @@ void CompilationInfo::DisableOptimization() {
}
+// Primitive functions are unlikely to be picked up by the stack-walking
+// profiler, so they trigger their own optimization when they're called
+// for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
+bool CompilationInfo::ShouldSelfOptimize() {
+ return FLAG_self_optimization &&
+ FLAG_crankshaft &&
+ !function()->flags()->Contains(kDontSelfOptimize) &&
+ !function()->flags()->Contains(kDontOptimize) &&
+ function()->scope()->allows_lazy_recompilation() &&
+ (shared_info().is_null() || !shared_info()->optimization_disabled());
+}
+
+
void CompilationInfo::AbortOptimization() {
Handle<Code> code(shared_info()->code());
SetCode(code);
@@ -165,7 +183,9 @@ static void FinishOptimization(Handle<JSFunction> function, int64_t start) {
static bool MakeCrankshaftCode(CompilationInfo* info) {
// Test if we can optimize this function when asked to. We can only
// do this after the scopes are computed.
- if (!info->AllowOptimize()) info->DisableOptimization();
+ if (!V8::UseCrankshaft()) {
+ info->DisableOptimization();
+ }
// In case we are not optimizing simply return the code from
// the full code generator.
@@ -185,7 +205,7 @@ static bool MakeCrankshaftCode(CompilationInfo* info) {
// Fall back to using the full code generator if it's not possible
// to use the Hydrogen-based optimizing compiler. We already have
// generated code for this from the shared function object.
- if (AlwaysFullCompiler() || !FLAG_use_hydrogen) {
+ if (AlwaysFullCompiler()) {
info->SetCode(code);
return true;
}
@@ -196,8 +216,7 @@ static bool MakeCrankshaftCode(CompilationInfo* info) {
FLAG_deopt_every_n_times == 0 ? Compiler::kDefaultMaxOptCount : 1000;
if (info->shared_info()->opt_count() > kMaxOptCount) {
info->AbortOptimization();
- Handle<JSFunction> closure = info->closure();
- info->shared_info()->DisableOptimization(*closure);
+ info->shared_info()->DisableOptimization();
// True indicates the compilation pipeline is still going, not
// necessarily that we optimized the code.
return true;
@@ -217,20 +236,22 @@ static bool MakeCrankshaftCode(CompilationInfo* info) {
(info->osr_ast_id() != AstNode::kNoNumber &&
scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit)) {
info->AbortOptimization();
- Handle<JSFunction> closure = info->closure();
- info->shared_info()->DisableOptimization(*closure);
+ info->shared_info()->DisableOptimization();
// True indicates the compilation pipeline is still going, not
// necessarily that we optimized the code.
return true;
}
// Take --hydrogen-filter into account.
- Vector<const char> filter = CStrVector(FLAG_hydrogen_filter);
Handle<String> name = info->function()->debug_name();
- bool match = filter.is_empty() || name->IsEqualTo(filter);
- if (!match) {
- info->SetCode(code);
- return true;
+ if (*FLAG_hydrogen_filter != '\0') {
+ Vector<const char> filter = CStrVector(FLAG_hydrogen_filter);
+ if ((filter[0] == '-'
+ && name->IsEqualTo(filter.SubVector(1, filter.length())))
+ || (filter[0] != '-' && !name->IsEqualTo(filter))) {
+ info->SetCode(code);
+ return true;
+ }
}
// Recompile the unoptimized version of the code if the current version
@@ -282,7 +303,7 @@ static bool MakeCrankshaftCode(CompilationInfo* info) {
return false;
}
- if (graph != NULL && FLAG_build_lithium) {
+ if (graph != NULL) {
Handle<Code> optimized_code = graph->Compile(info);
if (!optimized_code.is_null()) {
info->SetCode(optimized_code);
@@ -296,8 +317,7 @@ static bool MakeCrankshaftCode(CompilationInfo* info) {
if (!builder.inline_bailout()) {
// Mark the shared code as unoptimizable unless it was an inlined
// function that bailed out.
- Handle<JSFunction> closure = info->closure();
- info->shared_info()->DisableOptimization(*closure);
+ info->shared_info()->DisableOptimization();
}
// True indicates the compilation pipeline is still going, not necessarily
// that we optimized the code.
@@ -326,8 +346,7 @@ bool Compiler::MakeCodeForLiveEdit(CompilationInfo* info) {
// the compilation info is set if compilation succeeded.
bool succeeded = MakeCode(info);
if (!info->shared_info().is_null()) {
- Handle<SerializedScopeInfo> scope_info =
- SerializedScopeInfo::Create(info->scope());
+ Handle<ScopeInfo> scope_info = ScopeInfo::Create(info->scope());
info->shared_info()->set_scope_info(*scope_info);
}
return succeeded;
@@ -369,8 +388,14 @@ static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
// Only allow non-global compiles for eval.
ASSERT(info->is_eval() || info->is_global());
-
- if (!ParserApi::Parse(info)) return Handle<SharedFunctionInfo>::null();
+ ParsingFlags flags = kNoParsingFlags;
+ if (info->pre_parse_data() != NULL ||
+ String::cast(script->source())->length() > FLAG_min_preparse_length) {
+ flags = kAllowLazy;
+ }
+ if (!ParserApi::Parse(info, flags)) {
+ return Handle<SharedFunctionInfo>::null();
+ }
// Measure how long it takes to do the compilation; only take the
// rest of the function into account to avoid overlap with the
@@ -384,7 +409,7 @@ static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
FunctionLiteral* lit = info->function();
LiveEditFunctionTracker live_edit_tracker(isolate, lit);
if (!MakeCode(info)) {
- isolate->StackOverflow();
+ if (!isolate->has_pending_exception()) isolate->StackOverflow();
return Handle<SharedFunctionInfo>::null();
}
@@ -395,7 +420,7 @@ static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
lit->name(),
lit->materialized_literal_count(),
info->code(),
- SerializedScopeInfo::Create(info->scope()));
+ ScopeInfo::Create(info->scope()));
ASSERT_EQ(RelocInfo::kNoPosition, lit->function_token_position());
Compiler::SetFunctionInfo(result, lit, true, script);
@@ -428,6 +453,9 @@ static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
// the instances of the function.
SetExpectedNofPropertiesFromEstimate(result, lit->expected_property_count());
+ script->set_compilation_state(
+ Smi::FromInt(Script::COMPILATION_STATE_COMPILED));
+
#ifdef ENABLE_DEBUGGER_SUPPORT
// Notify debugger
isolate->debugger()->OnAfterCompile(
@@ -445,10 +473,9 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
int line_offset,
int column_offset,
v8::Extension* extension,
- ScriptDataImpl* input_pre_data,
+ ScriptDataImpl* pre_data,
Handle<Object> script_data,
- NativesFlag natives,
- v8::Script::CompileFlags compile_flags) {
+ NativesFlag natives) {
Isolate* isolate = source->GetIsolate();
int source_length = source->length();
isolate->counters()->total_load_size()->Increment(source_length);
@@ -477,29 +504,10 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
// for small sources, odds are that there aren't many functions
// that would be compiled lazily anyway, so we skip the preparse step
// in that case too.
- ScriptDataImpl* pre_data = input_pre_data;
- int flags = kNoParsingFlags;
- if ((natives == NATIVES_CODE) || FLAG_allow_natives_syntax) {
- flags |= kAllowNativesSyntax;
- }
- if (natives != NATIVES_CODE && FLAG_harmony_scoping) {
- flags |= kHarmonyScoping;
- }
- if (pre_data == NULL
- && source_length >= FLAG_min_preparse_length) {
- if (source->IsExternalTwoByteString()) {
- ExternalTwoByteStringUC16CharacterStream stream(
- Handle<ExternalTwoByteString>::cast(source), 0, source->length());
- pre_data = ParserApi::PartialPreParse(&stream, extension, flags);
- } else {
- GenericStringUC16CharacterStream stream(source, 0, source->length());
- pre_data = ParserApi::PartialPreParse(&stream, extension, flags);
- }
- }
// Create a script object describing the script to be compiled.
Handle<Script> script = FACTORY->NewScript(source);
- if (natives == NATIVES_CODE || compile_flags & v8::Script::NativeMode) {
+ if (natives == NATIVES_CODE) {
script->set_type(Smi::FromInt(Script::TYPE_NATIVE));
}
if (!script_name.is_null()) {
@@ -516,15 +524,16 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
info.MarkAsGlobal();
info.SetExtension(extension);
info.SetPreParseData(pre_data);
- if (compile_flags & v8::Script::QmlMode) info.MarkAsQmlMode();
+ if (FLAG_use_strict) {
+ info.SetLanguageMode(FLAG_harmony_scoping ? EXTENDED_MODE : STRICT_MODE);
+ }
result = MakeFunctionInfo(&info);
if (extension == NULL && !result.is_null()) {
compilation_cache->PutScript(source, result);
}
-
- // Get rid of the pre-parsing data (if necessary).
- if (input_pre_data == NULL && pre_data != NULL) {
- delete pre_data;
+ } else {
+ if (result->ic_age() != HEAP->global_ic_age()) {
+ result->ResetForNewContext(HEAP->global_ic_age());
}
}
@@ -536,8 +545,8 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
Handle<SharedFunctionInfo> Compiler::CompileEval(Handle<String> source,
Handle<Context> context,
bool is_global,
- StrictModeFlag strict_mode,
- bool qml_mode) {
+ LanguageMode language_mode,
+ int scope_position) {
Isolate* isolate = source->GetIsolate();
int source_length = source->length();
isolate->counters()->total_eval_size()->Increment(source_length);
@@ -553,7 +562,8 @@ Handle<SharedFunctionInfo> Compiler::CompileEval(Handle<String> source,
result = compilation_cache->LookupEval(source,
context,
is_global,
- strict_mode);
+ language_mode,
+ scope_position);
if (result.is_null()) {
// Create a script object describing the script to be compiled.
@@ -561,18 +571,28 @@ Handle<SharedFunctionInfo> Compiler::CompileEval(Handle<String> source,
CompilationInfo info(script);
info.MarkAsEval();
if (is_global) info.MarkAsGlobal();
- info.SetStrictModeFlag(strict_mode);
- if (qml_mode) info.MarkAsQmlMode();
+ info.SetLanguageMode(language_mode);
info.SetCallingContext(context);
result = MakeFunctionInfo(&info);
if (!result.is_null()) {
- CompilationCache* compilation_cache = isolate->compilation_cache();
- // If caller is strict mode, the result must be strict as well,
- // but not the other way around. Consider:
+ // Explicitly disable optimization for eval code. We're not yet prepared
+ // to handle eval-code in the optimizing compiler.
+ result->DisableOptimization();
+
+ // If caller is strict mode, the result must be in strict mode or
+ // extended mode as well, but not the other way around. Consider:
// eval("'use strict'; ...");
- // TODO(keuchel): adapt this for extended mode.
- ASSERT(strict_mode == kNonStrictMode || result->strict_mode());
- compilation_cache->PutEval(source, context, is_global, result);
+ ASSERT(language_mode != STRICT_MODE || !result->is_classic_mode());
+ // If caller is in extended mode, the result must also be in
+ // extended mode.
+ ASSERT(language_mode != EXTENDED_MODE ||
+ result->is_extended_mode());
+ compilation_cache->PutEval(
+ source, context, is_global, result, scope_position);
+ }
+ } else {
+ if (result->ic_age() != HEAP->global_ic_age()) {
+ result->ResetForNewContext(HEAP->global_ic_age());
}
}
@@ -595,26 +615,16 @@ bool Compiler::CompileLazy(CompilationInfo* info) {
isolate->counters()->total_compile_size()->Increment(compiled_size);
// Generate the AST for the lazily compiled function.
- if (ParserApi::Parse(info)) {
+ if (ParserApi::Parse(info, kNoParsingFlags)) {
// Measure how long it takes to do the lazy compilation; only take the
// rest of the function into account to avoid overlap with the lazy
// parsing statistics.
HistogramTimerScope timer(isolate->counters()->compile_lazy());
- // After parsing we know function's strict mode. Remember it.
- StrictModeFlag strict_mode = info->function()->strict_mode_flag();
- ASSERT(info->strict_mode_flag() == kNonStrictMode ||
- info->strict_mode_flag() == strict_mode);
- ASSERT(shared->strict_mode_flag() == kNonStrictMode ||
- shared->strict_mode_flag() == strict_mode);
- info->SetStrictModeFlag(strict_mode);
- shared->set_strict_mode_flag(strict_mode);
-
- // After parsing we know function's qml mode. Remember it.
- if (info->function()->qml_mode()) {
- shared->set_qml_mode(true);
- info->MarkAsQmlMode();
- }
+ // After parsing we know the function's language mode. Remember it.
+ LanguageMode language_mode = info->function()->language_mode();
+ info->SetLanguageMode(language_mode);
+ shared->set_language_mode(language_mode);
// Compile the code.
if (!MakeCode(info)) {
@@ -633,16 +643,15 @@ bool Compiler::CompileLazy(CompilationInfo* info) {
RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info, shared);
if (info->IsOptimizing()) {
- ASSERT(shared->scope_info() != SerializedScopeInfo::Empty());
+ ASSERT(shared->scope_info() != ScopeInfo::Empty());
function->ReplaceCode(*code);
} else {
// Update the shared function info with the compiled code and the
// scope info. Please note, that the order of the shared function
// info initialization is important since set_scope_info might
// trigger a GC, causing the ASSERT below to be invalid if the code
- // was flushed. By settting the code object last we avoid this.
- Handle<SerializedScopeInfo> scope_info =
- SerializedScopeInfo::Create(info->scope());
+ // was flushed. By setting the code object last we avoid this.
+ Handle<ScopeInfo> scope_info = ScopeInfo::Create(info->scope());
shared->set_scope_info(*scope_info);
shared->set_code(*code);
if (!function.is_null()) {
@@ -665,8 +674,13 @@ bool Compiler::CompileLazy(CompilationInfo* info) {
// Check the function has compiled code.
ASSERT(shared->is_compiled());
shared->set_code_age(0);
+ shared->set_dont_optimize(lit->flags()->Contains(kDontOptimize));
+ shared->set_dont_inline(lit->flags()->Contains(kDontInline));
+ shared->set_ast_node_count(lit->ast_node_count());
- if (info->AllowOptimize() && !shared->optimization_disabled()) {
+ if (V8::UseCrankshaft()&&
+ !function.is_null() &&
+ !shared->optimization_disabled()) {
// If we're asked to always optimize, we compile the optimized
// version of the function right away - unless the debugger is
// active as it makes no sense to compile optimized code then.
@@ -694,7 +708,7 @@ Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
CompilationInfo info(script);
info.SetFunction(literal);
info.SetScope(literal->scope());
- info.SetStrictModeFlag(literal->scope()->strict_mode_flag());
+ info.SetLanguageMode(literal->scope()->language_mode());
LiveEditFunctionTracker live_edit_tracker(info.isolate(), literal);
// Determine if the function can be lazily compiled. This is necessary to
@@ -705,7 +719,7 @@ Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
bool allow_lazy = literal->AllowsLazyCompilation() &&
!LiveEditFunctionTracker::IsActive(info.isolate());
- Handle<SerializedScopeInfo> scope_info(SerializedScopeInfo::Empty());
+ Handle<ScopeInfo> scope_info(ScopeInfo::Empty());
// Generate code
if (FLAG_lazy && allow_lazy) {
@@ -714,7 +728,7 @@ Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
} else if ((V8::UseCrankshaft() && MakeCrankshaftCode(&info)) ||
(!V8::UseCrankshaft() && FullCodeGenerator::MakeCode(&info))) {
ASSERT(!info.code().is_null());
- scope_info = SerializedScopeInfo::Create(info.scope());
+ scope_info = ScopeInfo::Create(info.scope());
} else {
return Handle<SharedFunctionInfo>::null();
}
@@ -746,8 +760,8 @@ void Compiler::SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
FunctionLiteral* lit,
bool is_toplevel,
Handle<Script> script) {
- function_info->set_length(lit->num_parameters());
- function_info->set_formal_parameter_count(lit->num_parameters());
+ function_info->set_length(lit->parameter_count());
+ function_info->set_formal_parameter_count(lit->parameter_count());
function_info->set_script(*script);
function_info->set_function_token_position(lit->function_token_position());
function_info->set_start_position(lit->start_position());
@@ -760,10 +774,13 @@ void Compiler::SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
lit->has_only_simple_this_property_assignments(),
*lit->this_property_assignments());
function_info->set_allows_lazy_compilation(lit->AllowsLazyCompilation());
- function_info->set_strict_mode_flag(lit->strict_mode_flag());
- function_info->set_qml_mode(lit->qml_mode());
+ function_info->set_language_mode(lit->language_mode());
function_info->set_uses_arguments(lit->scope()->arguments() != NULL);
function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
+ function_info->set_ast_node_count(lit->ast_node_count());
+ function_info->set_is_function(lit->is_function());
+ function_info->set_dont_optimize(lit->flags()->Contains(kDontOptimize));
+ function_info->set_dont_inline(lit->flags()->Contains(kDontInline));
}
diff --git a/src/3rdparty/v8/src/compiler.h b/src/3rdparty/v8/src/compiler.h
index 054e3b9..44df9e0 100644
--- a/src/3rdparty/v8/src/compiler.h
+++ b/src/3rdparty/v8/src/compiler.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -52,14 +52,15 @@ class CompilationInfo BASE_EMBEDDED {
bool is_lazy() const { return IsLazy::decode(flags_); }
bool is_eval() const { return IsEval::decode(flags_); }
bool is_global() const { return IsGlobal::decode(flags_); }
- bool is_strict_mode() const { return strict_mode_flag() == kStrictMode; }
- StrictModeFlag strict_mode_flag() const {
- return StrictModeFlagField::decode(flags_);
+ bool is_classic_mode() const { return language_mode() == CLASSIC_MODE; }
+ bool is_extended_mode() const { return language_mode() == EXTENDED_MODE; }
+ LanguageMode language_mode() const {
+ return LanguageModeField::decode(flags_);
}
bool is_in_loop() const { return IsInLoop::decode(flags_); }
- bool is_qml_mode() const { return IsQmlMode::decode(flags_); }
FunctionLiteral* function() const { return function_; }
Scope* scope() const { return scope_; }
+ Scope* global_scope() const { return global_scope_; }
Handle<Code> code() const { return code_; }
Handle<JSFunction> closure() const { return closure_; }
Handle<SharedFunctionInfo> shared_info() const { return shared_info_; }
@@ -77,18 +78,16 @@ class CompilationInfo BASE_EMBEDDED {
ASSERT(!is_lazy());
flags_ |= IsGlobal::encode(true);
}
- void SetStrictModeFlag(StrictModeFlag strict_mode_flag) {
- ASSERT(StrictModeFlagField::decode(flags_) == kNonStrictMode ||
- StrictModeFlagField::decode(flags_) == strict_mode_flag);
- flags_ = StrictModeFlagField::update(flags_, strict_mode_flag);
+ void SetLanguageMode(LanguageMode language_mode) {
+ ASSERT(this->language_mode() == CLASSIC_MODE ||
+ this->language_mode() == language_mode ||
+ language_mode == EXTENDED_MODE);
+ flags_ = LanguageModeField::update(flags_, language_mode);
}
void MarkAsInLoop() {
ASSERT(is_lazy());
flags_ |= IsInLoop::encode(true);
}
- void MarkAsQmlMode() {
- flags_ |= IsQmlMode::encode(true);
- }
void MarkAsNative() {
flags_ |= IsNative::encode(true);
}
@@ -103,6 +102,10 @@ class CompilationInfo BASE_EMBEDDED {
ASSERT(scope_ == NULL);
scope_ = scope;
}
+ void SetGlobalScope(Scope* global_scope) {
+ ASSERT(global_scope_ == NULL);
+ global_scope_ = global_scope;
+ }
void SetCode(Handle<Code> code) { code_ = code; }
void SetExtension(v8::Extension* extension) {
ASSERT(!is_lazy());
@@ -160,10 +163,8 @@ class CompilationInfo BASE_EMBEDDED {
flags_ |= SupportsDeoptimization::encode(true);
}
- // Determine whether or not we can adaptively optimize.
- bool AllowOptimize() {
- return V8::UseCrankshaft() && !closure_.is_null();
- }
+ // Determines whether or not to insert a self-optimization header.
+ bool ShouldSelfOptimize();
// Disable all optimization attempts of this info for the rest of the
// current compilation pipeline.
@@ -175,9 +176,8 @@ class CompilationInfo BASE_EMBEDDED {
// Compilation mode.
// BASE is generated by the full codegen, optionally prepared for bailouts.
// OPTIMIZE is optimized code generated by the Hydrogen-based backend.
- // NONOPT is generated by the full codegen or the classic backend
- // and is not prepared for recompilation/bailouts. These functions
- // are never recompiled.
+ // NONOPT is generated by the full codegen and is not prepared for
+ // recompilation/bailouts. These functions are never recompiled.
enum Mode {
BASE,
OPTIMIZE,
@@ -193,11 +193,8 @@ class CompilationInfo BASE_EMBEDDED {
MarkAsNative();
}
if (!shared_info_.is_null()) {
- ASSERT(strict_mode_flag() == kNonStrictMode);
- SetStrictModeFlag(shared_info_->strict_mode_flag());
- }
- if (!shared_info_.is_null() && shared_info_->qml_mode()) {
- MarkAsQmlMode();
+ ASSERT(language_mode() == CLASSIC_MODE);
+ SetLanguageMode(shared_info_->language_mode());
}
}
@@ -217,7 +214,7 @@ class CompilationInfo BASE_EMBEDDED {
// Flags that can be set for lazy compilation.
class IsInLoop: public BitField<bool, 3, 1> {};
// Strict mode - used in eager compilation.
- class StrictModeFlagField: public BitField<StrictModeFlag, 4, 1> {};
+ class LanguageModeField: public BitField<LanguageMode, 4, 2> {};
// Is this a function from our natives.
class IsNative: public BitField<bool, 6, 1> {};
// Is this code being compiled with support for deoptimization..
@@ -225,8 +222,7 @@ class CompilationInfo BASE_EMBEDDED {
// If compiling for debugging produce just full code matching the
// initial mode setting.
class IsCompilingForDebugging: public BitField<bool, 8, 1> {};
- // Qml mode
- class IsQmlMode: public BitField<bool, 9, 1> {};
+
unsigned flags_;
@@ -236,6 +232,8 @@ class CompilationInfo BASE_EMBEDDED {
// The scope of the function literal as a convenience. Set to indicate
// that scopes have been analyzed.
Scope* scope_;
+ // The global scope provided as a convenience.
+ Scope* global_scope_;
// The compiled code.
Handle<Code> code_;
@@ -279,6 +277,9 @@ class Compiler : public AllStatic {
static const int kMaxInliningLevels = 3;
+ // Call count before primitive functions trigger their own optimization.
+ static const int kCallsUntilPrimitiveOpt = 200;
+
// All routines return a SharedFunctionInfo.
// If an error occurs an exception is raised and the return handle
// contains NULL.
@@ -291,15 +292,14 @@ class Compiler : public AllStatic {
v8::Extension* extension,
ScriptDataImpl* pre_data,
Handle<Object> script_data,
- NativesFlag is_natives_code,
- v8::Script::CompileFlags = v8::Script::Default);
+ NativesFlag is_natives_code);
// Compile a String source within a context for Eval.
static Handle<SharedFunctionInfo> CompileEval(Handle<String> source,
Handle<Context> context,
bool is_global,
- StrictModeFlag strict_mode,
- bool qml_mode);
+ LanguageMode language_mode,
+ int scope_position);
// Compile from function info (used for lazy compilation). Returns true on
// success and false if the compilation resulted in a stack overflow.
diff --git a/src/3rdparty/v8/src/contexts.cc b/src/3rdparty/v8/src/contexts.cc
index 3129af0..76784bd 100644
--- a/src/3rdparty/v8/src/contexts.cc
+++ b/src/3rdparty/v8/src/contexts.cc
@@ -103,9 +103,6 @@ Handle<Object> Context::Lookup(Handle<String> name,
PrintF(")\n");
}
- Handle<JSObject> qml_global;
- Handle<JSObject> qml_global_global;
-
do {
if (FLAG_trace_contexts) {
PrintF(" - looking in context %p", reinterpret_cast<void*>(*context));
@@ -113,11 +110,6 @@ Handle<Object> Context::Lookup(Handle<String> name,
PrintF("\n");
}
- if (qml_global.is_null() && !context->qml_global()->IsUndefined()) {
- qml_global = Handle<JSObject>(context->qml_global(), isolate);
- qml_global_global = Handle<JSObject>(context->global(), isolate);
- }
-
// 1. Check global objects, subjects of with, and extension objects.
if (context->IsGlobalContext() ||
context->IsWithContext() ||
@@ -145,16 +137,17 @@ Handle<Object> Context::Lookup(Handle<String> name,
if (context->IsFunctionContext() || context->IsBlockContext()) {
// Use serialized scope information of functions and blocks to search
// for the context index.
- Handle<SerializedScopeInfo> scope_info;
+ Handle<ScopeInfo> scope_info;
if (context->IsFunctionContext()) {
- scope_info = Handle<SerializedScopeInfo>(
+ scope_info = Handle<ScopeInfo>(
context->closure()->shared()->scope_info(), isolate);
} else {
- scope_info = Handle<SerializedScopeInfo>(
- SerializedScopeInfo::cast(context->extension()), isolate);
+ scope_info = Handle<ScopeInfo>(
+ ScopeInfo::cast(context->extension()), isolate);
}
VariableMode mode;
- int slot_index = scope_info->ContextSlotIndex(*name, &mode);
+ InitializationFlag init_flag;
+ int slot_index = scope_info->ContextSlotIndex(*name, &mode, &init_flag);
ASSERT(slot_index < 0 || slot_index >= MIN_CONTEXT_SLOTS);
if (slot_index >= 0) {
if (FLAG_trace_contexts) {
@@ -176,15 +169,19 @@ Handle<Object> Context::Lookup(Handle<String> name,
break;
case LET:
*attributes = NONE;
- *binding_flags = MUTABLE_CHECK_INITIALIZED;
+ *binding_flags = (init_flag == kNeedsInitialization)
+ ? MUTABLE_CHECK_INITIALIZED : MUTABLE_IS_INITIALIZED;
break;
case CONST:
*attributes = READ_ONLY;
- *binding_flags = IMMUTABLE_CHECK_INITIALIZED;
+ *binding_flags = (init_flag == kNeedsInitialization)
+ ? IMMUTABLE_CHECK_INITIALIZED : IMMUTABLE_IS_INITIALIZED;
break;
case CONST_HARMONY:
*attributes = READ_ONLY;
- *binding_flags = IMMUTABLE_CHECK_INITIALIZED_HARMONY;
+ *binding_flags = (init_flag == kNeedsInitialization)
+ ? IMMUTABLE_CHECK_INITIALIZED_HARMONY :
+ IMMUTABLE_IS_INITIALIZED_HARMONY;
break;
case DYNAMIC:
case DYNAMIC_GLOBAL:
@@ -236,33 +233,6 @@ Handle<Object> Context::Lookup(Handle<String> name,
}
} while (follow_context_chain);
- if (!qml_global.is_null()) {
- if ((flags & FOLLOW_PROTOTYPE_CHAIN) == 0) {
- *attributes = qml_global_global->GetLocalPropertyAttribute(*name);
- } else {
- *attributes = qml_global_global->GetPropertyAttribute(*name);
- }
-
- if (*attributes != ABSENT) {
- *attributes = ABSENT;
- } else {
- if ((flags & FOLLOW_PROTOTYPE_CHAIN) == 0) {
- *attributes = qml_global->GetLocalPropertyAttribute(*name);
- } else {
- *attributes = qml_global->GetPropertyAttribute(*name);
- }
-
- if (*attributes != ABSENT) {
- // property found
- if (FLAG_trace_contexts) {
- PrintF("=> found property in qml global object %p\n",
- reinterpret_cast<void*>(*qml_global));
- }
- return qml_global;
- }
- }
- }
-
if (FLAG_trace_contexts) {
PrintF("=> no property/slot found\n");
}
@@ -270,64 +240,6 @@ Handle<Object> Context::Lookup(Handle<String> name,
}
-bool Context::GlobalIfNotShadowedByEval(Handle<String> name) {
- Context* context = this;
-
- // Check that there is no local with the given name in contexts
- // before the global context and check that there are no context
- // extension objects (conservative check for with statements).
- while (!context->IsGlobalContext()) {
- // Check if the context is a catch or with context, or has introduced
- // bindings by calling non-strict eval.
- if (context->has_extension()) return false;
-
- // Not a with context so it must be a function context.
- ASSERT(context->IsFunctionContext());
-
- // Check non-parameter locals.
- Handle<SerializedScopeInfo> scope_info(
- context->closure()->shared()->scope_info());
- VariableMode mode;
- int index = scope_info->ContextSlotIndex(*name, &mode);
- ASSERT(index < 0 || index >= MIN_CONTEXT_SLOTS);
- if (index >= 0) return false;
-
- // Check parameter locals.
- int param_index = scope_info->ParameterIndex(*name);
- if (param_index >= 0) return false;
-
- // Check context only holding the function name variable.
- index = scope_info->FunctionContextSlotIndex(*name, NULL);
- if (index >= 0) return false;
- context = context->previous();
- }
-
- // No local or potential with statement found so the variable is
- // global unless it is shadowed by an eval-introduced variable.
- return true;
-}
-
-
-void Context::ComputeEvalScopeInfo(bool* outer_scope_calls_non_strict_eval) {
- // Skip up the context chain checking all the function contexts to see
- // whether they call eval.
- Context* context = this;
- while (!context->IsGlobalContext()) {
- if (context->IsFunctionContext()) {
- Handle<SerializedScopeInfo> scope_info(
- context->closure()->shared()->scope_info());
- if (scope_info->CallsEval() && !scope_info->IsStrictMode()) {
- // No need to go further since the answers will not change from
- // here.
- *outer_scope_calls_non_strict_eval = true;
- return;
- }
- }
- context = context->previous();
- }
-}
-
-
void Context::AddOptimizedFunction(JSFunction* function) {
ASSERT(IsGlobalContext());
#ifdef DEBUG
diff --git a/src/3rdparty/v8/src/contexts.h b/src/3rdparty/v8/src/contexts.h
index c3cfeee..af5cb03 100644
--- a/src/3rdparty/v8/src/contexts.h
+++ b/src/3rdparty/v8/src/contexts.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -104,7 +104,11 @@ enum BindingFlags {
V(STRING_FUNCTION_INDEX, JSFunction, string_function) \
V(STRING_FUNCTION_PROTOTYPE_MAP_INDEX, Map, string_function_prototype_map) \
V(OBJECT_FUNCTION_INDEX, JSFunction, object_function) \
+ V(INTERNAL_ARRAY_FUNCTION_INDEX, JSFunction, internal_array_function) \
V(ARRAY_FUNCTION_INDEX, JSFunction, array_function) \
+ V(SMI_JS_ARRAY_MAP_INDEX, Object, smi_js_array_map) \
+ V(DOUBLE_JS_ARRAY_MAP_INDEX, Object, double_js_array_map) \
+ V(OBJECT_JS_ARRAY_MAP_INDEX, Object, object_js_array_map) \
V(DATE_FUNCTION_INDEX, JSFunction, date_function) \
V(JSON_OBJECT_INDEX, JSObject, json_object) \
V(REGEXP_FUNCTION_INDEX, JSFunction, regexp_function) \
@@ -128,7 +132,6 @@ enum BindingFlags {
V(FUNCTION_INSTANCE_MAP_INDEX, Map, function_instance_map) \
V(STRICT_MODE_FUNCTION_INSTANCE_MAP_INDEX, Map, \
strict_mode_function_instance_map) \
- V(JS_ARRAY_MAP_INDEX, Map, js_array_map)\
V(REGEXP_RESULT_MAP_INDEX, Map, regexp_result_map)\
V(ARGUMENTS_BOILERPLATE_INDEX, JSObject, arguments_boilerplate) \
V(ALIASED_ARGUMENTS_BOILERPLATE_INDEX, JSObject, \
@@ -218,7 +221,6 @@ class Context: public FixedArray {
// (with contexts), or the variable name (catch contexts), the serialized
// scope info (block contexts).
EXTENSION_INDEX,
- QML_GLOBAL_INDEX,
GLOBAL_INDEX,
MIN_CONTEXT_SLOTS,
@@ -231,7 +233,6 @@ class Context: public FixedArray {
ARGUMENTS_BOILERPLATE_INDEX,
ALIASED_ARGUMENTS_BOILERPLATE_INDEX,
STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX,
- JS_ARRAY_MAP_INDEX,
REGEXP_RESULT_MAP_INDEX,
FUNCTION_MAP_INDEX,
STRICT_MODE_FUNCTION_MAP_INDEX,
@@ -245,7 +246,11 @@ class Context: public FixedArray {
STRING_FUNCTION_INDEX,
STRING_FUNCTION_PROTOTYPE_MAP_INDEX,
OBJECT_FUNCTION_INDEX,
+ INTERNAL_ARRAY_FUNCTION_INDEX,
ARRAY_FUNCTION_INDEX,
+ SMI_JS_ARRAY_MAP_INDEX,
+ DOUBLE_JS_ARRAY_MAP_INDEX,
+ OBJECT_JS_ARRAY_MAP_INDEX,
DATE_FUNCTION_INDEX,
JSON_OBJECT_INDEX,
REGEXP_FUNCTION_INDEX,
@@ -322,9 +327,6 @@ class Context: public FixedArray {
}
void set_global(GlobalObject* global) { set(GLOBAL_INDEX, global); }
- JSObject *qml_global() { return reinterpret_cast<JSObject *>(get(QML_GLOBAL_INDEX)); }
- void set_qml_global(JSObject *qml_global) { set(QML_GLOBAL_INDEX, qml_global); }
-
// Returns a JSGlobalProxy object or null.
JSObject* global_proxy();
void set_global_proxy(JSObject* global);
@@ -354,6 +356,10 @@ class Context: public FixedArray {
Map* map = this->map();
return map == map->GetHeap()->block_context_map();
}
+ bool IsModuleContext() {
+ Map* map = this->map();
+ return map == map->GetHeap()->module_context_map();
+ }
// Tells whether the global context is marked with out of memory.
inline bool has_out_of_memory();
@@ -367,6 +373,18 @@ class Context: public FixedArray {
Object* OptimizedFunctionsListHead();
void ClearOptimizedFunctions();
+ static int GetContextMapIndexFromElementsKind(
+ ElementsKind elements_kind) {
+ if (elements_kind == FAST_DOUBLE_ELEMENTS) {
+ return Context::DOUBLE_JS_ARRAY_MAP_INDEX;
+ } else if (elements_kind == FAST_ELEMENTS) {
+ return Context::OBJECT_JS_ARRAY_MAP_INDEX;
+ } else {
+ ASSERT(elements_kind == FAST_SMI_ONLY_ELEMENTS);
+ return Context::SMI_JS_ARRAY_MAP_INDEX;
+ }
+ }
+
#define GLOBAL_CONTEXT_FIELD_ACCESSORS(index, type, name) \
void set_##name(type* value) { \
ASSERT(IsGlobalContext()); \
@@ -402,19 +420,6 @@ class Context: public FixedArray {
PropertyAttributes* attributes,
BindingFlags* binding_flags);
- // Determine if a local variable with the given name exists in a
- // context. Do not consider context extension objects. This is
- // used for compiling code using eval. If the context surrounding
- // the eval call does not have a local variable with this name and
- // does not contain a with statement the property is global unless
- // it is shadowed by a property in an extension object introduced by
- // eval.
- bool GlobalIfNotShadowedByEval(Handle<String> name);
-
- // Determine if any function scope in the context call eval and if
- // any of those calls are in non-strict mode.
- void ComputeEvalScopeInfo(bool* outer_scope_calls_non_strict_eval);
-
// Code generation support.
static int SlotOffset(int index) {
return kHeaderSize + index * kPointerSize - kHeapObjectTag;
diff --git a/src/3rdparty/v8/src/conversions-inl.h b/src/3rdparty/v8/src/conversions-inl.h
index 8bc11bf..b098a1c 100644
--- a/src/3rdparty/v8/src/conversions-inl.h
+++ b/src/3rdparty/v8/src/conversions-inl.h
@@ -46,7 +46,7 @@
namespace v8 {
namespace internal {
-static inline double JunkStringValue() {
+inline double JunkStringValue() {
return BitCast<double, uint64_t>(kQuietNaNMask);
}
@@ -54,7 +54,7 @@ static inline double JunkStringValue() {
// The fast double-to-unsigned-int conversion routine does not guarantee
// rounding towards zero, or any reasonable value if the argument is larger
// than what fits in an unsigned 32-bit integer.
-static inline unsigned int FastD2UI(double x) {
+inline unsigned int FastD2UI(double x) {
// There is no unsigned version of lrint, so there is no fast path
// in this function as there is in FastD2I. Using lrint doesn't work
// for values of 2^31 and above.
@@ -80,7 +80,7 @@ static inline unsigned int FastD2UI(double x) {
}
-static inline double DoubleToInteger(double x) {
+inline double DoubleToInteger(double x) {
if (isnan(x)) return 0;
if (!isfinite(x) || x == 0) return x;
return (x >= 0) ? floor(x) : ceil(x);
@@ -103,9 +103,9 @@ int32_t DoubleToInt32(double x) {
template <class Iterator, class EndMark>
-static bool SubStringEquals(Iterator* current,
- EndMark end,
- const char* substring) {
+bool SubStringEquals(Iterator* current,
+ EndMark end,
+ const char* substring) {
ASSERT(**current == *substring);
for (substring++; *substring != '\0'; substring++) {
++*current;
@@ -119,9 +119,9 @@ static bool SubStringEquals(Iterator* current,
// Returns true if a nonspace character has been found and false if the
// end was been reached before finding a nonspace character.
template <class Iterator, class EndMark>
-static inline bool AdvanceToNonspace(UnicodeCache* unicode_cache,
- Iterator* current,
- EndMark end) {
+inline bool AdvanceToNonspace(UnicodeCache* unicode_cache,
+ Iterator* current,
+ EndMark end) {
while (*current != end) {
if (!unicode_cache->IsWhiteSpace(**current)) return true;
++*current;
@@ -132,11 +132,11 @@ static inline bool AdvanceToNonspace(UnicodeCache* unicode_cache,
// Parsing integers with radix 2, 4, 8, 16, 32. Assumes current != end.
template <int radix_log_2, class Iterator, class EndMark>
-static double InternalStringToIntDouble(UnicodeCache* unicode_cache,
- Iterator current,
- EndMark end,
- bool negative,
- bool allow_trailing_junk) {
+double InternalStringToIntDouble(UnicodeCache* unicode_cache,
+ Iterator current,
+ EndMark end,
+ bool negative,
+ bool allow_trailing_junk) {
ASSERT(current != end);
// Skip leading 0s.
@@ -235,10 +235,10 @@ static double InternalStringToIntDouble(UnicodeCache* unicode_cache,
template <class Iterator, class EndMark>
-static double InternalStringToInt(UnicodeCache* unicode_cache,
- Iterator current,
- EndMark end,
- int radix) {
+double InternalStringToInt(UnicodeCache* unicode_cache,
+ Iterator current,
+ EndMark end,
+ int radix) {
const bool allow_trailing_junk = true;
const double empty_string_val = JunkStringValue();
@@ -430,11 +430,11 @@ static double InternalStringToInt(UnicodeCache* unicode_cache,
// 2. *current - gets the current character in the sequence.
// 3. ++current (advances the position).
template <class Iterator, class EndMark>
-static double InternalStringToDouble(UnicodeCache* unicode_cache,
- Iterator current,
- EndMark end,
- int flags,
- double empty_string_val) {
+double InternalStringToDouble(UnicodeCache* unicode_cache,
+ Iterator current,
+ EndMark end,
+ int flags,
+ double empty_string_val) {
// To make sure that iterator dereferencing is valid the following
// convention is used:
// 1. Each '++current' statement is followed by check for equality to 'end'.
diff --git a/src/3rdparty/v8/src/conversions.h b/src/3rdparty/v8/src/conversions.h
index 31aaf6b..70559c9 100644
--- a/src/3rdparty/v8/src/conversions.h
+++ b/src/3rdparty/v8/src/conversions.h
@@ -45,14 +45,14 @@ class UnicodeCache;
const int kMaxSignificantDigits = 772;
-static inline bool isDigit(int x, int radix) {
+inline bool isDigit(int x, int radix) {
return (x >= '0' && x <= '9' && x < '0' + radix)
|| (radix > 10 && x >= 'a' && x < 'a' + radix - 10)
|| (radix > 10 && x >= 'A' && x < 'A' + radix - 10);
}
-static inline double SignedZero(bool negative) {
+inline double SignedZero(bool negative) {
return negative ? -0.0 : 0.0;
}
@@ -61,16 +61,16 @@ static inline double SignedZero(bool negative) {
// rounding towards zero.
// The result is unspecified if x is infinite or NaN, or if the rounded
// integer value is outside the range of type int.
-static inline int FastD2I(double x) {
+inline int FastD2I(double x) {
// The static_cast convertion from double to int used to be slow, but
// as new benchmarks show, now it is much faster than lrint().
return static_cast<int>(x);
}
-static inline unsigned int FastD2UI(double x);
+inline unsigned int FastD2UI(double x);
-static inline double FastI2D(int x) {
+inline double FastI2D(int x) {
// There is no rounding involved in converting an integer to a
// double, so this code should compile to a few instructions without
// any FPU pipeline stalls.
@@ -78,7 +78,7 @@ static inline double FastI2D(int x) {
}
-static inline double FastUI2D(unsigned x) {
+inline double FastUI2D(unsigned x) {
// There is no rounding involved in converting an unsigned integer to a
// double, so this code should compile to a few instructions without
// any FPU pipeline stalls.
@@ -87,15 +87,15 @@ static inline double FastUI2D(unsigned x) {
// This function should match the exact semantics of ECMA-262 9.4.
-static inline double DoubleToInteger(double x);
+inline double DoubleToInteger(double x);
// This function should match the exact semantics of ECMA-262 9.5.
-static inline int32_t DoubleToInt32(double x);
+inline int32_t DoubleToInt32(double x);
// This function should match the exact semantics of ECMA-262 9.6.
-static inline uint32_t DoubleToUint32(double x) {
+inline uint32_t DoubleToUint32(double x) {
return static_cast<uint32_t>(DoubleToInt32(x));
}
diff --git a/src/3rdparty/v8/src/cpu-profiler.cc b/src/3rdparty/v8/src/cpu-profiler.cc
index d74c034..3cbac77 100644
--- a/src/3rdparty/v8/src/cpu-profiler.cc
+++ b/src/3rdparty/v8/src/cpu-profiler.cc
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -39,13 +39,14 @@
namespace v8 {
namespace internal {
-static const int kEventsBufferSize = 256*KB;
-static const int kTickSamplesBufferChunkSize = 64*KB;
+static const int kEventsBufferSize = 256 * KB;
+static const int kTickSamplesBufferChunkSize = 64 * KB;
static const int kTickSamplesBufferChunksCount = 16;
+static const int kProfilerStackSize = 64 * KB;
ProfilerEventsProcessor::ProfilerEventsProcessor(ProfileGenerator* generator)
- : Thread("v8:ProfEvntProc"),
+ : Thread(Thread::Options("v8:ProfEvntProc", kProfilerStackSize)),
generator_(generator),
running_(true),
ticks_buffer_(sizeof(TickSampleEventRecord),
@@ -493,7 +494,7 @@ void CpuProfiler::StartProcessorIfNotStarted() {
NoBarrier_Store(&is_profiling_, true);
processor_->Start();
// Enumerate stuff we already have in the heap.
- if (isolate->heap()->HasBeenSetup()) {
+ if (isolate->heap()->HasBeenSetUp()) {
if (!FLAG_prof_browser_mode) {
bool saved_log_code_flag = FLAG_log_code;
FLAG_log_code = true;
@@ -562,7 +563,7 @@ void CpuProfiler::StopProcessor() {
}
-void CpuProfiler::Setup() {
+void CpuProfiler::SetUp() {
Isolate* isolate = Isolate::Current();
if (isolate->cpu_profiler() == NULL) {
isolate->set_cpu_profiler(new CpuProfiler());
diff --git a/src/3rdparty/v8/src/cpu-profiler.h b/src/3rdparty/v8/src/cpu-profiler.h
index a71c0e0..6e2e771 100644
--- a/src/3rdparty/v8/src/cpu-profiler.h
+++ b/src/3rdparty/v8/src/cpu-profiler.h
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -41,7 +41,6 @@ class CodeEntry;
class CodeMap;
class CpuProfile;
class CpuProfilesCollection;
-class HashMap;
class ProfileGenerator;
class TokenEnumerator;
@@ -204,7 +203,7 @@ namespace internal {
// TODO(isolates): isolatify this class.
class CpuProfiler {
public:
- static void Setup();
+ static void SetUp();
static void TearDown();
static void StartProfiling(const char* title);
@@ -230,11 +229,11 @@ class CpuProfiler {
Code* code, String* name);
static void CodeCreateEvent(Logger::LogEventsAndTags tag,
Code* code,
- SharedFunctionInfo *shared,
+ SharedFunctionInfo* shared,
String* name);
static void CodeCreateEvent(Logger::LogEventsAndTags tag,
Code* code,
- SharedFunctionInfo *shared,
+ SharedFunctionInfo* shared,
String* source, int line);
static void CodeCreateEvent(Logger::LogEventsAndTags tag,
Code* code, int args_count);
diff --git a/src/3rdparty/v8/src/cpu.h b/src/3rdparty/v8/src/cpu.h
index 2525484..247af71 100644
--- a/src/3rdparty/v8/src/cpu.h
+++ b/src/3rdparty/v8/src/cpu.h
@@ -53,7 +53,7 @@ namespace internal {
class CPU : public AllStatic {
public:
// Initializes the cpu architecture support. Called once at VM startup.
- static void Setup();
+ static void SetUp();
static bool SupportsCrankshaft();
diff --git a/src/3rdparty/v8/src/d8-debug.cc b/src/3rdparty/v8/src/d8-debug.cc
index 8fbc876..de0faa8 100644
--- a/src/3rdparty/v8/src/d8-debug.cc
+++ b/src/3rdparty/v8/src/d8-debug.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -169,7 +169,7 @@ void RemoteDebugger::Run() {
bool ok;
// Make sure that socket support is initialized.
- ok = i::Socket::Setup();
+ ok = i::Socket::SetUp();
if (!ok) {
printf("Unable to initialize socket support %d\n", i::Socket::LastError());
return;
@@ -310,9 +310,7 @@ void RemoteDebugger::HandleKeyboardCommand(char* command) {
Handle<Value> request =
Shell::DebugCommandToJSONRequest(String::New(command));
if (try_catch.HasCaught()) {
- v8::String::Utf8Value exception(try_catch.Exception());
- const char* exception_string = Shell::ToCString(exception);
- printf("%s\n", exception_string);
+ Shell::ReportException(&try_catch);
PrintPrompt();
return;
}
diff --git a/src/3rdparty/v8/src/d8-posix.cc b/src/3rdparty/v8/src/d8-posix.cc
index 289c3b0..8a278e4 100644
--- a/src/3rdparty/v8/src/d8-posix.cc
+++ b/src/3rdparty/v8/src/d8-posix.cc
@@ -366,7 +366,8 @@ static Handle<Value> GetStdout(int child_fd,
// We're disabling usage of waitid in Mac OS X because it doens't work for us:
// a parent process hangs on waiting while a child process is already a zombie.
// See http://code.google.com/p/v8/issues/detail?id=401.
-#if defined(WNOWAIT) && !defined(ANDROID) && !defined(__APPLE__)
+#if defined(WNOWAIT) && !defined(ANDROID) && !defined(__APPLE__) \
+ && !defined(__NetBSD__)
#if !defined(__FreeBSD__)
#define HAS_WAITID 1
#endif
diff --git a/src/3rdparty/v8/src/d8-readline.cc b/src/3rdparty/v8/src/d8-readline.cc
index 71be933..ed7721c 100644
--- a/src/3rdparty/v8/src/d8-readline.cc
+++ b/src/3rdparty/v8/src/d8-readline.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -49,10 +49,14 @@ namespace v8 {
class ReadLineEditor: public LineEditor {
public:
ReadLineEditor() : LineEditor(LineEditor::READLINE, "readline") { }
- virtual i::SmartArrayPointer<char> Prompt(const char* prompt);
+ virtual Handle<String> Prompt(const char* prompt);
virtual bool Open();
virtual bool Close();
virtual void AddHistory(const char* str);
+
+ static const char* kHistoryFileName;
+ static const int kMaxHistoryEntries;
+
private:
static char** AttemptedCompletion(const char* text, int start, int end);
static char* CompletionGenerator(const char* text, int state);
@@ -66,25 +70,38 @@ char ReadLineEditor::kWordBreakCharacters[] = {' ', '\t', '\n', '"',
'\0'};
+const char* ReadLineEditor::kHistoryFileName = ".d8_history";
+const int ReadLineEditor::kMaxHistoryEntries = 1000;
+
+
bool ReadLineEditor::Open() {
rl_initialize();
rl_attempted_completion_function = AttemptedCompletion;
rl_completer_word_break_characters = kWordBreakCharacters;
rl_bind_key('\t', rl_complete);
using_history();
- stifle_history(Shell::kMaxHistoryEntries);
- return read_history(Shell::kHistoryFileName) == 0;
+ stifle_history(kMaxHistoryEntries);
+ return read_history(kHistoryFileName) == 0;
}
bool ReadLineEditor::Close() {
- return write_history(Shell::kHistoryFileName) == 0;
+ return write_history(kHistoryFileName) == 0;
}
-i::SmartArrayPointer<char> ReadLineEditor::Prompt(const char* prompt) {
- char* result = readline(prompt);
- return i::SmartArrayPointer<char>(result);
+Handle<String> ReadLineEditor::Prompt(const char* prompt) {
+ char* result = NULL;
+ { // Release lock for blocking input.
+ Unlocker unlock(Isolate::GetCurrent());
+ result = readline(prompt);
+ }
+ if (result != NULL) {
+ AddHistory(result);
+ } else {
+ return Handle<String>();
+ }
+ return String::New(result);
}
@@ -118,10 +135,10 @@ char* ReadLineEditor::CompletionGenerator(const char* text, int state) {
static unsigned current_index;
static Persistent<Array> current_completions;
if (state == 0) {
- i::SmartArrayPointer<char> full_text(i::StrNDup(rl_line_buffer, rl_point));
HandleScope scope;
+ Local<String> full_text = String::New(rl_line_buffer, rl_point);
Handle<Array> completions =
- Shell::GetCompletions(String::New(text), String::New(*full_text));
+ Shell::GetCompletions(String::New(text), full_text);
current_completions = Persistent<Array>::New(completions);
current_index = 0;
}
diff --git a/src/3rdparty/v8/src/d8.cc b/src/3rdparty/v8/src/d8.cc
index 64ada2c..1e8b4c8 100644
--- a/src/3rdparty/v8/src/d8.cc
+++ b/src/3rdparty/v8/src/d8.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -66,11 +66,7 @@
namespace v8 {
-
-#ifndef V8_SHARED
LineEditor *LineEditor::first_ = NULL;
-const char* Shell::kHistoryFileName = ".d8_history";
-const int Shell::kMaxHistoryEntries = 1000;
LineEditor::LineEditor(Type type, const char* name)
@@ -96,36 +92,37 @@ LineEditor* LineEditor::Get() {
class DumbLineEditor: public LineEditor {
public:
DumbLineEditor() : LineEditor(LineEditor::DUMB, "dumb") { }
- virtual i::SmartArrayPointer<char> Prompt(const char* prompt);
+ virtual Handle<String> Prompt(const char* prompt);
};
static DumbLineEditor dumb_line_editor;
-i::SmartArrayPointer<char> DumbLineEditor::Prompt(const char* prompt) {
- static const int kBufferSize = 256;
- char buffer[kBufferSize];
+Handle<String> DumbLineEditor::Prompt(const char* prompt) {
printf("%s", prompt);
- char* str = fgets(buffer, kBufferSize, stdin);
- return i::SmartArrayPointer<char>(str ? i::StrDup(str) : str);
+ return Shell::ReadFromStdin();
}
+#ifndef V8_SHARED
CounterMap* Shell::counter_map_;
i::OS::MemoryMappedFile* Shell::counters_file_ = NULL;
CounterCollection Shell::local_counters_;
CounterCollection* Shell::counters_ = &local_counters_;
i::Mutex* Shell::context_mutex_(i::OS::CreateMutex());
Persistent<Context> Shell::utility_context_;
-LineEditor* Shell::console = NULL;
#endif // V8_SHARED
+LineEditor* Shell::console = NULL;
Persistent<Context> Shell::evaluation_context_;
ShellOptions Shell::options;
const char* Shell::kPrompt = "d8> ";
+const int MB = 1024 * 1024;
+
+
#ifndef V8_SHARED
bool CounterMap::Match(void* key1, void* key2) {
const char* name1 = reinterpret_cast<const char*>(key1);
@@ -238,7 +235,7 @@ Handle<Value> Shell::Read(const Arguments& args) {
}
-Handle<Value> Shell::ReadLine(const Arguments& args) {
+Handle<String> Shell::ReadFromStdin() {
static const int kBufferSize = 256;
char buffer[kBufferSize];
Handle<String> accumulator = String::New("");
@@ -247,7 +244,12 @@ Handle<Value> Shell::ReadLine(const Arguments& args) {
// Continue reading if the line ends with an escape '\\' or the line has
// not been fully read into the buffer yet (does not end with '\n').
// If fgets gets an error, just give up.
- if (fgets(buffer, kBufferSize, stdin) == NULL) return Null();
+ char* input = NULL;
+ { // Release lock for blocking input.
+ Unlocker unlock(Isolate::GetCurrent());
+ input = fgets(buffer, kBufferSize, stdin);
+ }
+ if (input == NULL) return Handle<String>();
length = static_cast<int>(strlen(buffer));
if (length == 0) {
return accumulator;
@@ -281,51 +283,172 @@ Handle<Value> Shell::Load(const Arguments& args) {
return Undefined();
}
+static size_t convertToUint(Local<Value> value_in, TryCatch* try_catch) {
+ if (value_in->IsUint32()) {
+ return value_in->Uint32Value();
+ }
+
+ Local<Value> number = value_in->ToNumber();
+ if (try_catch->HasCaught()) return 0;
+
+ ASSERT(number->IsNumber());
+ Local<Int32> int32 = number->ToInt32();
+ if (try_catch->HasCaught() || int32.IsEmpty()) return 0;
+
+ int32_t raw_value = int32->Int32Value();
+ if (try_catch->HasCaught()) return 0;
+
+ if (raw_value < 0) {
+ ThrowException(String::New("Array length must not be negative."));
+ return 0;
+ }
+
+ static const int kMaxLength = 0x3fffffff;
+#ifndef V8_SHARED
+ ASSERT(kMaxLength == i::ExternalArray::kMaxLength);
+#endif // V8_SHARED
+ if (raw_value > static_cast<int32_t>(kMaxLength)) {
+ ThrowException(
+ String::New("Array length exceeds maximum length."));
+ }
+ return static_cast<size_t>(raw_value);
+}
+
+
+const char kArrayBufferReferencePropName[] = "_is_array_buffer_";
+const char kArrayBufferMarkerPropName[] = "_array_buffer_ref_";
+
Handle<Value> Shell::CreateExternalArray(const Arguments& args,
ExternalArrayType type,
size_t element_size) {
+ TryCatch try_catch;
+ bool is_array_buffer_construct = element_size == 0;
+ if (is_array_buffer_construct) {
+ type = v8::kExternalByteArray;
+ element_size = 1;
+ }
ASSERT(element_size == 1 || element_size == 2 || element_size == 4 ||
element_size == 8);
- if (args.Length() != 1) {
+ if (args.Length() == 0) {
return ThrowException(
- String::New("Array constructor needs one parameter."));
+ String::New("Array constructor must have at least one "
+ "parameter."));
}
- static const int kMaxLength = 0x3fffffff;
-#ifndef V8_SHARED
- ASSERT(kMaxLength == i::ExternalArray::kMaxLength);
-#endif // V8_SHARED
- size_t length = 0;
- if (args[0]->IsUint32()) {
- length = args[0]->Uint32Value();
- } else {
- Local<Number> number = args[0]->ToNumber();
- if (number.IsEmpty() || !number->IsNumber()) {
- return ThrowException(String::New("Array length must be a number."));
+ bool first_arg_is_array_buffer =
+ args[0]->IsObject() &&
+ args[0]->ToObject()->Get(
+ String::New(kArrayBufferMarkerPropName))->IsTrue();
+ // Currently, only the following constructors are supported:
+ // TypedArray(unsigned long length)
+ // TypedArray(ArrayBuffer buffer,
+ // optional unsigned long byteOffset,
+ // optional unsigned long length)
+ if (args.Length() > 3) {
+ return ThrowException(
+ String::New("Array constructor from ArrayBuffer must "
+ "have 1-3 parameters."));
+ }
+
+ Local<Value> length_value = (args.Length() < 3)
+ ? (first_arg_is_array_buffer
+ ? args[0]->ToObject()->Get(String::New("length"))
+ : args[0])
+ : args[2];
+ size_t length = convertToUint(length_value, &try_catch);
+ if (try_catch.HasCaught()) return try_catch.Exception();
+
+ void* data = NULL;
+ size_t offset = 0;
+
+ Handle<Object> array = Object::New();
+ if (first_arg_is_array_buffer) {
+ Handle<Object> derived_from = args[0]->ToObject();
+ data = derived_from->GetIndexedPropertiesExternalArrayData();
+
+ size_t array_buffer_length = convertToUint(
+ derived_from->Get(String::New("length")),
+ &try_catch);
+ if (try_catch.HasCaught()) return try_catch.Exception();
+
+ if (data == NULL && array_buffer_length != 0) {
+ return ThrowException(
+ String::New("ArrayBuffer doesn't have data"));
}
- int32_t raw_length = number->ToInt32()->Int32Value();
- if (raw_length < 0) {
- return ThrowException(String::New("Array length must not be negative."));
+
+ if (args.Length() > 1) {
+ offset = convertToUint(args[1], &try_catch);
+ if (try_catch.HasCaught()) return try_catch.Exception();
+
+ // The given byteOffset must be a multiple of the element size of the
+ // specific type, otherwise an exception is raised.
+ if (offset % element_size != 0) {
+ return ThrowException(
+ String::New("offset must be multiple of element_size"));
+ }
}
- if (raw_length > static_cast<int32_t>(kMaxLength)) {
+
+ if (offset > array_buffer_length) {
return ThrowException(
- String::New("Array length exceeds maximum length."));
+ String::New("byteOffset must be less than ArrayBuffer length."));
}
- length = static_cast<size_t>(raw_length);
- }
- if (length > static_cast<size_t>(kMaxLength)) {
- return ThrowException(String::New("Array length exceeds maximum length."));
+
+ if (args.Length() == 2) {
+ // If length is not explicitly specified, the length of the ArrayBuffer
+ // minus the byteOffset must be a multiple of the element size of the
+ // specific type, or an exception is raised.
+ length = array_buffer_length - offset;
+ }
+
+ if (args.Length() != 3) {
+ if (length % element_size != 0) {
+ return ThrowException(
+ String::New("ArrayBuffer length minus the byteOffset must be a "
+ "multiple of the element size"));
+ }
+ length /= element_size;
+ }
+
+ // If a given byteOffset and length references an area beyond the end of
+ // the ArrayBuffer an exception is raised.
+ if (offset + (length * element_size) > array_buffer_length) {
+ return ThrowException(
+ String::New("length references an area beyond the end of the "
+ "ArrayBuffer"));
+ }
+
+ // Hold a reference to the ArrayBuffer so its buffer doesn't get collected.
+ array->Set(String::New(kArrayBufferReferencePropName), args[0], ReadOnly);
}
- void* data = calloc(length, element_size);
- if (data == NULL) {
- return ThrowException(String::New("Memory allocation failed."));
+
+ if (is_array_buffer_construct) {
+ array->Set(String::New(kArrayBufferMarkerPropName), True(), ReadOnly);
}
- Handle<Object> array = Object::New();
+
Persistent<Object> persistent_array = Persistent<Object>::New(array);
+ if (data == NULL && length != 0) {
+ // Make sure the total size fits into a (signed) int.
+ static const int kMaxSize = 0x7fffffff;
+ if (length > (kMaxSize - sizeof(size_t)) / element_size) {
+ return ThrowException(String::New("Array exceeds maximum size (2G)"));
+ }
+ // Prepend the size of the allocated chunk to the data itself.
+ int total_size = length * element_size + sizeof(size_t);
+ data = malloc(total_size);
+ if (data == NULL) {
+ return ThrowException(String::New("Memory allocation failed."));
+ }
+ *reinterpret_cast<size_t*>(data) = total_size;
+ data = reinterpret_cast<size_t*>(data) + 1;
+ memset(data, 0, length * element_size);
+ V8::AdjustAmountOfExternalAllocatedMemory(total_size);
+ }
persistent_array.MakeWeak(data, ExternalArrayWeakCallback);
persistent_array.MarkIndependent();
- array->SetIndexedPropertiesToExternalArrayData(data, type,
- static_cast<int>(length));
+
+ array->SetIndexedPropertiesToExternalArrayData(
+ reinterpret_cast<uint8_t*>(data) + offset, type,
+ static_cast<int>(length));
array->Set(String::New("length"),
Int32::New(static_cast<int32_t>(length)), ReadOnly);
array->Set(String::New("BYTES_PER_ELEMENT"),
@@ -335,11 +458,25 @@ Handle<Value> Shell::CreateExternalArray(const Arguments& args,
void Shell::ExternalArrayWeakCallback(Persistent<Value> object, void* data) {
- free(data);
+ HandleScope scope;
+ Handle<String> prop_name = String::New(kArrayBufferReferencePropName);
+ Handle<Object> converted_object = object->ToObject();
+ Local<Value> prop_value = converted_object->Get(prop_name);
+ if (data != NULL && !prop_value->IsObject()) {
+ data = reinterpret_cast<size_t*>(data) - 1;
+ V8::AdjustAmountOfExternalAllocatedMemory(
+ -static_cast<int>(*reinterpret_cast<size_t*>(data)));
+ free(data);
+ }
object.Dispose();
}
+Handle<Value> Shell::ArrayBuffer(const Arguments& args) {
+ return CreateExternalArray(args, v8::kExternalByteArray, 0);
+}
+
+
Handle<Value> Shell::Int8Array(const Arguments& args) {
return CreateExternalArray(args, v8::kExternalByteArray, sizeof(int8_t));
}
@@ -411,6 +548,10 @@ Handle<Value> Shell::Version(const Arguments& args) {
void Shell::ReportException(v8::TryCatch* try_catch) {
HandleScope handle_scope;
+#if !defined(V8_SHARED) && defined(ENABLE_DEBUGGER_SUPPORT)
+ bool enter_context = !Context::InContext();
+ if (enter_context) utility_context_->Enter();
+#endif // !V8_SHARED && ENABLE_DEBUGGER_SUPPORT
v8::String::Utf8Value exception(try_catch->Exception());
const char* exception_string = ToCString(exception);
Handle<Message> message = try_catch->Message();
@@ -445,6 +586,9 @@ void Shell::ReportException(v8::TryCatch* try_catch) {
}
}
printf("\n");
+#if !defined(V8_SHARED) && defined(ENABLE_DEBUGGER_SUPPORT)
+ if (enter_context) utility_context_->Exit();
+#endif // !V8_SHARED && ENABLE_DEBUGGER_SUPPORT
}
@@ -482,6 +626,12 @@ Handle<Value> Shell::DebugCommandToJSONRequest(Handle<String> command) {
Handle<Value> val = Handle<Function>::Cast(fun)->Call(global, kArgc, argv);
return val;
}
+
+
+void Shell::DispatchDebugMessages() {
+ v8::Context::Scope scope(Shell::evaluation_context_);
+ v8::Debug::ProcessDebugMessages();
+}
#endif // ENABLE_DEBUGGER_SUPPORT
#endif // V8_SHARED
@@ -670,6 +820,8 @@ Handle<ObjectTemplate> Shell::CreateGlobalTemplate() {
global_template->Set(String::New("print"), FunctionTemplate::New(Print));
global_template->Set(String::New("write"), FunctionTemplate::New(Write));
global_template->Set(String::New("read"), FunctionTemplate::New(Read));
+ global_template->Set(String::New("readbinary"),
+ FunctionTemplate::New(ReadBinary));
global_template->Set(String::New("readline"),
FunctionTemplate::New(ReadLine));
global_template->Set(String::New("load"), FunctionTemplate::New(Load));
@@ -681,6 +833,8 @@ Handle<ObjectTemplate> Shell::CreateGlobalTemplate() {
FunctionTemplate::New(DisableProfiler));
// Bind the handlers for external arrays.
+ global_template->Set(String::New("ArrayBuffer"),
+ FunctionTemplate::New(ArrayBuffer));
global_template->Set(String::New("Int8Array"),
FunctionTemplate::New(Int8Array));
global_template->Set(String::New("Uint8Array"),
@@ -749,6 +903,7 @@ void Shell::Initialize() {
// Start the debugger agent if requested.
if (i::FLAG_debugger_agent) {
v8::Debug::EnableAgent("d8 shell", i::FLAG_debugger_port, true);
+ v8::Debug::SetDebugMessageDispatchHandler(DispatchDebugMessages, true);
}
#endif // ENABLE_DEBUGGER_SUPPORT
#endif // V8_SHARED
@@ -794,25 +949,50 @@ void Shell::Exit(int exit_code) {
#ifndef V8_SHARED
+struct CounterAndKey {
+ Counter* counter;
+ const char* key;
+};
+
+
+int CompareKeys(const void* a, const void* b) {
+ return strcmp(static_cast<const CounterAndKey*>(a)->key,
+ static_cast<const CounterAndKey*>(b)->key);
+}
+
+
void Shell::OnExit() {
if (console != NULL) console->Close();
if (i::FLAG_dump_counters) {
- printf("+----------------------------------------+-------------+\n");
- printf("| Name | Value |\n");
- printf("+----------------------------------------+-------------+\n");
+ int number_of_counters = 0;
for (CounterMap::Iterator i(counter_map_); i.More(); i.Next()) {
- Counter* counter = i.CurrentValue();
+ number_of_counters++;
+ }
+ CounterAndKey* counters = new CounterAndKey[number_of_counters];
+ int j = 0;
+ for (CounterMap::Iterator i(counter_map_); i.More(); i.Next(), j++) {
+ counters[j].counter = i.CurrentValue();
+ counters[j].key = i.CurrentKey();
+ }
+ qsort(counters, number_of_counters, sizeof(counters[0]), CompareKeys);
+ printf("+--------------------------------------------+-------------+\n");
+ printf("| Name | Value |\n");
+ printf("+--------------------------------------------+-------------+\n");
+ for (j = 0; j < number_of_counters; j++) {
+ Counter* counter = counters[j].counter;
+ const char* key = counters[j].key;
if (counter->is_histogram()) {
- printf("| c:%-36s | %11i |\n", i.CurrentKey(), counter->count());
- printf("| t:%-36s | %11i |\n", i.CurrentKey(), counter->sample_total());
+ printf("| c:%-40s | %11i |\n", key, counter->count());
+ printf("| t:%-40s | %11i |\n", key, counter->sample_total());
} else {
- printf("| %-38s | %11i |\n", i.CurrentKey(), counter->count());
+ printf("| %-42s | %11i |\n", key, counter->count());
}
}
- printf("+----------------------------------------+-------------+\n");
+ printf("+--------------------------------------------+-------------+\n");
+ delete [] counters;
}
- if (counters_file_ != NULL)
- delete counters_file_;
+ delete counters_file_;
+ delete counter_map_;
}
#endif // V8_SHARED
@@ -860,6 +1040,23 @@ static char* ReadChars(const char* name, int* size_out) {
}
+Handle<Value> Shell::ReadBinary(const Arguments& args) {
+ String::Utf8Value filename(args[0]);
+ int size;
+ if (*filename == NULL) {
+ return ThrowException(String::New("Error loading file"));
+ }
+ char* chars = ReadChars(*filename, &size);
+ if (chars == NULL) {
+ return ThrowException(String::New("Error reading file"));
+ }
+ // We skip checking the string for UTF8 characters and use it raw as
+ // backing store for the external string with 8-bit characters.
+ BinaryResource* resource = new BinaryResource(chars, size);
+ return String::NewExternal(resource);
+}
+
+
#ifndef V8_SHARED
static char* ReadToken(char* data, char token) {
char* next = i::OS::StrChr(data, token);
@@ -899,28 +1096,15 @@ void Shell::RunShell() {
Context::Scope context_scope(evaluation_context_);
HandleScope outer_scope;
Handle<String> name = String::New("(d8)");
-#ifndef V8_SHARED
console = LineEditor::Get();
printf("V8 version %s [console: %s]\n", V8::GetVersion(), console->name());
console->Open();
while (true) {
- i::SmartArrayPointer<char> input = console->Prompt(Shell::kPrompt);
- if (input.is_empty()) break;
- console->AddHistory(*input);
HandleScope inner_scope;
- ExecuteString(String::New(*input), name, true, true);
+ Handle<String> input = console->Prompt(Shell::kPrompt);
+ if (input.IsEmpty()) break;
+ ExecuteString(input, name, true, true);
}
-#else
- printf("V8 version %s [D8 light using shared library]\n", V8::GetVersion());
- static const int kBufferSize = 256;
- while (true) {
- char buffer[kBufferSize];
- printf("%s", Shell::kPrompt);
- if (fgets(buffer, kBufferSize, stdin) == NULL) break;
- HandleScope inner_scope;
- ExecuteString(String::New(buffer), name, true, true);
- }
-#endif // V8_SHARED
printf("\n");
}
@@ -1043,14 +1227,11 @@ Handle<String> SourceGroup::ReadFile(const char* name) {
#ifndef V8_SHARED
i::Thread::Options SourceGroup::GetThreadOptions() {
- i::Thread::Options options;
- options.name = "IsolateThread";
// On some systems (OSX 10.6) the stack size default is 0.5Mb or less
// which is not enough to parse the big literal expressions used in tests.
// The stack size should be at least StackGuard::kLimitSize + some
- // OS-specific padding for thread startup code.
- options.stack_size = 2 << 20; // 2 Mb seems to be enough
- return options;
+ // OS-specific padding for thread startup code. 2Mbytes seems to be enough.
+ return i::Thread::Options("IsolateThread", 2 * MB);
}
@@ -1121,7 +1302,7 @@ bool Shell::SetOptions(int argc, char* argv[]) {
options.use_preemption = true;
argv[i] = NULL;
#endif // V8_SHARED
- } else if (strcmp(argv[i], "--no-preemption") == 0) {
+ } else if (strcmp(argv[i], "--nopreemption") == 0) {
#ifdef V8_SHARED
printf("D8 with shared library does not support multi-threading\n");
return false;
@@ -1269,6 +1450,13 @@ int Shell::RunMain(int argc, char* argv[]) {
}
if (!options.last_run) {
context.Dispose();
+#if !defined(V8_SHARED)
+ if (i::FLAG_send_idle_notification) {
+ const int kLongIdlePauseInMs = 1000;
+ V8::ContextDisposedNotification();
+ V8::IdleNotification(kLongIdlePauseInMs);
+ }
+#endif // !V8_SHARED
}
#ifndef V8_SHARED
@@ -1318,6 +1506,15 @@ int Shell::Main(int argc, char* argv[]) {
}
printf("======== Full Deoptimization =======\n");
Testing::DeoptimizeAll();
+#if !defined(V8_SHARED)
+ } else if (i::FLAG_stress_runs > 0) {
+ int stress_runs = i::FLAG_stress_runs;
+ for (int i = 0; i < stress_runs && result == 0; i++) {
+ printf("============ Run %d/%d ============\n", i + 1, stress_runs);
+ options.last_run = (i == stress_runs - 1);
+ result = RunMain(argc, argv);
+ }
+#endif
} else {
result = RunMain(argc, argv);
}
diff --git a/src/3rdparty/v8/src/d8.gyp b/src/3rdparty/v8/src/d8.gyp
index 70186cf..a8361e6 100644
--- a/src/3rdparty/v8/src/d8.gyp
+++ b/src/3rdparty/v8/src/d8.gyp
@@ -1,4 +1,4 @@
-# Copyright 2010 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -41,9 +41,6 @@
'include_dirs+': [
'../src',
],
- 'defines': [
- 'ENABLE_DEBUGGER_SUPPORT',
- ],
'sources': [
'd8.cc',
],
@@ -64,8 +61,8 @@
'libraries': [ '-lreadline', ],
'sources': [ 'd8-readline.cc' ],
}],
- [ '(OS=="linux" or OS=="mac" or OS=="freebsd" \
- or OS=="openbsd" or OS=="solaris")', {
+ ['(OS=="linux" or OS=="mac" or OS=="freebsd" or OS=="netbsd" \
+ or OS=="openbsd" or OS=="solaris" or OS=="android")', {
'sources': [ 'd8-posix.cc', ]
}],
[ 'OS=="win"', {
diff --git a/src/3rdparty/v8/src/d8.h b/src/3rdparty/v8/src/d8.h
index 15d8d5d..c872f90 100644
--- a/src/3rdparty/v8/src/d8.h
+++ b/src/3rdparty/v8/src/d8.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -116,14 +116,13 @@ class CounterMap {
#endif // V8_SHARED
-#ifndef V8_SHARED
class LineEditor {
public:
enum Type { DUMB = 0, READLINE = 1 };
LineEditor(Type type, const char* name);
virtual ~LineEditor() { }
- virtual i::SmartArrayPointer<char> Prompt(const char* prompt) = 0;
+ virtual Handle<String> Prompt(const char* prompt) = 0;
virtual bool Open() { return true; }
virtual bool Close() { return true; }
virtual void AddHistory(const char* str) { }
@@ -136,7 +135,6 @@ class LineEditor {
LineEditor* next_;
static LineEditor* first_;
};
-#endif // V8_SHARED
class SourceGroup {
@@ -197,6 +195,27 @@ class SourceGroup {
};
+class BinaryResource : public v8::String::ExternalAsciiStringResource {
+ public:
+ BinaryResource(const char* string, int length)
+ : data_(string),
+ length_(length) { }
+
+ ~BinaryResource() {
+ delete[] data_;
+ data_ = NULL;
+ length_ = 0;
+ }
+
+ virtual const char* data() const { return data_; }
+ virtual size_t length() const { return length_; }
+
+ private:
+ const char* data_;
+ size_t length_;
+};
+
+
class ShellOptions {
public:
ShellOptions() :
@@ -268,12 +287,13 @@ class Shell : public i::AllStatic {
size_t buckets);
static void AddHistogramSample(void* histogram, int sample);
static void MapCounters(const char* name);
-#endif // V8_SHARED
#ifdef ENABLE_DEBUGGER_SUPPORT
static Handle<Object> DebugMessageDetails(Handle<String> message);
static Handle<Value> DebugCommandToJSONRequest(Handle<String> command);
-#endif
+ static void DispatchDebugMessages();
+#endif // ENABLE_DEBUGGER_SUPPORT
+#endif // V8_SHARED
#ifdef WIN32
#undef Yield
@@ -287,8 +307,13 @@ class Shell : public i::AllStatic {
static Handle<Value> EnableProfiler(const Arguments& args);
static Handle<Value> DisableProfiler(const Arguments& args);
static Handle<Value> Read(const Arguments& args);
- static Handle<Value> ReadLine(const Arguments& args);
+ static Handle<Value> ReadBinary(const Arguments& args);
+ static Handle<String> ReadFromStdin();
+ static Handle<Value> ReadLine(const Arguments& args) {
+ return ReadFromStdin();
+ }
static Handle<Value> Load(const Arguments& args);
+ static Handle<Value> ArrayBuffer(const Arguments& args);
static Handle<Value> Int8Array(const Arguments& args);
static Handle<Value> Uint8Array(const Arguments& args);
static Handle<Value> Int16Array(const Arguments& args);
@@ -334,11 +359,8 @@ class Shell : public i::AllStatic {
static Handle<Value> RemoveDirectory(const Arguments& args);
static void AddOSMethods(Handle<ObjectTemplate> os_template);
-#ifndef V8_SHARED
- static const char* kHistoryFileName;
- static const int kMaxHistoryEntries;
+
static LineEditor* console;
-#endif // V8_SHARED
static const char* kPrompt;
static ShellOptions options;
diff --git a/src/3rdparty/v8/src/d8.js b/src/3rdparty/v8/src/d8.js
index 3009037..819135a 100644
--- a/src/3rdparty/v8/src/d8.js
+++ b/src/3rdparty/v8/src/d8.js
@@ -25,11 +25,14 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"use strict";
+
String.prototype.startsWith = function (str) {
- if (str.length > this.length)
+ if (str.length > this.length) {
return false;
+ }
return this.substr(0, str.length) == str;
-}
+};
function log10(num) {
return Math.log(num)/Math.log(10);
@@ -52,8 +55,9 @@ function GetCompletions(global, last, full) {
for (var i = 0; i < parts.length; i++) {
var part = parts[i];
var next = current[part];
- if (!next)
+ if (!next) {
return [];
+ }
current = next;
}
var result = [];
@@ -63,8 +67,9 @@ function GetCompletions(global, last, full) {
var properties = mirror.properties();
for (var i = 0; i < properties.length; i++) {
var name = properties[i].name();
- if (typeof name === 'string' && name.startsWith(last))
+ if (typeof name === 'string' && name.startsWith(last)) {
result.push(name);
+ }
}
current = ToInspectableObject(current.__proto__);
}
@@ -73,7 +78,7 @@ function GetCompletions(global, last, full) {
// Global object holding debugger related constants and state.
-const Debug = {};
+var Debug = {};
// Debug events which can occour in the V8 JavaScript engine. These originate
@@ -108,22 +113,24 @@ Debug.ScopeType = { Global: 0,
// Current debug state.
-const kNoFrame = -1;
+var kNoFrame = -1;
Debug.State = {
currentFrame: kNoFrame,
displaySourceStartLine: -1,
displaySourceEndLine: -1,
currentSourceLine: -1
-}
+};
var trace_compile = false; // Tracing all compile events?
var trace_debug_json = false; // Tracing all debug json packets?
-var last_cmd_line = '';
+var last_cmd = '';
//var lol_is_enabled; // Set to true in d8.cc if LIVE_OBJECT_LIST is defined.
var lol_next_dump_index = 0;
-const kDefaultLolLinesToPrintAtATime = 10;
-const kMaxLolLinesToPrintAtATime = 1000;
+var kDefaultLolLinesToPrintAtATime = 10;
+var kMaxLolLinesToPrintAtATime = 1000;
var repeat_cmd_line = '';
var is_running = true;
+// Global variable used to store whether a handle was requested.
+var lookup_handle = null;
// Copied from debug-delay.js. This is needed below:
function ScriptTypeFlag(type) {
@@ -150,7 +157,7 @@ function DebugMessageDetails(message) {
}
function DebugEventDetails(response) {
- details = {text:'', running:false}
+ var details = {text:'', running:false};
// Get the running state.
details.running = response.running();
@@ -217,7 +224,7 @@ function DebugEventDetails(response) {
case 'afterCompile':
if (trace_compile) {
- result = 'Source ' + body.script.name + ' compiled:\n'
+ result = 'Source ' + body.script.name + ' compiled:\n';
var source = body.script.source;
if (!(source[source.length - 1] == '\n')) {
result += source;
@@ -237,7 +244,7 @@ function DebugEventDetails(response) {
}
return details;
-};
+}
function SourceInfo(body) {
@@ -279,7 +286,7 @@ function SourceUnderline(source_text, position) {
// Return the source line text with the underline beneath.
return source_text + '\n' + underline;
-};
+}
// Converts a text command to a JSON request.
@@ -289,7 +296,7 @@ function DebugCommandToJSONRequest(cmd_line) {
print("sending: '" + result + "'");
}
return result;
-};
+}
function DebugRequest(cmd_line) {
@@ -514,7 +521,7 @@ function DebugRequest(cmd_line) {
DebugRequest.prototype.JSONRequest = function() {
return this.request_;
-}
+};
function RequestPacket(command) {
@@ -536,14 +543,14 @@ RequestPacket.prototype.toJSONProtocol = function() {
json += ',"arguments":';
// Encode the arguments part.
if (this.arguments.toJSONProtocol) {
- json += this.arguments.toJSONProtocol()
+ json += this.arguments.toJSONProtocol();
} else {
json += SimpleObjectToJSON_(this.arguments);
}
}
json += '}';
return json;
-}
+};
DebugRequest.prototype.createRequest = function(command) {
@@ -583,7 +590,6 @@ DebugRequest.prototype.createLOLRequest = function(command,
// Create a JSON request for the evaluation command.
DebugRequest.prototype.makeEvaluateJSONRequest_ = function(expression) {
- // Global varaible used to store whether a handle was requested.
lookup_handle = null;
if (lol_is_enabled) {
@@ -1310,7 +1316,7 @@ DebugRequest.prototype.lolMakeListRequest =
}
return request;
-}
+};
function extractObjId(args) {
@@ -1499,7 +1505,7 @@ DebugRequest.prototype.traceCommand_ = function(args) {
} else {
throw new Error('Invalid trace arguments.');
}
-}
+};
// Handle the help command.
DebugRequest.prototype.helpCommand_ = function(args) {
@@ -1608,7 +1614,7 @@ DebugRequest.prototype.helpCommand_ = function(args) {
print('');
print('disconnect|exit|quit - disconnects and quits the debugger');
print('help - prints this help information');
-}
+};
function formatHandleReference_(value) {
@@ -1623,7 +1629,7 @@ function formatHandleReference_(value) {
function formatObject_(value, include_properties) {
var result = '';
result += formatHandleReference_(value);
- result += ', type: object'
+ result += ', type: object';
result += ', constructor ';
var ctor = value.constructorFunctionValue();
result += formatHandleReference_(ctor);
@@ -1943,7 +1949,7 @@ function roundNumber(num, length) {
// Convert a JSON response to text for display in a text based debugger.
function DebugResponseDetails(response) {
- details = {text:'', running:false}
+ var details = { text: '', running: false };
try {
if (!response.success()) {
@@ -2168,7 +2174,7 @@ function DebugResponseDetails(response) {
}
var current_line = from_line + num;
- spacer = maxdigits - (1 + Math.floor(log10(current_line)));
+ var spacer = maxdigits - (1 + Math.floor(log10(current_line)));
if (current_line == Debug.State.currentSourceLine + 1) {
for (var i = 0; i < maxdigits; i++) {
result += '>';
@@ -2308,7 +2314,7 @@ function DebugResponseDetails(response) {
}
return details;
-};
+}
/**
@@ -2334,7 +2340,7 @@ function ProtocolPackage(json) {
*/
ProtocolPackage.prototype.type = function() {
return this.packet_.type;
-}
+};
/**
@@ -2343,7 +2349,7 @@ ProtocolPackage.prototype.type = function() {
*/
ProtocolPackage.prototype.event = function() {
return this.packet_.event;
-}
+};
/**
@@ -2352,7 +2358,7 @@ ProtocolPackage.prototype.event = function() {
*/
ProtocolPackage.prototype.requestSeq = function() {
return this.packet_.request_seq;
-}
+};
/**
@@ -2361,27 +2367,27 @@ ProtocolPackage.prototype.requestSeq = function() {
*/
ProtocolPackage.prototype.running = function() {
return this.packet_.running ? true : false;
-}
+};
ProtocolPackage.prototype.success = function() {
return this.packet_.success ? true : false;
-}
+};
ProtocolPackage.prototype.message = function() {
return this.packet_.message;
-}
+};
ProtocolPackage.prototype.command = function() {
return this.packet_.command;
-}
+};
ProtocolPackage.prototype.body = function() {
return this.packet_.body;
-}
+};
ProtocolPackage.prototype.bodyValue = function(index) {
@@ -2390,12 +2396,12 @@ ProtocolPackage.prototype.bodyValue = function(index) {
} else {
return new ProtocolValue(this.packet_.body, this);
}
-}
+};
ProtocolPackage.prototype.body = function() {
return this.packet_.body;
-}
+};
ProtocolPackage.prototype.lookup = function(handle) {
@@ -2405,12 +2411,12 @@ ProtocolPackage.prototype.lookup = function(handle) {
} else {
return new ProtocolReference(handle);
}
-}
+};
ProtocolPackage.prototype.raw_json = function() {
return this.raw_json_;
-}
+};
function ProtocolValue(value, packet) {
@@ -2425,7 +2431,7 @@ function ProtocolValue(value, packet) {
*/
ProtocolValue.prototype.type = function() {
return this.value_.type;
-}
+};
/**
@@ -2434,7 +2440,7 @@ ProtocolValue.prototype.type = function() {
*/
ProtocolValue.prototype.field = function(name) {
return this.value_[name];
-}
+};
/**
@@ -2444,7 +2450,7 @@ ProtocolValue.prototype.field = function(name) {
ProtocolValue.prototype.isPrimitive = function() {
return this.isUndefined() || this.isNull() || this.isBoolean() ||
this.isNumber() || this.isString();
-}
+};
/**
@@ -2453,7 +2459,7 @@ ProtocolValue.prototype.isPrimitive = function() {
*/
ProtocolValue.prototype.handle = function() {
return this.value_.handle;
-}
+};
/**
@@ -2462,7 +2468,7 @@ ProtocolValue.prototype.handle = function() {
*/
ProtocolValue.prototype.isUndefined = function() {
return this.value_.type == 'undefined';
-}
+};
/**
@@ -2471,7 +2477,7 @@ ProtocolValue.prototype.isUndefined = function() {
*/
ProtocolValue.prototype.isNull = function() {
return this.value_.type == 'null';
-}
+};
/**
@@ -2480,7 +2486,7 @@ ProtocolValue.prototype.isNull = function() {
*/
ProtocolValue.prototype.isBoolean = function() {
return this.value_.type == 'boolean';
-}
+};
/**
@@ -2489,7 +2495,7 @@ ProtocolValue.prototype.isBoolean = function() {
*/
ProtocolValue.prototype.isNumber = function() {
return this.value_.type == 'number';
-}
+};
/**
@@ -2498,7 +2504,7 @@ ProtocolValue.prototype.isNumber = function() {
*/
ProtocolValue.prototype.isString = function() {
return this.value_.type == 'string';
-}
+};
/**
@@ -2508,7 +2514,7 @@ ProtocolValue.prototype.isString = function() {
ProtocolValue.prototype.isObject = function() {
return this.value_.type == 'object' || this.value_.type == 'function' ||
this.value_.type == 'error' || this.value_.type == 'regexp';
-}
+};
/**
@@ -2518,7 +2524,7 @@ ProtocolValue.prototype.isObject = function() {
ProtocolValue.prototype.constructorFunctionValue = function() {
var ctor = this.value_.constructorFunction;
return this.packet_.lookup(ctor.ref);
-}
+};
/**
@@ -2528,7 +2534,7 @@ ProtocolValue.prototype.constructorFunctionValue = function() {
ProtocolValue.prototype.protoObjectValue = function() {
var proto = this.value_.protoObject;
return this.packet_.lookup(proto.ref);
-}
+};
/**
@@ -2537,7 +2543,7 @@ ProtocolValue.prototype.protoObjectValue = function() {
*/
ProtocolValue.prototype.propertyCount = function() {
return this.value_.properties ? this.value_.properties.length : 0;
-}
+};
/**
@@ -2547,7 +2553,7 @@ ProtocolValue.prototype.propertyCount = function() {
ProtocolValue.prototype.propertyName = function(index) {
var property = this.value_.properties[index];
return property.name;
-}
+};
/**
@@ -2562,7 +2568,7 @@ ProtocolValue.prototype.propertyIndex = function(name) {
}
}
return null;
-}
+};
/**
@@ -2572,7 +2578,7 @@ ProtocolValue.prototype.propertyIndex = function(name) {
ProtocolValue.prototype.propertyValue = function(index) {
var property = this.value_.properties[index];
return this.packet_.lookup(property.ref);
-}
+};
/**
@@ -2581,12 +2587,12 @@ ProtocolValue.prototype.propertyValue = function(index) {
*/
ProtocolValue.prototype.value = function() {
return this.value_.value;
-}
+};
ProtocolValue.prototype.valueString = function() {
return this.value_.text;
-}
+};
function ProtocolReference(handle) {
@@ -2596,7 +2602,7 @@ function ProtocolReference(handle) {
ProtocolReference.prototype.handle = function() {
return this.handle_;
-}
+};
function MakeJSONPair_(name, value) {
@@ -2626,7 +2632,7 @@ function NumberToJSON_(value) {
// Mapping of some control characters to avoid the \uXXXX syntax for most
// commonly used control cahracters.
-const ctrlCharMap_ = {
+var ctrlCharMap_ = {
'\b': '\\b',
'\t': '\\t',
'\n': '\\n',
@@ -2638,12 +2644,12 @@ const ctrlCharMap_ = {
// Regular expression testing for ", \ and control characters (0x00 - 0x1F).
-const ctrlCharTest_ = new RegExp('["\\\\\x00-\x1F]');
+var ctrlCharTest_ = new RegExp('["\\\\\x00-\x1F]');
// Regular expression matching ", \ and control characters (0x00 - 0x1F)
// globally.
-const ctrlCharMatch_ = new RegExp('["\\\\\x00-\x1F]', 'g');
+var ctrlCharMatch_ = new RegExp('["\\\\\x00-\x1F]', 'g');
/**
@@ -2667,7 +2673,7 @@ function StringToJSON_(value) {
// Convert control character to unicode escape sequence.
return '\\u00' +
'0' + // TODO %NumberToRadixString(Math.floor(mapped / 16), 16) +
- '0' // TODO %NumberToRadixString(mapped % 16, 16);
+ '0'; // TODO %NumberToRadixString(mapped % 16, 16)
})
+ '"';
}
@@ -2685,12 +2691,12 @@ function StringToJSON_(value) {
* @return {string} JSON formatted Date value
*/
function DateToISO8601_(value) {
- function f(n) {
+ var f = function(n) {
return n < 10 ? '0' + n : n;
- }
- function g(n) {
+ };
+ var g = function(n) {
return n < 10 ? '00' + n : n < 100 ? '0' + n : n;
- }
+ };
return builtins.GetUTCFullYearFrom(value) + '-' +
f(builtins.GetUTCMonthFrom(value) + 1) + '-' +
f(builtins.GetUTCDateFrom(value)) + 'T' +
@@ -2738,7 +2744,7 @@ function SimpleObjectToJSON_(object) {
if (property_value === null) {
property_value_json = 'null';
} else if (typeof property_value.toJSONProtocol == 'function') {
- property_value_json = property_value.toJSONProtocol(true)
+ property_value_json = property_value.toJSONProtocol(true);
} else if (property_value.constructor.name == 'Array'){
property_value_json = SimpleArrayToJSON_(property_value);
} else {
@@ -2789,7 +2795,7 @@ function SimpleArrayToJSON_(array) {
}
var elem = array[i];
if (elem.toJSONProtocol) {
- json += elem.toJSONProtocol(true)
+ json += elem.toJSONProtocol(true);
} else if (typeof(elem) === 'object') {
json += SimpleObjectToJSON_(elem);
} else if (typeof(elem) === 'boolean') {
diff --git a/src/3rdparty/v8/src/data-flow.h b/src/3rdparty/v8/src/data-flow.h
index d69d6c7..71f56e7 100644
--- a/src/3rdparty/v8/src/data-flow.h
+++ b/src/3rdparty/v8/src/data-flow.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -85,18 +85,18 @@ class BitVector: public ZoneObject {
friend class BitVector;
};
- explicit BitVector(int length)
+ BitVector(int length, Zone* zone)
: length_(length),
data_length_(SizeFor(length)),
- data_(ZONE->NewArray<uint32_t>(data_length_)) {
+ data_(zone->NewArray<uint32_t>(data_length_)) {
ASSERT(length > 0);
Clear();
}
- BitVector(const BitVector& other)
+ BitVector(const BitVector& other, Zone* zone)
: length_(other.length()),
data_length_(SizeFor(length_)),
- data_(ZONE->NewArray<uint32_t>(data_length_)) {
+ data_(zone->NewArray<uint32_t>(data_length_)) {
CopyFrom(other);
}
diff --git a/src/3rdparty/v8/src/date.cc b/src/3rdparty/v8/src/date.cc
new file mode 100644
index 0000000..a377451
--- /dev/null
+++ b/src/3rdparty/v8/src/date.cc
@@ -0,0 +1,384 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "date.h"
+
+#include "v8.h"
+
+#include "objects.h"
+#include "objects-inl.h"
+
+namespace v8 {
+namespace internal {
+
+
+static const int kDays4Years[] = {0, 365, 2 * 365, 3 * 365 + 1};
+static const int kDaysIn4Years = 4 * 365 + 1;
+static const int kDaysIn100Years = 25 * kDaysIn4Years - 1;
+static const int kDaysIn400Years = 4 * kDaysIn100Years + 1;
+static const int kDays1970to2000 = 30 * 365 + 7;
+static const int kDaysOffset = 1000 * kDaysIn400Years + 5 * kDaysIn400Years -
+ kDays1970to2000;
+static const int kYearsOffset = 400000;
+static const char kDaysInMonths[] =
+ {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31};
+
+
+void DateCache::ResetDateCache() {
+ static const int kMaxStamp = Smi::kMaxValue;
+ stamp_ = Smi::FromInt(stamp_->value() + 1);
+ if (stamp_->value() > kMaxStamp) {
+ stamp_ = Smi::FromInt(0);
+ }
+ ASSERT(stamp_ != Smi::FromInt(kInvalidStamp));
+ for (int i = 0; i < kDSTSize; ++i) {
+ ClearSegment(&dst_[i]);
+ }
+ dst_usage_counter_ = 0;
+ before_ = &dst_[0];
+ after_ = &dst_[1];
+ local_offset_ms_ = kInvalidLocalOffsetInMs;
+ ymd_valid_ = false;
+}
+
+
+void DateCache::ClearSegment(DST* segment) {
+ segment->start_sec = kMaxEpochTimeInSec;
+ segment->end_sec = -kMaxEpochTimeInSec;
+ segment->offset_ms = 0;
+ segment->last_used = 0;
+}
+
+
+void DateCache::YearMonthDayFromDays(
+ int days, int* year, int* month, int* day) {
+ if (ymd_valid_) {
+ // Check conservatively if the given 'days' has
+ // the same year and month as the cached 'days'.
+ int new_day = ymd_day_ + (days - ymd_days_);
+ if (new_day >= 1 && new_day <= 28) {
+ ymd_day_ = new_day;
+ ymd_days_ = days;
+ *year = ymd_year_;
+ *month = ymd_month_;
+ *day = new_day;
+ return;
+ }
+ }
+ int save_days = days;
+
+ days += kDaysOffset;
+ *year = 400 * (days / kDaysIn400Years) - kYearsOffset;
+ days %= kDaysIn400Years;
+
+ ASSERT(DaysFromYearMonth(*year, 0) + days == save_days);
+
+ days--;
+ int yd1 = days / kDaysIn100Years;
+ days %= kDaysIn100Years;
+ *year += 100 * yd1;
+
+ days++;
+ int yd2 = days / kDaysIn4Years;
+ days %= kDaysIn4Years;
+ *year += 4 * yd2;
+
+ days--;
+ int yd3 = days / 365;
+ days %= 365;
+ *year += yd3;
+
+
+ bool is_leap = (!yd1 || yd2) && !yd3;
+
+ ASSERT(days >= -1);
+ ASSERT(is_leap || (days >= 0));
+ ASSERT((days < 365) || (is_leap && (days < 366)));
+ ASSERT(is_leap == ((*year % 4 == 0) && (*year % 100 || (*year % 400 == 0))));
+ ASSERT(is_leap || ((DaysFromYearMonth(*year, 0) + days) == save_days));
+ ASSERT(!is_leap || ((DaysFromYearMonth(*year, 0) + days + 1) == save_days));
+
+ days += is_leap;
+
+ // Check if the date is after February.
+ if (days >= 31 + 28 + is_leap) {
+ days -= 31 + 28 + is_leap;
+ // Find the date starting from March.
+ for (int i = 2; i < 12; i++) {
+ if (days < kDaysInMonths[i]) {
+ *month = i;
+ *day = days + 1;
+ break;
+ }
+ days -= kDaysInMonths[i];
+ }
+ } else {
+ // Check January and February.
+ if (days < 31) {
+ *month = 0;
+ *day = days + 1;
+ } else {
+ *month = 1;
+ *day = days - 31 + 1;
+ }
+ }
+ ASSERT(DaysFromYearMonth(*year, *month) + *day - 1 == save_days);
+ ymd_valid_ = true;
+ ymd_year_ = *year;
+ ymd_month_ = *month;
+ ymd_day_ = *day;
+ ymd_days_ = save_days;
+}
+
+
+int DateCache::DaysFromYearMonth(int year, int month) {
+ static const int day_from_month[] = {0, 31, 59, 90, 120, 151,
+ 181, 212, 243, 273, 304, 334};
+ static const int day_from_month_leap[] = {0, 31, 60, 91, 121, 152,
+ 182, 213, 244, 274, 305, 335};
+
+ year += month / 12;
+ month %= 12;
+ if (month < 0) {
+ year--;
+ month += 12;
+ }
+
+ ASSERT(month >= 0);
+ ASSERT(month < 12);
+
+ // year_delta is an arbitrary number such that:
+ // a) year_delta = -1 (mod 400)
+ // b) year + year_delta > 0 for years in the range defined by
+ // ECMA 262 - 15.9.1.1, i.e. upto 100,000,000 days on either side of
+ // Jan 1 1970. This is required so that we don't run into integer
+ // division of negative numbers.
+ // c) there shouldn't be an overflow for 32-bit integers in the following
+ // operations.
+ static const int year_delta = 399999;
+ static const int base_day = 365 * (1970 + year_delta) +
+ (1970 + year_delta) / 4 -
+ (1970 + year_delta) / 100 +
+ (1970 + year_delta) / 400;
+
+ int year1 = year + year_delta;
+ int day_from_year = 365 * year1 +
+ year1 / 4 -
+ year1 / 100 +
+ year1 / 400 -
+ base_day;
+
+ if ((year % 4 != 0) || (year % 100 == 0 && year % 400 != 0)) {
+ return day_from_year + day_from_month[month];
+ }
+ return day_from_year + day_from_month_leap[month];
+}
+
+
+void DateCache::ExtendTheAfterSegment(int time_sec, int offset_ms) {
+ if (after_->offset_ms == offset_ms &&
+ after_->start_sec <= time_sec + kDefaultDSTDeltaInSec &&
+ time_sec <= after_->end_sec) {
+ // Extend the after_ segment.
+ after_->start_sec = time_sec;
+ } else {
+ // The after_ segment is either invalid or starts too late.
+ if (after_->start_sec <= after_->end_sec) {
+ // If the after_ segment is valid, replace it with a new segment.
+ after_ = LeastRecentlyUsedDST(before_);
+ }
+ after_->start_sec = time_sec;
+ after_->end_sec = time_sec;
+ after_->offset_ms = offset_ms;
+ after_->last_used = ++dst_usage_counter_;
+ }
+}
+
+
+int DateCache::DaylightSavingsOffsetInMs(int64_t time_ms) {
+ int time_sec = (time_ms >= 0 && time_ms <= kMaxEpochTimeInMs)
+ ? static_cast<int>(time_ms / 1000)
+ : static_cast<int>(EquivalentTime(time_ms) / 1000);
+
+ // Invalidate cache if the usage counter is close to overflow.
+ // Note that dst_usage_counter is incremented less than ten times
+ // in this function.
+ if (dst_usage_counter_ >= kMaxInt - 10) {
+ dst_usage_counter_ = 0;
+ for (int i = 0; i < kDSTSize; ++i) {
+ ClearSegment(&dst_[i]);
+ }
+ }
+
+ // Optimistic fast check.
+ if (before_->start_sec <= time_sec &&
+ time_sec <= before_->end_sec) {
+ // Cache hit.
+ before_->last_used = ++dst_usage_counter_;
+ return before_->offset_ms;
+ }
+
+ ProbeDST(time_sec);
+
+ ASSERT(InvalidSegment(before_) || before_->start_sec <= time_sec);
+ ASSERT(InvalidSegment(after_) || time_sec < after_->start_sec);
+
+ if (InvalidSegment(before_)) {
+ // Cache miss.
+ before_->start_sec = time_sec;
+ before_->end_sec = time_sec;
+ before_->offset_ms = GetDaylightSavingsOffsetFromOS(time_sec);
+ before_->last_used = ++dst_usage_counter_;
+ return before_->offset_ms;
+ }
+
+ if (time_sec <= before_->end_sec) {
+ // Cache hit.
+ before_->last_used = ++dst_usage_counter_;
+ return before_->offset_ms;
+ }
+
+ if (time_sec > before_->end_sec + kDefaultDSTDeltaInSec) {
+ // If the before_ segment ends too early, then just
+ // query for the offset of the time_sec
+ int offset_ms = GetDaylightSavingsOffsetFromOS(time_sec);
+ ExtendTheAfterSegment(time_sec, offset_ms);
+ // This swap helps the optimistic fast check in subsequent invocations.
+ DST* temp = before_;
+ before_ = after_;
+ after_ = temp;
+ return offset_ms;
+ }
+
+ // Now the time_sec is between
+ // before_->end_sec and before_->end_sec + default DST delta.
+ // Update the usage counter of before_ since it is going to be used.
+ before_->last_used = ++dst_usage_counter_;
+
+ // Check if after_ segment is invalid or starts too late.
+ // Note that start_sec of invalid segments is kMaxEpochTimeInSec.
+ if (before_->end_sec + kDefaultDSTDeltaInSec <= after_->start_sec) {
+ int new_after_start_sec = before_->end_sec + kDefaultDSTDeltaInSec;
+ int new_offset_ms = GetDaylightSavingsOffsetFromOS(new_after_start_sec);
+ ExtendTheAfterSegment(new_after_start_sec, new_offset_ms);
+ } else {
+ ASSERT(!InvalidSegment(after_));
+ // Update the usage counter of after_ since it is going to be used.
+ after_->last_used = ++dst_usage_counter_;
+ }
+
+ // Now the time_sec is between before_->end_sec and after_->start_sec.
+ // Only one daylight savings offset change can occur in this interval.
+
+ if (before_->offset_ms == after_->offset_ms) {
+ // Merge two segments if they have the same offset.
+ before_->end_sec = after_->end_sec;
+ ClearSegment(after_);
+ return before_->offset_ms;
+ }
+
+ // Binary search for daylight savings offset change point,
+ // but give up if we don't find it in four iterations.
+ for (int i = 4; i >= 0; --i) {
+ int delta = after_->start_sec - before_->end_sec;
+ int middle_sec = (i == 0) ? time_sec : before_->end_sec + delta / 2;
+ int offset_ms = GetDaylightSavingsOffsetFromOS(middle_sec);
+ if (before_->offset_ms == offset_ms) {
+ before_->end_sec = middle_sec;
+ if (time_sec <= before_->end_sec) {
+ return offset_ms;
+ }
+ } else {
+ ASSERT(after_->offset_ms == offset_ms);
+ after_->start_sec = middle_sec;
+ if (time_sec >= after_->start_sec) {
+ // This swap helps the optimistic fast check in subsequent invocations.
+ DST* temp = before_;
+ before_ = after_;
+ after_ = temp;
+ return offset_ms;
+ }
+ }
+ }
+ UNREACHABLE();
+ return 0;
+}
+
+
+void DateCache::ProbeDST(int time_sec) {
+ DST* before = NULL;
+ DST* after = NULL;
+ ASSERT(before_ != after_);
+
+ for (int i = 0; i < kDSTSize; ++i) {
+ if (dst_[i].start_sec <= time_sec) {
+ if (before == NULL || before->start_sec < dst_[i].start_sec) {
+ before = &dst_[i];
+ }
+ } else if (time_sec < dst_[i].end_sec) {
+ if (after == NULL || after->end_sec > dst_[i].end_sec) {
+ after = &dst_[i];
+ }
+ }
+ }
+
+ // If before or after segments were not found,
+ // then set them to any invalid segment.
+ if (before == NULL) {
+ before = InvalidSegment(before_) ? before_ : LeastRecentlyUsedDST(after);
+ }
+ if (after == NULL) {
+ after = InvalidSegment(after_) && before != after_
+ ? after_ : LeastRecentlyUsedDST(before);
+ }
+
+ ASSERT(before != NULL);
+ ASSERT(after != NULL);
+ ASSERT(before != after);
+ ASSERT(InvalidSegment(before) || before->start_sec <= time_sec);
+ ASSERT(InvalidSegment(after) || time_sec < after->start_sec);
+ ASSERT(InvalidSegment(before) || InvalidSegment(after) ||
+ before->end_sec < after->start_sec);
+
+ before_ = before;
+ after_ = after;
+}
+
+
+DateCache::DST* DateCache::LeastRecentlyUsedDST(DST* skip) {
+ DST* result = NULL;
+ for (int i = 0; i < kDSTSize; ++i) {
+ if (&dst_[i] == skip) continue;
+ if (result == NULL || result->last_used > dst_[i].last_used) {
+ result = &dst_[i];
+ }
+ }
+ ClearSegment(result);
+ return result;
+}
+
+} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/date.h b/src/3rdparty/v8/src/date.h
new file mode 100644
index 0000000..fcd61db
--- /dev/null
+++ b/src/3rdparty/v8/src/date.h
@@ -0,0 +1,260 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_DATE_H_
+#define V8_DATE_H_
+
+#include "allocation.h"
+#include "globals.h"
+#include "platform.h"
+
+
+namespace v8 {
+namespace internal {
+
+class DateCache {
+ public:
+ static const int kMsPerMin = 60 * 1000;
+ static const int kSecPerDay = 24 * 60 * 60;
+ static const int64_t kMsPerDay = kSecPerDay * 1000;
+
+ // The largest time that can be passed to OS date-time library functions.
+ static const int kMaxEpochTimeInSec = kMaxInt;
+ static const int64_t kMaxEpochTimeInMs =
+ static_cast<int64_t>(kMaxInt) * 1000;
+
+ // The largest time that can be stored in JSDate.
+ static const int64_t kMaxTimeInMs =
+ static_cast<int64_t>(864000000) * 10000000;
+
+ // Conservative upper bound on time that can be stored in JSDate
+ // before UTC conversion.
+ static const int64_t kMaxTimeBeforeUTCInMs =
+ kMaxTimeInMs + 10 * kMsPerDay;
+
+ // Sentinel that denotes an invalid local offset.
+ static const int kInvalidLocalOffsetInMs = kMaxInt;
+ // Sentinel that denotes an invalid cache stamp.
+ // It is an invariant of DateCache that cache stamp is non-negative.
+ static const int kInvalidStamp = -1;
+
+ DateCache() : stamp_(0) {
+ ResetDateCache();
+ }
+
+ virtual ~DateCache() {}
+
+
+ // Clears cached timezone information and increments the cache stamp.
+ void ResetDateCache();
+
+
+ // Computes floor(time_ms / kMsPerDay).
+ static int DaysFromTime(int64_t time_ms) {
+ if (time_ms < 0) time_ms -= (kMsPerDay - 1);
+ return static_cast<int>(time_ms / kMsPerDay);
+ }
+
+
+ // Computes modulo(time_ms, kMsPerDay) given that
+ // days = floor(time_ms / kMsPerDay).
+ static int TimeInDay(int64_t time_ms, int days) {
+ return static_cast<int>(time_ms - days * kMsPerDay);
+ }
+
+
+ // Given the number of days since the epoch, computes the weekday.
+ // ECMA 262 - 15.9.1.6.
+ int Weekday(int days) {
+ int result = (days + 4) % 7;
+ return result >= 0 ? result : result + 7;
+ }
+
+
+ bool IsLeap(int year) {
+ return year % 4 == 0 && (year % 100 != 0 || year % 400 == 0);
+ }
+
+
+ // ECMA 262 - 15.9.1.7.
+ int LocalOffsetInMs() {
+ if (local_offset_ms_ == kInvalidLocalOffsetInMs) {
+ local_offset_ms_ = GetLocalOffsetFromOS();
+ }
+ return local_offset_ms_;
+ }
+
+
+ const char* LocalTimezone(int64_t time_ms) {
+ if (time_ms < 0 || time_ms > kMaxEpochTimeInMs) {
+ time_ms = EquivalentTime(time_ms);
+ }
+ return OS::LocalTimezone(static_cast<double>(time_ms));
+ }
+
+ // ECMA 262 - 15.9.5.26
+ int TimezoneOffset(int64_t time_ms) {
+ int64_t local_ms = ToLocal(time_ms);
+ return static_cast<int>((time_ms - local_ms) / kMsPerMin);
+ }
+
+ // ECMA 262 - 15.9.1.9
+ int64_t ToLocal(int64_t time_ms) {
+ return time_ms + LocalOffsetInMs() + DaylightSavingsOffsetInMs(time_ms);
+ }
+
+ // ECMA 262 - 15.9.1.9
+ int64_t ToUTC(int64_t time_ms) {
+ time_ms -= LocalOffsetInMs();
+ return time_ms - DaylightSavingsOffsetInMs(time_ms);
+ }
+
+
+ // Computes a time equivalent to the given time according
+ // to ECMA 262 - 15.9.1.9.
+ // The issue here is that some library calls don't work right for dates
+ // that cannot be represented using a non-negative signed 32 bit integer
+ // (measured in whole seconds based on the 1970 epoch).
+ // We solve this by mapping the time to a year with same leap-year-ness
+ // and same starting day for the year. The ECMAscript specification says
+ // we must do this, but for compatibility with other browsers, we use
+ // the actual year if it is in the range 1970..2037
+ int64_t EquivalentTime(int64_t time_ms) {
+ int days = DaysFromTime(time_ms);
+ int time_within_day_ms = static_cast<int>(time_ms - days * kMsPerDay);
+ int year, month, day;
+ YearMonthDayFromDays(days, &year, &month, &day);
+ int new_days = DaysFromYearMonth(EquivalentYear(year), month) + day - 1;
+ return static_cast<int64_t>(new_days) * kMsPerDay + time_within_day_ms;
+ }
+
+ // Returns an equivalent year in the range [2008-2035] matching
+ // - leap year,
+ // - week day of first day.
+ // ECMA 262 - 15.9.1.9.
+ int EquivalentYear(int year) {
+ int week_day = Weekday(DaysFromYearMonth(year, 0));
+ int recent_year = (IsLeap(year) ? 1956 : 1967) + (week_day * 12) % 28;
+ // Find the year in the range 2008..2037 that is equivalent mod 28.
+ // Add 3*28 to give a positive argument to the modulus operator.
+ return 2008 + (recent_year + 3 * 28 - 2008) % 28;
+ }
+
+ // Given the number of days since the epoch, computes
+ // the corresponding year, month, and day.
+ void YearMonthDayFromDays(int days, int* year, int* month, int* day);
+
+ // Computes the number of days since the epoch for
+ // the first day of the given month in the given year.
+ int DaysFromYearMonth(int year, int month);
+
+ // Cache stamp is used for invalidating caches in JSDate.
+ // We increment the stamp each time when the timezone information changes.
+ // JSDate objects perform stamp check and invalidate their caches if
+ // their saved stamp is not equal to the current stamp.
+ Smi* stamp() { return stamp_; }
+ void* stamp_address() { return &stamp_; }
+
+ // These functions are virtual so that we can override them when testing.
+ virtual int GetDaylightSavingsOffsetFromOS(int64_t time_sec) {
+ double time_ms = static_cast<double>(time_sec * 1000);
+ return static_cast<int>(OS::DaylightSavingsOffset(time_ms));
+ }
+
+ virtual int GetLocalOffsetFromOS() {
+ double offset = OS::LocalTimeOffset();
+ ASSERT(offset < kInvalidLocalOffsetInMs);
+ return static_cast<int>(offset);
+ }
+
+ private:
+ // The implementation relies on the fact that no time zones have
+ // more than one daylight savings offset change per 19 days.
+ // In Egypt in 2010 they decided to suspend DST during Ramadan. This
+ // led to a short interval where DST is in effect from September 10 to
+ // September 30.
+ static const int kDefaultDSTDeltaInSec = 19 * kSecPerDay;
+
+ // Size of the Daylight Savings Time cache.
+ static const int kDSTSize = 32;
+
+ // Daylight Savings Time segment stores a segment of time where
+ // daylight savings offset does not change.
+ struct DST {
+ int start_sec;
+ int end_sec;
+ int offset_ms;
+ int last_used;
+ };
+
+ // Computes the daylight savings offset for the given time.
+ // ECMA 262 - 15.9.1.8
+ int DaylightSavingsOffsetInMs(int64_t time_ms);
+
+ // Sets the before_ and the after_ segments from the DST cache such that
+ // the before_ segment starts earlier than the given time and
+ // the after_ segment start later than the given time.
+ // Both segments might be invalid.
+ // The last_used counters of the before_ and after_ are updated.
+ void ProbeDST(int time_sec);
+
+ // Finds the least recently used segment from the DST cache that is not
+ // equal to the given 'skip' segment.
+ DST* LeastRecentlyUsedDST(DST* skip);
+
+ // Extends the after_ segment with the given point or resets it
+ // if it starts later than the given time + kDefaultDSTDeltaInSec.
+ inline void ExtendTheAfterSegment(int time_sec, int offset_ms);
+
+ // Makes the given segment invalid.
+ inline void ClearSegment(DST* segment);
+
+ bool InvalidSegment(DST* segment) {
+ return segment->start_sec > segment->end_sec;
+ }
+
+ Smi* stamp_;
+
+ // Daylight Saving Time cache.
+ DST dst_[kDSTSize];
+ int dst_usage_counter_;
+ DST* before_;
+ DST* after_;
+
+ int local_offset_ms_;
+
+ // Year/Month/Day cache.
+ bool ymd_valid_;
+ int ymd_days_;
+ int ymd_year_;
+ int ymd_month_;
+ int ymd_day_;
+};
+
+} } // namespace v8::internal
+
+#endif
diff --git a/src/3rdparty/v8/src/date.js b/src/3rdparty/v8/src/date.js
index 8d7d5d8..d0e24ab 100644
--- a/src/3rdparty/v8/src/date.js
+++ b/src/3rdparty/v8/src/date.js
@@ -28,190 +28,22 @@
// This file relies on the fact that the following declarations have been made
// in v8natives.js:
-// const $isFinite = GlobalIsFinite;
+// var $isFinite = GlobalIsFinite;
// -------------------------------------------------------------------
// This file contains date support implemented in JavaScript.
-
// Keep reference to original values of some global properties. This
// has the added benefit that the code in this file is isolated from
// changes to these properties.
-const $Date = global.Date;
+var $Date = global.Date;
// Helper function to throw error.
function ThrowDateTypeError() {
throw new $TypeError('this is not a Date object.');
}
-// ECMA 262 - 5.2
-function Modulo(value, remainder) {
- var mod = value % remainder;
- // Guard against returning -0.
- if (mod == 0) return 0;
- return mod >= 0 ? mod : mod + remainder;
-}
-
-
-function TimeWithinDay(time) {
- return Modulo(time, msPerDay);
-}
-
-
-// ECMA 262 - 15.9.1.3
-function DaysInYear(year) {
- if (year % 4 != 0) return 365;
- if ((year % 100 == 0) && (year % 400 != 0)) return 365;
- return 366;
-}
-
-
-function DayFromYear(year) {
- return 365 * (year-1970)
- + FLOOR((year-1969)/4)
- - FLOOR((year-1901)/100)
- + FLOOR((year-1601)/400);
-}
-
-
-function TimeFromYear(year) {
- return msPerDay * DayFromYear(year);
-}
-
-
-function InLeapYear(time) {
- return DaysInYear(YearFromTime(time)) - 365; // Returns 1 or 0.
-}
-
-
-// ECMA 262 - 15.9.1.9
-function EquivalentYear(year) {
- // Returns an equivalent year in the range [2008-2035] matching
- // - leap year.
- // - week day of first day.
- var time = TimeFromYear(year);
- var recent_year = (InLeapYear(time) == 0 ? 1967 : 1956) +
- (WeekDay(time) * 12) % 28;
- // Find the year in the range 2008..2037 that is equivalent mod 28.
- // Add 3*28 to give a positive argument to the modulus operator.
- return 2008 + (recent_year + 3*28 - 2008) % 28;
-}
-
-
-function EquivalentTime(t) {
- // The issue here is that some library calls don't work right for dates
- // that cannot be represented using a non-negative signed 32 bit integer
- // (measured in whole seconds based on the 1970 epoch).
- // We solve this by mapping the time to a year with same leap-year-ness
- // and same starting day for the year. The ECMAscript specification says
- // we must do this, but for compatibility with other browsers, we use
- // the actual year if it is in the range 1970..2037
- if (t >= 0 && t <= 2.1e12) return t;
-
- var day = MakeDay(EquivalentYear(YearFromTime(t)),
- MonthFromTime(t),
- DateFromTime(t));
- return MakeDate(day, TimeWithinDay(t));
-}
-
-
-// local_time_offset is initialized when the DST_offset_cache is missed.
-// It must not be used until after a call to DaylightSavingsOffset().
-// In this way, only one check, for a DST cache miss, is needed.
-var local_time_offset;
-
-
-// Because computing the DST offset is an expensive operation,
-// we keep a cache of the last computed DST offset along with a time interval
-// where we know the cache is valid.
-// When the cache is valid, local_time_offset is also valid.
-var DST_offset_cache = {
- // Cached DST offset.
- offset: 0,
- // Time interval where the cached offset is valid.
- start: 0, end: -1,
- // Size of next interval expansion.
- increment: 0,
- initial_increment: 19 * msPerDay
-};
-
-
-// NOTE: The implementation relies on the fact that no time zones have
-// more than one daylight savings offset change per 19 days.
-//
-// In Egypt in 2010 they decided to suspend DST during Ramadan. This
-// led to a short interval where DST is in effect from September 10 to
-// September 30.
-//
-// If this function is called with NaN it returns NaN.
-function DaylightSavingsOffset(t) {
- // Load the cache object from the builtins object.
- var cache = DST_offset_cache;
-
- // Cache the start and the end in local variables for fast access.
- var start = cache.start;
- var end = cache.end;
-
- if (start <= t) {
- // If the time fits in the cached interval, return the cached offset.
- if (t <= end) return cache.offset;
-
- // If the cache misses, the local_time_offset may not be initialized.
- if (IS_UNDEFINED(local_time_offset)) {
- local_time_offset = %DateLocalTimeOffset();
- }
-
- // Compute a possible new interval end.
- var new_end = end + cache.increment;
-
- if (t <= new_end) {
- var end_offset = %DateDaylightSavingsOffset(EquivalentTime(new_end));
- if (cache.offset == end_offset) {
- // If the offset at the end of the new interval still matches
- // the offset in the cache, we grow the cached time interval
- // and return the offset.
- cache.end = new_end;
- cache.increment = cache.initial_increment;
- return end_offset;
- } else {
- var offset = %DateDaylightSavingsOffset(EquivalentTime(t));
- if (offset == end_offset) {
- // The offset at the given time is equal to the offset at the
- // new end of the interval, so that means that we've just skipped
- // the point in time where the DST offset change occurred. Updated
- // the interval to reflect this and reset the increment.
- cache.start = t;
- cache.end = new_end;
- cache.increment = cache.initial_increment;
- } else {
- // The interval contains a DST offset change and the given time is
- // before it. Adjust the increment to avoid a linear search for
- // the offset change point and change the end of the interval.
- cache.increment /= 3;
- cache.end = t;
- }
- // Update the offset in the cache and return it.
- cache.offset = offset;
- return offset;
- }
- }
- }
-
- // If the cache misses, the local_time_offset may not be initialized.
- if (IS_UNDEFINED(local_time_offset)) {
- local_time_offset = %DateLocalTimeOffset();
- }
- // Compute the DST offset for the time and shrink the cache interval
- // to only contain the time. This allows fast repeated DST offset
- // computations for the same time.
- var offset = %DateDaylightSavingsOffset(EquivalentTime(t));
- cache.offset = offset;
- cache.start = cache.end = t;
- cache.increment = cache.initial_increment;
- return offset;
-}
-
var timezone_cache_time = $NaN;
var timezone_cache_timezone;
@@ -221,57 +53,18 @@ function LocalTimezone(t) {
if (t == timezone_cache_time) {
return timezone_cache_timezone;
}
- var timezone = %DateLocalTimezone(EquivalentTime(t));
+ var timezone = %DateLocalTimezone(t);
timezone_cache_time = t;
timezone_cache_timezone = timezone;
return timezone;
}
-function WeekDay(time) {
- return Modulo(DAY(time) + 4, 7);
-}
-
-
-function LocalTime(time) {
- if (NUMBER_IS_NAN(time)) return time;
- // DaylightSavingsOffset called before local_time_offset used.
- return time + DaylightSavingsOffset(time) + local_time_offset;
-}
-
-
-var ltcache = {
- key: null,
- val: null
-};
-
-function LocalTimeNoCheck(time) {
- var ltc = ltcache;
- if (%_ObjectEquals(time, ltc.key)) return ltc.val;
-
- // Inline the DST offset cache checks for speed.
- // The cache is hit, or DaylightSavingsOffset is called,
- // before local_time_offset is used.
- var cache = DST_offset_cache;
- if (cache.start <= time && time <= cache.end) {
- var dst_offset = cache.offset;
- } else {
- var dst_offset = DaylightSavingsOffset(time);
- }
- ltc.key = time;
- return (ltc.val = time + local_time_offset + dst_offset);
-}
-
-
function UTC(time) {
if (NUMBER_IS_NAN(time)) return time;
// local_time_offset is needed before the call to DaylightSavingsOffset,
// so it may be uninitialized.
- if (IS_UNDEFINED(local_time_offset)) {
- local_time_offset = %DateLocalTimeOffset();
- }
- var tmp = time - local_time_offset;
- return tmp - DaylightSavingsOffset(tmp);
+ return %DateToUTC(time);
}
@@ -294,48 +87,6 @@ function TimeInYear(year) {
}
-var ymd_from_time_cache = [1970, 0, 1];
-var ymd_from_time_cached_time = 0;
-
-function YearFromTime(t) {
- if (t !== ymd_from_time_cached_time) {
- if (!$isFinite(t)) {
- return $NaN;
- }
-
- %DateYMDFromTime(t, ymd_from_time_cache);
- ymd_from_time_cached_time = t
- }
-
- return ymd_from_time_cache[0];
-}
-
-function MonthFromTime(t) {
- if (t !== ymd_from_time_cached_time) {
- if (!$isFinite(t)) {
- return $NaN;
- }
- %DateYMDFromTime(t, ymd_from_time_cache);
- ymd_from_time_cached_time = t
- }
-
- return ymd_from_time_cache[1];
-}
-
-function DateFromTime(t) {
- if (t !== ymd_from_time_cached_time) {
- if (!$isFinite(t)) {
- return $NaN;
- }
-
- %DateYMDFromTime(t, ymd_from_time_cache);
- ymd_from_time_cached_time = t
- }
-
- return ymd_from_time_cache[2];
-}
-
-
// Compute number of days given a year, month, date.
// Note that month and date can lie outside the normal range.
// For example:
@@ -386,9 +137,6 @@ function TimeClip(time) {
var Date_cache = {
// Cached time value.
time: $NaN,
- // Cached year when interpreting the time as a local time. Only
- // valid when the time matches cached time.
- year: $NaN,
// String input for which the cached time is valid.
string: null
};
@@ -405,11 +153,10 @@ var Date_cache = {
var value;
if (argc == 0) {
value = %DateCurrentTime();
-
+ SET_UTC_DATE_VALUE(this, value);
} else if (argc == 1) {
if (IS_NUMBER(year)) {
- value = TimeClip(year);
-
+ value = year;
} else if (IS_STRING(year)) {
// Probe the Date cache. If we already have a time value for the
// given time, we re-use that instead of parsing the string again.
@@ -420,7 +167,6 @@ var Date_cache = {
value = DateParse(year);
if (!NUMBER_IS_NAN(value)) {
cache.time = value;
- cache.year = YearFromTime(LocalTimeNoCheck(value));
cache.string = year;
}
}
@@ -434,9 +180,9 @@ var Date_cache = {
// which is the default for everything else than Date objects.
// This makes us behave like KJS and SpiderMonkey.
var time = ToPrimitive(year, NUMBER_HINT);
- value = IS_STRING(time) ? DateParse(time) : TimeClip(ToNumber(time));
+ value = IS_STRING(time) ? DateParse(time) : ToNumber(time);
}
-
+ SET_UTC_DATE_VALUE(this, value);
} else {
year = ToNumber(year);
month = ToNumber(month);
@@ -445,13 +191,14 @@ var Date_cache = {
minutes = argc > 4 ? ToNumber(minutes) : 0;
seconds = argc > 5 ? ToNumber(seconds) : 0;
ms = argc > 6 ? ToNumber(ms) : 0;
- year = (!NUMBER_IS_NAN(year) && 0 <= TO_INTEGER(year) && TO_INTEGER(year) <= 99)
- ? 1900 + TO_INTEGER(year) : year;
+ year = (!NUMBER_IS_NAN(year) &&
+ 0 <= TO_INTEGER(year) &&
+ TO_INTEGER(year) <= 99) ? 1900 + TO_INTEGER(year) : year;
var day = MakeDay(year, month, date);
var time = MakeTime(hours, minutes, seconds, ms);
- value = TimeClip(UTC(MakeDate(day, time)));
+ value = MakeDate(day, time);
+ SET_LOCAL_DATE_VALUE(this, value);
}
- %_SetValueOf(this, value);
});
@@ -459,7 +206,8 @@ var Date_cache = {
var WeekDays = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'];
-var Months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];
+var Months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+ 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];
function TwoDigitString(value) {
@@ -467,50 +215,46 @@ function TwoDigitString(value) {
}
-function DateString(time) {
- return WeekDays[WeekDay(time)] + ' '
- + Months[MonthFromTime(time)] + ' '
- + TwoDigitString(DateFromTime(time)) + ' '
- + YearFromTime(time);
+function DateString(date) {
+ return WeekDays[LOCAL_WEEKDAY(date)] + ' '
+ + Months[LOCAL_MONTH(date)] + ' '
+ + TwoDigitString(LOCAL_DAY(date)) + ' '
+ + LOCAL_YEAR(date);
}
-var LongWeekDays = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'];
-var LongMonths = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'];
+var LongWeekDays = ['Sunday', 'Monday', 'Tuesday', 'Wednesday',
+ 'Thursday', 'Friday', 'Saturday'];
+var LongMonths = ['January', 'February', 'March', 'April', 'May', 'June',
+ 'July', 'August', 'September', 'October', 'November', 'December'];
-function LongDateString(time) {
- return LongWeekDays[WeekDay(time)] + ', '
- + LongMonths[MonthFromTime(time)] + ' '
- + TwoDigitString(DateFromTime(time)) + ', '
- + YearFromTime(time);
+function LongDateString(date) {
+ return LongWeekDays[LOCAL_WEEKDAY(date)] + ', '
+ + LongMonths[LOCAL_MONTH(date)] + ' '
+ + TwoDigitString(LOCAL_DAY(date)) + ', '
+ + LOCAL_YEAR(date);
}
-function TimeString(time) {
- return TwoDigitString(HOUR_FROM_TIME(time)) + ':'
- + TwoDigitString(MIN_FROM_TIME(time)) + ':'
- + TwoDigitString(SEC_FROM_TIME(time));
+function TimeString(date) {
+ return TwoDigitString(LOCAL_HOUR(date)) + ':'
+ + TwoDigitString(LOCAL_MIN(date)) + ':'
+ + TwoDigitString(LOCAL_SEC(date));
}
-function LocalTimezoneString(time) {
- var old_timezone = timezone_cache_timezone;
- var timezone = LocalTimezone(time);
- if (old_timezone && timezone != old_timezone) {
- // If the timezone string has changed from the one that we cached,
- // the local time offset may now be wrong. So we need to update it
- // and try again.
- local_time_offset = %DateLocalTimeOffset();
- // We also need to invalidate the DST cache as the new timezone may have
- // different DST times.
- var dst_cache = DST_offset_cache;
- dst_cache.start = 0;
- dst_cache.end = -1;
- }
+function TimeStringUTC(date) {
+ return TwoDigitString(UTC_HOUR(date)) + ':'
+ + TwoDigitString(UTC_MIN(date)) + ':'
+ + TwoDigitString(UTC_SEC(date));
+}
+
+
+function LocalTimezoneString(date) {
+ var timezone = LocalTimezone(UTC_DATE_VALUE(date));
- var timezoneOffset =
- (DaylightSavingsOffset(time) + local_time_offset) / msPerMinute;
+ var timezoneOffset = -TIMEZONE_OFFSET(date);
var sign = (timezoneOffset >= 0) ? 1 : -1;
var hours = FLOOR((sign * timezoneOffset)/60);
var min = FLOOR((sign * timezoneOffset)%60);
@@ -520,8 +264,8 @@ function LocalTimezoneString(time) {
}
-function DatePrintString(time) {
- return DateString(time) + ' ' + TimeString(time);
+function DatePrintString(date) {
+ return DateString(date) + ' ' + TimeString(date);
}
// -------------------------------------------------------------------
@@ -556,11 +300,12 @@ function DateUTC(year, month, date, hours, minutes, seconds, ms) {
minutes = argc > 4 ? ToNumber(minutes) : 0;
seconds = argc > 5 ? ToNumber(seconds) : 0;
ms = argc > 6 ? ToNumber(ms) : 0;
- year = (!NUMBER_IS_NAN(year) && 0 <= TO_INTEGER(year) && TO_INTEGER(year) <= 99)
- ? 1900 + TO_INTEGER(year) : year;
+ year = (!NUMBER_IS_NAN(year) &&
+ 0 <= TO_INTEGER(year) &&
+ TO_INTEGER(year) <= 99) ? 1900 + TO_INTEGER(year) : year;
var day = MakeDay(year, month, date);
var time = MakeTime(hours, minutes, seconds, ms);
- return %_SetValueOf(this, TimeClip(MakeDate(day, time)));
+ return TimeClip(MakeDate(day, time));
}
@@ -573,27 +318,30 @@ function DateNow() {
// ECMA 262 - 15.9.5.2
function DateToString() {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this)
if (NUMBER_IS_NAN(t)) return kInvalidDate;
- var time_zone_string = LocalTimezoneString(t); // May update local offset.
- return DatePrintString(LocalTimeNoCheck(t)) + time_zone_string;
+ var time_zone_string = LocalTimezoneString(this)
+ return DatePrintString(this) + time_zone_string;
}
// ECMA 262 - 15.9.5.3
function DateToDateString() {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
if (NUMBER_IS_NAN(t)) return kInvalidDate;
- return DateString(LocalTimeNoCheck(t));
+ return DateString(this);
}
// ECMA 262 - 15.9.5.4
function DateToTimeString() {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
if (NUMBER_IS_NAN(t)) return kInvalidDate;
- var time_zone_string = LocalTimezoneString(t); // May update local offset.
- return TimeString(LocalTimeNoCheck(t)) + time_zone_string;
+ var time_zone_string = LocalTimezoneString(this);
+ return TimeString(this) + time_zone_string;
}
@@ -605,357 +353,387 @@ function DateToLocaleString() {
// ECMA 262 - 15.9.5.6
function DateToLocaleDateString() {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
if (NUMBER_IS_NAN(t)) return kInvalidDate;
- return LongDateString(LocalTimeNoCheck(t));
+ return LongDateString(this);
}
// ECMA 262 - 15.9.5.7
function DateToLocaleTimeString() {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
if (NUMBER_IS_NAN(t)) return kInvalidDate;
- var lt = LocalTimeNoCheck(t);
- return TimeString(lt);
+ return TimeString(this);
}
// ECMA 262 - 15.9.5.8
function DateValueOf() {
- return DATE_VALUE(this);
+ CHECK_DATE(this);
+ return UTC_DATE_VALUE(this);
}
// ECMA 262 - 15.9.5.9
function DateGetTime() {
- return DATE_VALUE(this);
+ CHECK_DATE(this);
+ return UTC_DATE_VALUE(this);
}
// ECMA 262 - 15.9.5.10
function DateGetFullYear() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- var cache = Date_cache;
- if (cache.time === t) return cache.year;
- return YearFromTime(LocalTimeNoCheck(t));
+ CHECK_DATE(this);
+ return LOCAL_YEAR(this);
}
// ECMA 262 - 15.9.5.11
function DateGetUTCFullYear() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return YearFromTime(t);
+ CHECK_DATE(this);
+ return UTC_YEAR(this);
}
// ECMA 262 - 15.9.5.12
function DateGetMonth() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return MonthFromTime(LocalTimeNoCheck(t));
+ CHECK_DATE(this);
+ return LOCAL_MONTH(this);
}
// ECMA 262 - 15.9.5.13
function DateGetUTCMonth() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return MonthFromTime(t);
+ CHECK_DATE(this);
+ return UTC_MONTH(this);
}
// ECMA 262 - 15.9.5.14
function DateGetDate() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return DateFromTime(LocalTimeNoCheck(t));
+ CHECK_DATE(this);
+ return LOCAL_DAY(this);
}
// ECMA 262 - 15.9.5.15
function DateGetUTCDate() {
- var t = DATE_VALUE(this);
- return NAN_OR_DATE_FROM_TIME(t);
+ CHECK_DATE(this);
+ return UTC_DAY(this);
}
// ECMA 262 - 15.9.5.16
function DateGetDay() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return WeekDay(LocalTimeNoCheck(t));
+ CHECK_DATE(this);
+ return LOCAL_WEEKDAY(this);
}
// ECMA 262 - 15.9.5.17
function DateGetUTCDay() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return WeekDay(t);
+ CHECK_DATE(this);
+ return UTC_WEEKDAY(this);
}
// ECMA 262 - 15.9.5.18
function DateGetHours() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return HOUR_FROM_TIME(LocalTimeNoCheck(t));
+ CHECK_DATE(this);
+ return LOCAL_HOUR(this);
}
// ECMA 262 - 15.9.5.19
function DateGetUTCHours() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return HOUR_FROM_TIME(t);
+ CHECK_DATE(this);
+ return UTC_HOUR(this);
}
// ECMA 262 - 15.9.5.20
function DateGetMinutes() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return MIN_FROM_TIME(LocalTimeNoCheck(t));
+ CHECK_DATE(this);
+ return LOCAL_MIN(this);
}
// ECMA 262 - 15.9.5.21
function DateGetUTCMinutes() {
- var t = DATE_VALUE(this);
- return NAN_OR_MIN_FROM_TIME(t);
+ CHECK_DATE(this);
+ return UTC_MIN(this);
}
// ECMA 262 - 15.9.5.22
function DateGetSeconds() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return SEC_FROM_TIME(LocalTimeNoCheck(t));
+ CHECK_DATE(this);
+ return LOCAL_SEC(this);
}
// ECMA 262 - 15.9.5.23
function DateGetUTCSeconds() {
- var t = DATE_VALUE(this);
- return NAN_OR_SEC_FROM_TIME(t);
+ CHECK_DATE(this);
+ return UTC_SEC(this)
}
// ECMA 262 - 15.9.5.24
function DateGetMilliseconds() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return MS_FROM_TIME(LocalTimeNoCheck(t));
+ CHECK_DATE(this);
+ return LOCAL_MS(this);
}
// ECMA 262 - 15.9.5.25
function DateGetUTCMilliseconds() {
- var t = DATE_VALUE(this);
- return NAN_OR_MS_FROM_TIME(t);
+ CHECK_DATE(this);
+ return UTC_MS(this);
}
// ECMA 262 - 15.9.5.26
function DateGetTimezoneOffset() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return t;
- return (t - LocalTimeNoCheck(t)) / msPerMinute;
+ CHECK_DATE(this);
+ return TIMEZONE_OFFSET(this);
}
// ECMA 262 - 15.9.5.27
function DateSetTime(ms) {
- if (!IS_DATE(this)) ThrowDateTypeError();
- return %_SetValueOf(this, TimeClip(ToNumber(ms)));
+ CHECK_DATE(this);
+ SET_UTC_DATE_VALUE(this, ToNumber(ms));
+ return UTC_DATE_VALUE(this);
}
// ECMA 262 - 15.9.5.28
function DateSetMilliseconds(ms) {
- var t = LocalTime(DATE_VALUE(this));
+ CHECK_DATE(this);
+ var t = LOCAL_DATE_VALUE(this);
ms = ToNumber(ms);
- var time = MakeTime(HOUR_FROM_TIME(t), MIN_FROM_TIME(t), SEC_FROM_TIME(t), ms);
- return %_SetValueOf(this, TimeClip(UTC(MakeDate(DAY(t), time))));
+ var time = MakeTime(LOCAL_HOUR(this), LOCAL_MIN(this), LOCAL_SEC(this), ms);
+ return SET_LOCAL_DATE_VALUE(this, MakeDate(LOCAL_DAYS(this), time));
}
// ECMA 262 - 15.9.5.29
function DateSetUTCMilliseconds(ms) {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
ms = ToNumber(ms);
- var time = MakeTime(HOUR_FROM_TIME(t), MIN_FROM_TIME(t), SEC_FROM_TIME(t), ms);
- return %_SetValueOf(this, TimeClip(MakeDate(DAY(t), time)));
+ var time = MakeTime(UTC_HOUR(this),
+ UTC_MIN(this),
+ UTC_SEC(this),
+ ms);
+ return SET_UTC_DATE_VALUE(this, MakeDate(UTC_DAYS(this), time));
}
// ECMA 262 - 15.9.5.30
function DateSetSeconds(sec, ms) {
- var t = LocalTime(DATE_VALUE(this));
+ CHECK_DATE(this);
+ var t = LOCAL_DATE_VALUE(this);
sec = ToNumber(sec);
- ms = %_ArgumentsLength() < 2 ? NAN_OR_MS_FROM_TIME(t) : ToNumber(ms);
- var time = MakeTime(HOUR_FROM_TIME(t), MIN_FROM_TIME(t), sec, ms);
- return %_SetValueOf(this, TimeClip(UTC(MakeDate(DAY(t), time))));
+ ms = %_ArgumentsLength() < 2 ? LOCAL_MS(this) : ToNumber(ms);
+ var time = MakeTime(LOCAL_HOUR(this), LOCAL_MIN(this), sec, ms);
+ return SET_LOCAL_DATE_VALUE(this, MakeDate(LOCAL_DAYS(this), time));
}
// ECMA 262 - 15.9.5.31
function DateSetUTCSeconds(sec, ms) {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
sec = ToNumber(sec);
- ms = %_ArgumentsLength() < 2 ? NAN_OR_MS_FROM_TIME(t) : ToNumber(ms);
- var time = MakeTime(HOUR_FROM_TIME(t), MIN_FROM_TIME(t), sec, ms);
- return %_SetValueOf(this, TimeClip(MakeDate(DAY(t), time)));
+ ms = %_ArgumentsLength() < 2 ? UTC_MS(this) : ToNumber(ms);
+ var time = MakeTime(UTC_HOUR(this), UTC_MIN(this), sec, ms);
+ return SET_UTC_DATE_VALUE(this, MakeDate(UTC_DAYS(this), time));
}
// ECMA 262 - 15.9.5.33
function DateSetMinutes(min, sec, ms) {
- var t = LocalTime(DATE_VALUE(this));
+ CHECK_DATE(this);
+ var t = LOCAL_DATE_VALUE(this);
min = ToNumber(min);
var argc = %_ArgumentsLength();
- sec = argc < 2 ? NAN_OR_SEC_FROM_TIME(t) : ToNumber(sec);
- ms = argc < 3 ? NAN_OR_MS_FROM_TIME(t) : ToNumber(ms);
- var time = MakeTime(HOUR_FROM_TIME(t), min, sec, ms);
- return %_SetValueOf(this, TimeClip(UTC(MakeDate(DAY(t), time))));
+ sec = argc < 2 ? LOCAL_SEC(this) : ToNumber(sec);
+ ms = argc < 3 ? LOCAL_MS(this) : ToNumber(ms);
+ var time = MakeTime(LOCAL_HOUR(this), min, sec, ms);
+ return SET_LOCAL_DATE_VALUE(this, MakeDate(LOCAL_DAYS(this), time));
}
// ECMA 262 - 15.9.5.34
function DateSetUTCMinutes(min, sec, ms) {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
min = ToNumber(min);
var argc = %_ArgumentsLength();
- sec = argc < 2 ? NAN_OR_SEC_FROM_TIME(t) : ToNumber(sec);
- ms = argc < 3 ? NAN_OR_MS_FROM_TIME(t) : ToNumber(ms);
- var time = MakeTime(HOUR_FROM_TIME(t), min, sec, ms);
- return %_SetValueOf(this, TimeClip(MakeDate(DAY(t), time)));
+ sec = argc < 2 ? UTC_SEC(this) : ToNumber(sec);
+ ms = argc < 3 ? UTC_MS(this) : ToNumber(ms);
+ var time = MakeTime(UTC_HOUR(this), min, sec, ms);
+ return SET_UTC_DATE_VALUE(this, MakeDate(UTC_DAYS(this), time));
}
// ECMA 262 - 15.9.5.35
function DateSetHours(hour, min, sec, ms) {
- var t = LocalTime(DATE_VALUE(this));
+ CHECK_DATE(this);
+ var t = LOCAL_DATE_VALUE(this);
hour = ToNumber(hour);
var argc = %_ArgumentsLength();
- min = argc < 2 ? NAN_OR_MIN_FROM_TIME(t) : ToNumber(min);
- sec = argc < 3 ? NAN_OR_SEC_FROM_TIME(t) : ToNumber(sec);
- ms = argc < 4 ? NAN_OR_MS_FROM_TIME(t) : ToNumber(ms);
+ min = argc < 2 ? LOCAL_MIN(this) : ToNumber(min);
+ sec = argc < 3 ? LOCAL_SEC(this) : ToNumber(sec);
+ ms = argc < 4 ? LOCAL_MS(this) : ToNumber(ms);
var time = MakeTime(hour, min, sec, ms);
- return %_SetValueOf(this, TimeClip(UTC(MakeDate(DAY(t), time))));
+ return SET_LOCAL_DATE_VALUE(this, MakeDate(LOCAL_DAYS(this), time));
}
// ECMA 262 - 15.9.5.34
function DateSetUTCHours(hour, min, sec, ms) {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
hour = ToNumber(hour);
var argc = %_ArgumentsLength();
- min = argc < 2 ? NAN_OR_MIN_FROM_TIME(t) : ToNumber(min);
- sec = argc < 3 ? NAN_OR_SEC_FROM_TIME(t) : ToNumber(sec);
- ms = argc < 4 ? NAN_OR_MS_FROM_TIME(t) : ToNumber(ms);
+ min = argc < 2 ? UTC_MIN(this) : ToNumber(min);
+ sec = argc < 3 ? UTC_SEC(this) : ToNumber(sec);
+ ms = argc < 4 ? UTC_MS(this) : ToNumber(ms);
var time = MakeTime(hour, min, sec, ms);
- return %_SetValueOf(this, TimeClip(MakeDate(DAY(t), time)));
+ return SET_UTC_DATE_VALUE(this, MakeDate(UTC_DAYS(this), time));
}
// ECMA 262 - 15.9.5.36
function DateSetDate(date) {
- var t = LocalTime(DATE_VALUE(this));
+ CHECK_DATE(this);
+ var t = LOCAL_DATE_VALUE(this);
date = ToNumber(date);
- var day = MakeDay(YearFromTime(t), MonthFromTime(t), date);
- return %_SetValueOf(this, TimeClip(UTC(MakeDate(day, TimeWithinDay(t)))));
+ var day = MakeDay(LOCAL_YEAR(this), LOCAL_MONTH(this), date);
+ return SET_LOCAL_DATE_VALUE(this, MakeDate(day, LOCAL_TIME_IN_DAY(this)));
}
// ECMA 262 - 15.9.5.37
function DateSetUTCDate(date) {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
date = ToNumber(date);
- var day = MakeDay(YearFromTime(t), MonthFromTime(t), date);
- return %_SetValueOf(this, TimeClip(MakeDate(day, TimeWithinDay(t))));
+ var day = MakeDay(UTC_YEAR(this), UTC_MONTH(this), date);
+ return SET_UTC_DATE_VALUE(this, MakeDate(day, UTC_TIME_IN_DAY(this)));
}
// ECMA 262 - 15.9.5.38
function DateSetMonth(month, date) {
- var t = LocalTime(DATE_VALUE(this));
+ CHECK_DATE(this);
+ var t = LOCAL_DATE_VALUE(this);
month = ToNumber(month);
- date = %_ArgumentsLength() < 2 ? NAN_OR_DATE_FROM_TIME(t) : ToNumber(date);
- var day = MakeDay(YearFromTime(t), month, date);
- return %_SetValueOf(this, TimeClip(UTC(MakeDate(day, TimeWithinDay(t)))));
+ date = %_ArgumentsLength() < 2 ? LOCAL_DAY(this) : ToNumber(date);
+ var day = MakeDay(LOCAL_YEAR(this), month, date);
+ return SET_LOCAL_DATE_VALUE(this, MakeDate(day, LOCAL_TIME_IN_DAY(this)));
}
// ECMA 262 - 15.9.5.39
function DateSetUTCMonth(month, date) {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
month = ToNumber(month);
- date = %_ArgumentsLength() < 2 ? NAN_OR_DATE_FROM_TIME(t) : ToNumber(date);
- var day = MakeDay(YearFromTime(t), month, date);
- return %_SetValueOf(this, TimeClip(MakeDate(day, TimeWithinDay(t))));
+ date = %_ArgumentsLength() < 2 ? UTC_DAY(this) : ToNumber(date);
+ var day = MakeDay(UTC_YEAR(this), month, date);
+ return SET_UTC_DATE_VALUE(this, MakeDate(day, UTC_TIME_IN_DAY(this)));
}
// ECMA 262 - 15.9.5.40
function DateSetFullYear(year, month, date) {
- var t = DATE_VALUE(this);
- t = NUMBER_IS_NAN(t) ? 0 : LocalTimeNoCheck(t);
+ CHECK_DATE(this);
+ var t = LOCAL_DATE_VALUE(this);
year = ToNumber(year);
var argc = %_ArgumentsLength();
- month = argc < 2 ? MonthFromTime(t) : ToNumber(month);
- date = argc < 3 ? DateFromTime(t) : ToNumber(date);
+ var time ;
+ if (NUMBER_IS_NAN(t)) {
+ month = argc < 2 ? 0 : ToNumber(month);
+ date = argc < 3 ? 1 : ToNumber(date);
+ time = 0;
+ } else {
+ month = argc < 2 ? LOCAL_MONTH(this) : ToNumber(month);
+ date = argc < 3 ? LOCAL_DAY(this) : ToNumber(date);
+ time = LOCAL_TIME_IN_DAY(this);
+ }
var day = MakeDay(year, month, date);
- return %_SetValueOf(this, TimeClip(UTC(MakeDate(day, TimeWithinDay(t)))));
+ return SET_LOCAL_DATE_VALUE(this, MakeDate(day, time));
}
// ECMA 262 - 15.9.5.41
function DateSetUTCFullYear(year, month, date) {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) t = 0;
- var argc = %_ArgumentsLength();
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
year = ToNumber(year);
- month = argc < 2 ? MonthFromTime(t) : ToNumber(month);
- date = argc < 3 ? DateFromTime(t) : ToNumber(date);
+ var argc = %_ArgumentsLength();
+ var time ;
+ if (NUMBER_IS_NAN(t)) {
+ month = argc < 2 ? 0 : ToNumber(month);
+ date = argc < 3 ? 1 : ToNumber(date);
+ time = 0;
+ } else {
+ month = argc < 2 ? UTC_MONTH(this) : ToNumber(month);
+ date = argc < 3 ? UTC_DAY(this) : ToNumber(date);
+ time = UTC_TIME_IN_DAY(this);
+ }
var day = MakeDay(year, month, date);
- return %_SetValueOf(this, TimeClip(MakeDate(day, TimeWithinDay(t))));
+ return SET_UTC_DATE_VALUE(this, MakeDate(day, time));
}
// ECMA 262 - 15.9.5.42
function DateToUTCString() {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
if (NUMBER_IS_NAN(t)) return kInvalidDate;
// Return UTC string of the form: Sat, 31 Jan 1970 23:00:00 GMT
- return WeekDays[WeekDay(t)] + ', '
- + TwoDigitString(DateFromTime(t)) + ' '
- + Months[MonthFromTime(t)] + ' '
- + YearFromTime(t) + ' '
- + TimeString(t) + ' GMT';
+ return WeekDays[UTC_WEEKDAY(this)] + ', '
+ + TwoDigitString(UTC_DAY(this)) + ' '
+ + Months[UTC_MONTH(this)] + ' '
+ + UTC_YEAR(this) + ' '
+ + TimeStringUTC(this) + ' GMT';
}
// ECMA 262 - B.2.4
function DateGetYear() {
- var t = DATE_VALUE(this);
- if (NUMBER_IS_NAN(t)) return $NaN;
- return YearFromTime(LocalTimeNoCheck(t)) - 1900;
+ CHECK_DATE(this);
+ return LOCAL_YEAR(this) - 1900;
}
// ECMA 262 - B.2.5
function DateSetYear(year) {
- var t = LocalTime(DATE_VALUE(this));
- if (NUMBER_IS_NAN(t)) t = 0;
+ CHECK_DATE(this);
year = ToNumber(year);
- if (NUMBER_IS_NAN(year)) return %_SetValueOf(this, $NaN);
+ if (NUMBER_IS_NAN(year)) return SET_UTC_DATE_VALUE(this, $NaN);
year = (0 <= TO_INTEGER(year) && TO_INTEGER(year) <= 99)
? 1900 + TO_INTEGER(year) : year;
- var day = MakeDay(year, MonthFromTime(t), DateFromTime(t));
- return %_SetValueOf(this, TimeClip(UTC(MakeDate(day, TimeWithinDay(t)))));
+ var t = LOCAL_DATE_VALUE(this);
+ var month, date, time;
+ if (NUMBER_IS_NAN(t)) {
+ month = 0;
+ date = 1;
+ time = 0;
+ } else {
+ month = LOCAL_MONTH(this);
+ date = LOCAL_DAY(this);
+ time = LOCAL_TIME_IN_DAY(this);
+ }
+ var day = MakeDay(year, month, date);
+ return SET_LOCAL_DATE_VALUE(this, MakeDate(day, time));
}
@@ -979,7 +757,8 @@ function PadInt(n, digits) {
// ECMA 262 - 15.9.5.43
function DateToISOString() {
- var t = DATE_VALUE(this);
+ CHECK_DATE(this);
+ var t = UTC_DATE_VALUE(this);
if (NUMBER_IS_NAN(t)) throw MakeRangeError("invalid_time_value", []);
var year = this.getUTCFullYear();
var year_string;
@@ -1014,34 +793,13 @@ function DateToJSON(key) {
function ResetDateCache() {
-
- // Reset the local_time_offset:
- local_time_offset = %DateLocalTimeOffset();
-
- // Reset the DST offset cache:
- var cache = DST_offset_cache;
- cache.offset = 0;
- cache.start = 0;
- cache.end = -1;
- cache.increment = 0;
- cache.initial_increment = 19 * msPerDay;
-
// Reset the timezone cache:
timezone_cache_time = $NaN;
timezone_cache_timezone = undefined;
- // Reset the ltcache:
- ltcache.key = null;
- ltcache.val = null;
-
- // Reset the ymd_from_time_cache:
- ymd_from_time_cache = [$NaN, $NaN, $NaN];
- ymd_from_time_cached_time = $NaN;
-
// Reset the date cache:
cache = Date_cache;
cache.time = $NaN;
- cache.year = $NaN;
cache.string = null;
}
@@ -1062,7 +820,7 @@ function SetUpDate() {
// Set up non-enumerable functions of the Date prototype object and
// set their names.
- InstallFunctionsOnHiddenPrototype($Date.prototype, DONT_ENUM, $Array(
+ InstallFunctions($Date.prototype, DONT_ENUM, $Array(
"toString", DateToString,
"toDateString", DateToDateString,
"toTimeString", DateToTimeString,
diff --git a/src/3rdparty/v8/src/debug-agent.cc b/src/3rdparty/v8/src/debug-agent.cc
index 591d0b3..511663d 100644
--- a/src/3rdparty/v8/src/debug-agent.cc
+++ b/src/3rdparty/v8/src/debug-agent.cc
@@ -229,8 +229,6 @@ void DebuggerAgentSession::Shutdown() {
const char* const DebuggerAgentUtil::kContentLength = "Content-Length";
-const int DebuggerAgentUtil::kContentLengthSize =
- StrLength(kContentLength);
SmartArrayPointer<char> DebuggerAgentUtil::ReceiveMessage(const Socket* conn) {
@@ -374,8 +372,11 @@ bool DebuggerAgentUtil::SendMessage(const Socket* conn,
// Calculate the message size in UTF-8 encoding.
int utf8_len = 0;
+ int previous = unibrow::Utf16::kNoPreviousCharacter;
for (int i = 0; i < message.length(); i++) {
- utf8_len += unibrow::Utf8::Length(message[i]);
+ uint16_t character = message[i];
+ utf8_len += unibrow::Utf8::Length(character, previous);
+ previous = character;
}
// Send the header.
@@ -390,17 +391,33 @@ bool DebuggerAgentUtil::SendMessage(const Socket* conn,
// Send message body as UTF-8.
int buffer_position = 0; // Current buffer position.
+ previous = unibrow::Utf16::kNoPreviousCharacter;
for (int i = 0; i < message.length(); i++) {
// Write next UTF-8 encoded character to buffer.
+ uint16_t character = message[i];
buffer_position +=
- unibrow::Utf8::Encode(buffer + buffer_position, message[i]);
+ unibrow::Utf8::Encode(buffer + buffer_position, character, previous);
ASSERT(buffer_position < kBufferSize);
// Send buffer if full or last character is encoded.
- if (kBufferSize - buffer_position < 3 || i == message.length() - 1) {
- conn->Send(buffer, buffer_position);
- buffer_position = 0;
+ if (kBufferSize - buffer_position <
+ unibrow::Utf16::kMaxExtraUtf8BytesForOneUtf16CodeUnit ||
+ i == message.length() - 1) {
+ if (unibrow::Utf16::IsLeadSurrogate(character)) {
+ const int kEncodedSurrogateLength =
+ unibrow::Utf16::kUtf8BytesToCodeASurrogate;
+ ASSERT(buffer_position >= kEncodedSurrogateLength);
+ conn->Send(buffer, buffer_position - kEncodedSurrogateLength);
+ for (int i = 0; i < kEncodedSurrogateLength; i++) {
+ buffer[i] = buffer[buffer_position + i];
+ }
+ buffer_position = kEncodedSurrogateLength;
+ } else {
+ conn->Send(buffer, buffer_position);
+ buffer_position = 0;
+ }
}
+ previous = character;
}
return true;
diff --git a/src/3rdparty/v8/src/debug-agent.h b/src/3rdparty/v8/src/debug-agent.h
index a07fb0f..6115190 100644
--- a/src/3rdparty/v8/src/debug-agent.h
+++ b/src/3rdparty/v8/src/debug-agent.h
@@ -115,7 +115,6 @@ class DebuggerAgentSession: public Thread {
class DebuggerAgentUtil {
public:
static const char* const kContentLength;
- static const int kContentLengthSize;
static SmartArrayPointer<char> ReceiveMessage(const Socket* conn);
static bool SendConnectMessage(const Socket* conn,
diff --git a/src/3rdparty/v8/src/debug-debugger.js b/src/3rdparty/v8/src/debug-debugger.js
index 4e73dcd..802f622 100644
--- a/src/3rdparty/v8/src/debug-debugger.js
+++ b/src/3rdparty/v8/src/debug-debugger.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -26,14 +26,14 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Default number of frames to include in the response to backtrace request.
-const kDefaultBacktraceLength = 10;
+var kDefaultBacktraceLength = 10;
-const Debug = {};
+var Debug = {};
// Regular expression to skip "crud" at the beginning of a source line which is
// not really code. Currently the regular expression matches whitespace and
// comments.
-const sourceLineBeginningSkip = /^(?:\s*(?:\/\*.*?\*\/)*)*/;
+var sourceLineBeginningSkip = /^(?:\s*(?:\/\*.*?\*\/)*)*/;
// Debug events which can occour in the V8 JavaScript engine. These originate
// from the API include file debug.h.
@@ -286,7 +286,7 @@ ScriptBreakPoint.prototype.cloneForOtherScript = function (other_script) {
copy.condition_ = this.condition_;
copy.ignoreCount_ = this.ignoreCount_;
return copy;
-}
+};
ScriptBreakPoint.prototype.number = function() {
@@ -335,13 +335,13 @@ ScriptBreakPoint.prototype.actual_locations = function() {
locations.push(this.break_points_[i].actual_location);
}
return locations;
-}
+};
ScriptBreakPoint.prototype.update_positions = function(line, column) {
this.line_ = line;
this.column_ = column;
-}
+};
ScriptBreakPoint.prototype.hit_count = function() {
@@ -449,11 +449,6 @@ ScriptBreakPoint.prototype.set = function (script) {
actual_position = position;
}
var actual_location = script.locationFromPosition(actual_position, true);
- // Check for any relocation and compare it with the breakpoint_relocation flag
- if (actual_location.line != line && !%AllowBreakPointRelocation()) {
- %ClearBreakPoint(break_point);
- return;
- }
break_point.actual_location = { line: actual_location.line,
column: actual_location.column,
script_id: script.id };
@@ -482,10 +477,11 @@ ScriptBreakPoint.prototype.clear = function () {
// break points set in this script.
function UpdateScriptBreakPoints(script) {
for (var i = 0; i < script_break_points.length; i++) {
- if ((script_break_points[i].type() == Debug.ScriptBreakPointType.ScriptName ||
- script_break_points[i].type() == Debug.ScriptBreakPointType.ScriptRegExp) &&
- script_break_points[i].matchesScript(script)) {
- script_break_points[i].set(script);
+ var break_point = script_break_points[i];
+ if ((break_point.type() == Debug.ScriptBreakPointType.ScriptName ||
+ break_point.type() == Debug.ScriptBreakPointType.ScriptRegExp) &&
+ break_point.matchesScript(script)) {
+ break_point.set(script);
}
}
}
@@ -591,7 +587,7 @@ Debug.findFunctionSourceLocation = function(func, opt_line, opt_column) {
var script = %FunctionGetScript(func);
var script_offset = %FunctionGetScriptSourcePosition(func);
return script.locationFromLine(opt_line, opt_column, script_offset);
-}
+};
// Returns the character position in a script based on a line number and an
@@ -599,7 +595,7 @@ Debug.findFunctionSourceLocation = function(func, opt_line, opt_column) {
Debug.findScriptSourcePosition = function(script, opt_line, opt_column) {
var location = script.locationFromLine(opt_line, opt_column);
return location ? location.position : null;
-}
+};
Debug.findBreakPoint = function(break_point_number, remove) {
@@ -633,7 +629,7 @@ Debug.findBreakPointActualLocations = function(break_point_number) {
}
}
return [];
-}
+};
Debug.setBreakPoint = function(func, opt_line, opt_column, opt_condition) {
if (!IS_FUNCTION(func)) throw new Error('Parameters have wrong types.');
@@ -683,8 +679,9 @@ Debug.setBreakPointByScriptIdAndPosition = function(script_id, position,
{
break_point = MakeBreakPoint(position);
break_point.setCondition(condition);
- if (!enabled)
+ if (!enabled) {
break_point.disable();
+ }
var scripts = this.scripts();
for (var i = 0; i < scripts.length; i++) {
if (script_id == scripts[i].id) {
@@ -777,7 +774,7 @@ Debug.findScriptBreakPoint = function(break_point_number, remove) {
}
}
return script_break_point;
-}
+};
// Sets a breakpoint in a script identified through id or name at the
@@ -805,7 +802,7 @@ Debug.setScriptBreakPoint = function(type, script_id_or_name,
}
return script_break_point.number();
-}
+};
Debug.setScriptBreakPointById = function(script_id,
@@ -814,7 +811,7 @@ Debug.setScriptBreakPointById = function(script_id,
return this.setScriptBreakPoint(Debug.ScriptBreakPointType.ScriptId,
script_id, opt_line, opt_column,
opt_condition, opt_groupId);
-}
+};
Debug.setScriptBreakPointByName = function(script_name,
@@ -823,7 +820,7 @@ Debug.setScriptBreakPointByName = function(script_name,
return this.setScriptBreakPoint(Debug.ScriptBreakPointType.ScriptName,
script_name, opt_line, opt_column,
opt_condition, opt_groupId);
-}
+};
Debug.setScriptBreakPointByRegExp = function(script_regexp,
@@ -832,7 +829,7 @@ Debug.setScriptBreakPointByRegExp = function(script_regexp,
return this.setScriptBreakPoint(Debug.ScriptBreakPointType.ScriptRegExp,
script_regexp, opt_line, opt_column,
opt_condition, opt_groupId);
-}
+};
Debug.enableScriptBreakPoint = function(break_point_number) {
@@ -847,13 +844,15 @@ Debug.disableScriptBreakPoint = function(break_point_number) {
};
-Debug.changeScriptBreakPointCondition = function(break_point_number, condition) {
+Debug.changeScriptBreakPointCondition = function(
+ break_point_number, condition) {
var script_break_point = this.findScriptBreakPoint(break_point_number, false);
script_break_point.setCondition(condition);
};
-Debug.changeScriptBreakPointIgnoreCount = function(break_point_number, ignoreCount) {
+Debug.changeScriptBreakPointIgnoreCount = function(
+ break_point_number, ignoreCount) {
if (ignoreCount < 0) {
throw new Error('Invalid argument');
}
@@ -864,12 +863,12 @@ Debug.changeScriptBreakPointIgnoreCount = function(break_point_number, ignoreCou
Debug.scriptBreakPoints = function() {
return script_break_points;
-}
+};
Debug.clearStepping = function() {
%ClearStepping();
-}
+};
Debug.setBreakOnException = function() {
return %ChangeBreakOnException(Debug.ExceptionBreak.Caught, true);
@@ -946,7 +945,7 @@ ExecutionState.prototype.prepareStep = function(opt_action, opt_count) {
var count = opt_count ? %ToNumber(opt_count) : 1;
return %PrepareStep(this.break_id, action, count);
-}
+};
ExecutionState.prototype.evaluateGlobal = function(source, disable_break,
opt_additional_context) {
@@ -966,8 +965,9 @@ ExecutionState.prototype.threadCount = function() {
ExecutionState.prototype.frame = function(opt_index) {
// If no index supplied return the selected frame.
if (opt_index == null) opt_index = this.selected_frame;
- if (opt_index < 0 || opt_index >= this.frameCount())
+ if (opt_index < 0 || opt_index >= this.frameCount()) {
throw new Error('Illegal frame index.');
+ }
return new FrameMirror(this.break_id, opt_index);
};
@@ -1094,12 +1094,12 @@ ExceptionEvent.prototype.eventType = function() {
ExceptionEvent.prototype.exception = function() {
return this.exception_;
-}
+};
ExceptionEvent.prototype.uncaught = function() {
return this.uncaught_;
-}
+};
ExceptionEvent.prototype.func = function() {
@@ -1191,7 +1191,7 @@ CompileEvent.prototype.toJSONProtocol = function() {
o.body.script = this.script_;
return o.toJSONProtocol();
-}
+};
function MakeNewFunctionEvent(func) {
@@ -1247,7 +1247,7 @@ ScriptCollectedEvent.prototype.toJSONProtocol = function() {
o.body = {};
o.body.script = { id: this.id() };
return o.toJSONProtocol();
-}
+};
function MakeScriptObject_(script, include_source) {
@@ -1264,18 +1264,18 @@ function MakeScriptObject_(script, include_source) {
o.source = script.source();
}
return o;
-};
+}
function DebugCommandProcessor(exec_state, opt_is_running) {
this.exec_state_ = exec_state;
this.running_ = opt_is_running || false;
-};
+}
DebugCommandProcessor.prototype.processDebugRequest = function (request) {
return this.processDebugJSONRequest(request);
-}
+};
function ProtocolMessage(request) {
@@ -1303,13 +1303,13 @@ ProtocolMessage.prototype.setOption = function(name, value) {
this.options_ = {};
}
this.options_[name] = value;
-}
+};
ProtocolMessage.prototype.failed = function(message) {
this.success = false;
this.message = message;
-}
+};
ProtocolMessage.prototype.toJSONProtocol = function() {
@@ -1357,7 +1357,7 @@ ProtocolMessage.prototype.toJSONProtocol = function() {
}
json.running = this.running;
return JSON.stringify(json);
-}
+};
DebugCommandProcessor.prototype.createResponse = function(request) {
@@ -1365,7 +1365,8 @@ DebugCommandProcessor.prototype.createResponse = function(request) {
};
-DebugCommandProcessor.prototype.processDebugJSONRequest = function(json_request) {
+DebugCommandProcessor.prototype.processDebugJSONRequest = function(
+ json_request) {
var request; // Current request.
var response; // Generated response.
try {
@@ -1547,7 +1548,7 @@ DebugCommandProcessor.prototype.continueRequest_ = function(request, response) {
}
}
- // Setup the VM for stepping.
+ // Set up the VM for stepping.
this.exec_state_.prepareStep(action, count);
}
@@ -1652,7 +1653,7 @@ DebugCommandProcessor.prototype.setBreakPointRequest_ =
// Add the break point number to the response.
response.body = { type: type,
- breakpoint: break_point_number }
+ breakpoint: break_point_number };
// Add break point information to the response.
if (break_point instanceof ScriptBreakPoint) {
@@ -1666,7 +1667,8 @@ DebugCommandProcessor.prototype.setBreakPointRequest_ =
response.body.type = 'scriptRegExp';
response.body.script_regexp = break_point.script_regexp_object().source;
} else {
- throw new Error("Internal error: Unexpected breakpoint type: " + break_point.type());
+ throw new Error("Internal error: Unexpected breakpoint type: " +
+ break_point.type());
}
response.body.line = break_point.line();
response.body.column = break_point.column();
@@ -1678,7 +1680,8 @@ DebugCommandProcessor.prototype.setBreakPointRequest_ =
};
-DebugCommandProcessor.prototype.changeBreakPointRequest_ = function(request, response) {
+DebugCommandProcessor.prototype.changeBreakPointRequest_ = function(
+ request, response) {
// Check for legal request.
if (!request.arguments) {
response.failed('Missing arguments');
@@ -1715,10 +1718,11 @@ DebugCommandProcessor.prototype.changeBreakPointRequest_ = function(request, res
if (!IS_UNDEFINED(ignoreCount)) {
Debug.changeBreakPointIgnoreCount(break_point, ignoreCount);
}
-}
+};
-DebugCommandProcessor.prototype.clearBreakPointGroupRequest_ = function(request, response) {
+DebugCommandProcessor.prototype.clearBreakPointGroupRequest_ = function(
+ request, response) {
// Check for legal request.
if (!request.arguments) {
response.failed('Missing arguments');
@@ -1749,10 +1753,11 @@ DebugCommandProcessor.prototype.clearBreakPointGroupRequest_ = function(request,
// Add the cleared break point numbers to the response.
response.body = { breakpoints: cleared_break_points };
-}
+};
-DebugCommandProcessor.prototype.clearBreakPointRequest_ = function(request, response) {
+DebugCommandProcessor.prototype.clearBreakPointRequest_ = function(
+ request, response) {
// Check for legal request.
if (!request.arguments) {
response.failed('Missing arguments');
@@ -1772,11 +1777,12 @@ DebugCommandProcessor.prototype.clearBreakPointRequest_ = function(request, resp
Debug.clearBreakPoint(break_point);
// Add the cleared break point number to the response.
- response.body = { breakpoint: break_point }
-}
+ response.body = { breakpoint: break_point };
+};
-DebugCommandProcessor.prototype.listBreakpointsRequest_ = function(request, response) {
+DebugCommandProcessor.prototype.listBreakpointsRequest_ = function(
+ request, response) {
var array = [];
for (var i = 0; i < script_break_points.length; i++) {
var break_point = script_break_points[i];
@@ -1791,7 +1797,7 @@ DebugCommandProcessor.prototype.listBreakpointsRequest_ = function(request, resp
condition: break_point.condition(),
ignoreCount: break_point.ignoreCount(),
actual_locations: break_point.actual_locations()
- }
+ };
if (break_point.type() == Debug.ScriptBreakPointType.ScriptId) {
description.type = 'scriptId';
@@ -1803,7 +1809,8 @@ DebugCommandProcessor.prototype.listBreakpointsRequest_ = function(request, resp
description.type = 'scriptRegExp';
description.script_regexp = break_point.script_regexp_object().source;
} else {
- throw new Error("Internal error: Unexpected breakpoint type: " + break_point.type());
+ throw new Error("Internal error: Unexpected breakpoint type: " +
+ break_point.type());
}
array.push(description);
}
@@ -1812,15 +1819,15 @@ DebugCommandProcessor.prototype.listBreakpointsRequest_ = function(request, resp
breakpoints: array,
breakOnExceptions: Debug.isBreakOnException(),
breakOnUncaughtExceptions: Debug.isBreakOnUncaughtException()
- }
-}
+ };
+};
DebugCommandProcessor.prototype.disconnectRequest_ =
function(request, response) {
Debug.disableAllBreakPoints();
this.continueRequest_(request, response);
-}
+};
DebugCommandProcessor.prototype.setExceptionBreakRequest_ =
@@ -1865,10 +1872,11 @@ DebugCommandProcessor.prototype.setExceptionBreakRequest_ =
// Add the cleared break point number to the response.
response.body = { 'type': type, 'enabled': enabled };
-}
+};
-DebugCommandProcessor.prototype.backtraceRequest_ = function(request, response) {
+DebugCommandProcessor.prototype.backtraceRequest_ = function(
+ request, response) {
// Get the number of frames.
var total_frames = this.exec_state_.frameCount();
@@ -1876,12 +1884,12 @@ DebugCommandProcessor.prototype.backtraceRequest_ = function(request, response)
if (total_frames == 0) {
response.body = {
totalFrames: total_frames
- }
+ };
return;
}
// Default frame range to include in backtrace.
- var from_index = 0
+ var from_index = 0;
var to_index = kDefaultBacktraceLength;
// Get the range from the arguments.
@@ -1894,7 +1902,7 @@ DebugCommandProcessor.prototype.backtraceRequest_ = function(request, response)
}
if (request.arguments.bottom) {
var tmp_index = total_frames - from_index;
- from_index = total_frames - to_index
+ from_index = total_frames - to_index;
to_index = tmp_index;
}
if (from_index < 0 || to_index < 0) {
@@ -1920,7 +1928,7 @@ DebugCommandProcessor.prototype.backtraceRequest_ = function(request, response)
toFrame: to_index,
totalFrames: total_frames,
frames: frames
- }
+ };
};
@@ -1944,8 +1952,8 @@ DebugCommandProcessor.prototype.frameRequest_ = function(request, response) {
DebugCommandProcessor.prototype.frameForScopeRequest_ = function(request) {
- // Get the frame for which the scope or scopes are requested. With no frameNumber
- // argument use the currently selected frame.
+ // Get the frame for which the scope or scopes are requested.
+ // With no frameNumber argument use the currently selected frame.
if (request.arguments && !IS_UNDEFINED(request.arguments.frameNumber)) {
frame_index = request.arguments.frameNumber;
if (frame_index < 0 || this.exec_state_.frameCount() <= frame_index) {
@@ -1955,7 +1963,7 @@ DebugCommandProcessor.prototype.frameForScopeRequest_ = function(request) {
} else {
return this.exec_state_.frame();
}
-}
+};
DebugCommandProcessor.prototype.scopesRequest_ = function(request, response) {
@@ -1978,7 +1986,7 @@ DebugCommandProcessor.prototype.scopesRequest_ = function(request, response) {
toScope: total_scopes,
totalScopes: total_scopes,
scopes: scopes
- }
+ };
};
@@ -2223,7 +2231,8 @@ DebugCommandProcessor.prototype.scriptsRequest_ = function(request, response) {
if (!IS_UNDEFINED(request.arguments.types)) {
types = %ToNumber(request.arguments.types);
if (isNaN(types) || types < 0) {
- return response.failed('Invalid types "' + request.arguments.types + '"');
+ return response.failed('Invalid types "' +
+ request.arguments.types + '"');
}
}
@@ -2292,7 +2301,7 @@ DebugCommandProcessor.prototype.threadsRequest_ = function(request, response) {
var details = %GetThreadDetails(this.exec_state_.break_id, i);
var thread_info = { current: details[0],
id: details[1]
- }
+ };
threads.push(thread_info);
}
@@ -2300,7 +2309,7 @@ DebugCommandProcessor.prototype.threadsRequest_ = function(request, response) {
response.body = {
totalThreads: total_threads,
threads: threads
- }
+ };
};
@@ -2312,7 +2321,7 @@ DebugCommandProcessor.prototype.suspendRequest_ = function(request, response) {
DebugCommandProcessor.prototype.versionRequest_ = function(request, response) {
response.body = {
V8Version: %GetV8Version()
- }
+ };
};
@@ -2328,7 +2337,8 @@ DebugCommandProcessor.prototype.profileRequest_ = function(request, response) {
};
-DebugCommandProcessor.prototype.changeLiveRequest_ = function(request, response) {
+DebugCommandProcessor.prototype.changeLiveRequest_ = function(
+ request, response) {
if (!Debug.LiveEdit) {
return response.failed('LiveEdit feature is not supported');
}
@@ -2399,7 +2409,7 @@ DebugCommandProcessor.prototype.debuggerFlagsRequest_ = function(request,
response.body.flags.push({ name: name, value: value });
}
}
-}
+};
DebugCommandProcessor.prototype.v8FlagsRequest_ = function(request, response) {
@@ -2505,7 +2515,7 @@ DebugCommandProcessor.prototype.lolPrintRequest_ = function(request, response) {
// running.
DebugCommandProcessor.prototype.isRunning = function() {
return this.running_;
-}
+};
DebugCommandProcessor.prototype.systemBreak = function(cmd, args) {
@@ -2521,7 +2531,7 @@ function NumberToHex8Str(n) {
n = n >>> 4;
}
return r;
-};
+}
/**
@@ -2597,7 +2607,7 @@ function ValueToProtocolValue_(value, mirror_serializer) {
case 'string':
case 'number':
json = value;
- break
+ break;
default:
json = null;
diff --git a/src/3rdparty/v8/src/debug.cc b/src/3rdparty/v8/src/debug.cc
index 88149d8..99256ba 100644
--- a/src/3rdparty/v8/src/debug.cc
+++ b/src/3rdparty/v8/src/debug.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -37,6 +37,7 @@
#include "debug.h"
#include "deoptimizer.h"
#include "execution.h"
+#include "full-codegen.h"
#include "global-handles.h"
#include "ic.h"
#include "ic-inl.h"
@@ -85,12 +86,6 @@ static void PrintLn(v8::Local<v8::Value> value) {
}
-static Handle<Code> ComputeCallDebugBreak(int argc, Code::Kind kind) {
- Isolate* isolate = Isolate::Current();
- return isolate->stub_cache()->ComputeCallDebugBreak(argc, kind);
-}
-
-
static Handle<Code> ComputeCallDebugPrepareStepIn(int argc, Code::Kind kind) {
Isolate* isolate = Isolate::Current();
return isolate->stub_cache()->ComputeCallDebugPrepareStepIn(argc, kind);
@@ -682,7 +677,7 @@ void ScriptCache::HandleWeakScript(v8::Persistent<v8::Value> obj, void* data) {
}
-void Debug::Setup(bool create_heap_objects) {
+void Debug::SetUp(bool create_heap_objects) {
ThreadInit();
if (create_heap_objects) {
// Get code to handle debug break on return.
@@ -772,15 +767,22 @@ bool Debug::CompileDebuggerScript(int index) {
Handle<JSFunction> function =
factory->NewFunctionFromSharedFunctionInfo(function_info, context);
- Execution::TryCall(function, Handle<Object>(context->global()),
- 0, NULL, &caught_exception);
+ Handle<Object> exception =
+ Execution::TryCall(function, Handle<Object>(context->global()),
+ 0, NULL, &caught_exception);
// Check for caught exceptions.
if (caught_exception) {
+ ASSERT(!isolate->has_pending_exception());
+ MessageLocation computed_location;
+ isolate->ComputeLocation(&computed_location);
Handle<Object> message = MessageHandler::MakeMessageObject(
- "error_loading_debugger", NULL, Vector<Handle<Object> >::empty(),
- Handle<String>(), Handle<JSArray>());
+ "error_loading_debugger", &computed_location,
+ Vector<Handle<Object> >::empty(), Handle<String>(), Handle<JSArray>());
+ ASSERT(!isolate->has_pending_exception());
+ isolate->set_pending_exception(*exception);
MessageHandler::ReportMessage(Isolate::Current(), NULL, message);
+ isolate->clear_pending_exception();
return false;
}
@@ -818,6 +820,9 @@ bool Debug::Load() {
v8::Handle<ObjectTemplate>(),
NULL);
+ // Fail if no context could be created.
+ if (context.is_null()) return false;
+
// Use the debugger context.
SaveContext save(isolate_);
isolate_->set_context(*context);
@@ -827,8 +832,8 @@ bool Debug::Load() {
Handle<GlobalObject> global = Handle<GlobalObject>(context->global());
RETURN_IF_EMPTY_HANDLE_VALUE(
isolate_,
- SetProperty(global, key, Handle<Object>(global->builtins()),
- NONE, kNonStrictMode),
+ JSReceiver::SetProperty(global, key, Handle<Object>(global->builtins()),
+ NONE, kNonStrictMode),
false);
// Compile the JavaScript for the debugger in the debugger context.
@@ -1146,7 +1151,7 @@ void Debug::SetBreakPoint(Handle<SharedFunctionInfo> shared,
Handle<DebugInfo> debug_info = GetDebugInfo(shared);
// Source positions starts with zero.
- ASSERT(source_position >= 0);
+ ASSERT(*source_position >= 0);
// Find the break point and change it.
BreakLocationIterator it(debug_info, SOURCE_BREAK_LOCATIONS);
@@ -1213,7 +1218,7 @@ void Debug::ClearAllBreakPoints() {
void Debug::FloodWithOneShot(Handle<SharedFunctionInfo> shared) {
PrepareForBreakPoints();
- // Make sure the function has setup the debug info.
+ // Make sure the function has set up the debug info.
if (!EnsureDebugInfo(shared)) {
// Return if we failed to retrieve the debug info.
return;
@@ -1228,6 +1233,18 @@ void Debug::FloodWithOneShot(Handle<SharedFunctionInfo> shared) {
}
+void Debug::FloodBoundFunctionWithOneShot(Handle<JSFunction> function) {
+ Handle<FixedArray> new_bindings(function->function_bindings());
+ Handle<Object> bindee(new_bindings->get(JSFunction::kBoundFunctionIndex));
+
+ if (!bindee.is_null() && bindee->IsJSFunction() &&
+ !JSFunction::cast(*bindee)->IsBuiltin()) {
+ Handle<SharedFunctionInfo> shared_info(JSFunction::cast(*bindee)->shared());
+ Debug::FloodWithOneShot(shared_info);
+ }
+}
+
+
void Debug::FloodHandlerWithOneShot() {
// Iterate through the JavaScript stack looking for handlers.
StackFrame::Id id = break_frame_id();
@@ -1447,8 +1464,10 @@ void Debug::PrepareStep(StepAction step_action, int step_count) {
expressions_count - 2 - call_function_arg_count);
if (fun->IsJSFunction()) {
Handle<JSFunction> js_function(JSFunction::cast(fun));
- // Don't step into builtins.
- if (!js_function->IsBuiltin()) {
+ if (js_function->shared()->bound()) {
+ Debug::FloodBoundFunctionWithOneShot(js_function);
+ } else if (!js_function->IsBuiltin()) {
+ // Don't step into builtins.
// It will also compile target function if it's not compiled yet.
FloodWithOneShot(Handle<SharedFunctionInfo>(js_function->shared()));
}
@@ -1538,40 +1557,47 @@ bool Debug::IsBreakStub(Code* code) {
// Find the builtin to use for invoking the debug break
Handle<Code> Debug::FindDebugBreak(Handle<Code> code, RelocInfo::Mode mode) {
+ Isolate* isolate = Isolate::Current();
+
// Find the builtin debug break function matching the calling convention
// used by the call site.
if (code->is_inline_cache_stub()) {
switch (code->kind()) {
case Code::CALL_IC:
case Code::KEYED_CALL_IC:
- return ComputeCallDebugBreak(code->arguments_count(), code->kind());
+ return isolate->stub_cache()->ComputeCallDebugBreak(
+ code->arguments_count(), code->kind());
case Code::LOAD_IC:
- return Isolate::Current()->builtins()->LoadIC_DebugBreak();
+ return isolate->builtins()->LoadIC_DebugBreak();
case Code::STORE_IC:
- return Isolate::Current()->builtins()->StoreIC_DebugBreak();
+ return isolate->builtins()->StoreIC_DebugBreak();
case Code::KEYED_LOAD_IC:
- return Isolate::Current()->builtins()->KeyedLoadIC_DebugBreak();
+ return isolate->builtins()->KeyedLoadIC_DebugBreak();
case Code::KEYED_STORE_IC:
- return Isolate::Current()->builtins()->KeyedStoreIC_DebugBreak();
+ return isolate->builtins()->KeyedStoreIC_DebugBreak();
default:
UNREACHABLE();
}
}
if (RelocInfo::IsConstructCall(mode)) {
- Handle<Code> result =
- Isolate::Current()->builtins()->ConstructCall_DebugBreak();
- return result;
+ if (code->has_function_cache()) {
+ return isolate->builtins()->CallConstructStub_Recording_DebugBreak();
+ } else {
+ return isolate->builtins()->CallConstructStub_DebugBreak();
+ }
}
if (code->kind() == Code::STUB) {
ASSERT(code->major_key() == CodeStub::CallFunction);
- Handle<Code> result =
- Isolate::Current()->builtins()->StubNoRegisters_DebugBreak();
- return result;
+ if (code->has_function_cache()) {
+ return isolate->builtins()->CallFunctionStub_Recording_DebugBreak();
+ } else {
+ return isolate->builtins()->CallFunctionStub_DebugBreak();
+ }
}
UNREACHABLE();
@@ -1637,8 +1663,11 @@ void Debug::HandleStepIn(Handle<JSFunction> function,
// Flood the function with one-shot break points if it is called from where
// step into was requested.
if (fp == step_in_fp()) {
- // Don't allow step into functions in the native context.
- if (!function->IsBuiltin()) {
+ if (function->shared()->bound()) {
+ // Handle Function.prototype.bind
+ Debug::FloodBoundFunctionWithOneShot(function);
+ } else if (!function->IsBuiltin()) {
+ // Don't allow step into functions in the native context.
if (function->shared()->code() ==
Isolate::Current()->builtins()->builtin(Builtins::kFunctionApply) ||
function->shared()->code() ==
@@ -1751,13 +1780,141 @@ static bool CompileFullCodeForDebugging(Handle<SharedFunctionInfo> shared,
ASSERT(new_code->has_debug_break_slots());
ASSERT(current_code->is_compiled_optimizable() ==
new_code->is_compiled_optimizable());
- ASSERT(current_code->instruction_size() <= new_code->instruction_size());
}
#endif
return result;
}
+static void CollectActiveFunctionsFromThread(
+ Isolate* isolate,
+ ThreadLocalTop* top,
+ List<Handle<JSFunction> >* active_functions,
+ Object* active_code_marker) {
+ // Find all non-optimized code functions with activation frames
+ // on the stack. This includes functions which have optimized
+ // activations (including inlined functions) on the stack as the
+ // non-optimized code is needed for the lazy deoptimization.
+ for (JavaScriptFrameIterator it(isolate, top); !it.done(); it.Advance()) {
+ JavaScriptFrame* frame = it.frame();
+ if (frame->is_optimized()) {
+ List<JSFunction*> functions(Compiler::kMaxInliningLevels + 1);
+ frame->GetFunctions(&functions);
+ for (int i = 0; i < functions.length(); i++) {
+ JSFunction* function = functions[i];
+ active_functions->Add(Handle<JSFunction>(function));
+ function->shared()->code()->set_gc_metadata(active_code_marker);
+ }
+ } else if (frame->function()->IsJSFunction()) {
+ JSFunction* function = JSFunction::cast(frame->function());
+ ASSERT(frame->LookupCode()->kind() == Code::FUNCTION);
+ active_functions->Add(Handle<JSFunction>(function));
+ function->shared()->code()->set_gc_metadata(active_code_marker);
+ }
+ }
+}
+
+
+static void RedirectActivationsToRecompiledCodeOnThread(
+ Isolate* isolate,
+ ThreadLocalTop* top) {
+ for (JavaScriptFrameIterator it(isolate, top); !it.done(); it.Advance()) {
+ JavaScriptFrame* frame = it.frame();
+
+ if (frame->is_optimized() || !frame->function()->IsJSFunction()) continue;
+
+ JSFunction* function = JSFunction::cast(frame->function());
+
+ ASSERT(frame->LookupCode()->kind() == Code::FUNCTION);
+
+ Handle<Code> frame_code(frame->LookupCode());
+ if (frame_code->has_debug_break_slots()) continue;
+
+ Handle<Code> new_code(function->shared()->code());
+ if (new_code->kind() != Code::FUNCTION ||
+ !new_code->has_debug_break_slots()) {
+ continue;
+ }
+
+ intptr_t delta = frame->pc() - frame_code->instruction_start();
+ int debug_break_slot_count = 0;
+ int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT);
+ for (RelocIterator it(*new_code, mask); !it.done(); it.next()) {
+ // Check if the pc in the new code with debug break
+ // slots is before this slot.
+ RelocInfo* info = it.rinfo();
+ int debug_break_slot_bytes =
+ debug_break_slot_count * Assembler::kDebugBreakSlotLength;
+ intptr_t new_delta =
+ info->pc() -
+ new_code->instruction_start() -
+ debug_break_slot_bytes;
+ if (new_delta > delta) {
+ break;
+ }
+
+ // Passed a debug break slot in the full code with debug
+ // break slots.
+ debug_break_slot_count++;
+ }
+ int debug_break_slot_bytes =
+ debug_break_slot_count * Assembler::kDebugBreakSlotLength;
+ if (FLAG_trace_deopt) {
+ PrintF("Replacing code %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
+ "with %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
+ "for debugging, "
+ "changing pc from %08" V8PRIxPTR " to %08" V8PRIxPTR "\n",
+ reinterpret_cast<intptr_t>(
+ frame_code->instruction_start()),
+ reinterpret_cast<intptr_t>(
+ frame_code->instruction_start()) +
+ frame_code->instruction_size(),
+ frame_code->instruction_size(),
+ reinterpret_cast<intptr_t>(new_code->instruction_start()),
+ reinterpret_cast<intptr_t>(new_code->instruction_start()) +
+ new_code->instruction_size(),
+ new_code->instruction_size(),
+ reinterpret_cast<intptr_t>(frame->pc()),
+ reinterpret_cast<intptr_t>(new_code->instruction_start()) +
+ delta + debug_break_slot_bytes);
+ }
+
+ // Patch the return address to return into the code with
+ // debug break slots.
+ frame->set_pc(
+ new_code->instruction_start() + delta + debug_break_slot_bytes);
+ }
+}
+
+
+class ActiveFunctionsCollector : public ThreadVisitor {
+ public:
+ explicit ActiveFunctionsCollector(List<Handle<JSFunction> >* active_functions,
+ Object* active_code_marker)
+ : active_functions_(active_functions),
+ active_code_marker_(active_code_marker) { }
+
+ void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
+ CollectActiveFunctionsFromThread(isolate,
+ top,
+ active_functions_,
+ active_code_marker_);
+ }
+
+ private:
+ List<Handle<JSFunction> >* active_functions_;
+ Object* active_code_marker_;
+};
+
+
+class ActiveFunctionsRedirector : public ThreadVisitor {
+ public:
+ void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
+ RedirectActivationsToRecompiledCodeOnThread(isolate, top);
+ }
+};
+
+
void Debug::PrepareForBreakPoints() {
// If preparing for the first break point make sure to deoptimize all
// functions as debugging does not work with optimized code.
@@ -1774,60 +1931,62 @@ void Debug::PrepareForBreakPoints() {
{
// We are going to iterate heap to find all functions without
// debug break slots.
- isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ isolate_->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+ "preparing for breakpoints");
- // Ensure no GC in this scope as we are comparing raw pointer
- // values and performing a heap iteration.
+ // Ensure no GC in this scope as we are going to use gc_metadata
+ // field in the Code object to mark active functions.
AssertNoAllocation no_allocation;
- // Find all non-optimized code functions with activation frames on
- // the stack.
- for (JavaScriptFrameIterator it(isolate_); !it.done(); it.Advance()) {
- JavaScriptFrame* frame = it.frame();
- if (frame->function()->IsJSFunction()) {
- JSFunction* function = JSFunction::cast(frame->function());
- if (function->code()->kind() == Code::FUNCTION &&
- !function->code()->has_debug_break_slots())
- active_functions.Add(Handle<JSFunction>(function));
- }
- }
- // Sort the functions on the object pointer value to prepare for
- // the binary search below.
- active_functions.Sort(HandleObjectPointerCompare<JSFunction>);
+ Object* active_code_marker = isolate_->heap()->the_hole_value();
- // Scan the heap for all non-optimized functions which has no
- // debug break slots.
+ CollectActiveFunctionsFromThread(isolate_,
+ isolate_->thread_local_top(),
+ &active_functions,
+ active_code_marker);
+ ActiveFunctionsCollector active_functions_collector(&active_functions,
+ active_code_marker);
+ isolate_->thread_manager()->IterateArchivedThreads(
+ &active_functions_collector);
+
+ // Scan the heap for all non-optimized functions which have no
+ // debug break slots and are not active or inlined into an active
+ // function and mark them for lazy compilation.
HeapIterator iterator;
HeapObject* obj = NULL;
while (((obj = iterator.next()) != NULL)) {
if (obj->IsJSFunction()) {
JSFunction* function = JSFunction::cast(obj);
- if (function->shared()->allows_lazy_compilation() &&
- function->shared()->script()->IsScript() &&
+ SharedFunctionInfo* shared = function->shared();
+ if (shared->allows_lazy_compilation() &&
+ shared->script()->IsScript() &&
function->code()->kind() == Code::FUNCTION &&
- !function->code()->has_debug_break_slots()) {
- bool has_activation =
- SortedListBSearch<Handle<JSFunction> >(
- active_functions,
- Handle<JSFunction>(function),
- HandleObjectPointerCompare<JSFunction>) != -1;
- if (!has_activation) {
- function->set_code(*lazy_compile);
- function->shared()->set_code(*lazy_compile);
- }
+ !function->code()->has_debug_break_slots() &&
+ shared->code()->gc_metadata() != active_code_marker) {
+ function->set_code(*lazy_compile);
+ function->shared()->set_code(*lazy_compile);
}
}
}
- }
- // Now the non-GC scope is left, and the sorting of the functions
- // in active_function is not ensured any more. The code below does
- // not rely on it.
+ // Clear gc_metadata field.
+ for (int i = 0; i < active_functions.length(); i++) {
+ Handle<JSFunction> function = active_functions[i];
+ function->shared()->code()->set_gc_metadata(Smi::FromInt(0));
+ }
+ }
// Now recompile all functions with activation frames and and
// patch the return address to run in the new compiled code.
for (int i = 0; i < active_functions.length(); i++) {
Handle<JSFunction> function = active_functions[i];
+
+ if (function->code()->kind() == Code::FUNCTION &&
+ function->code()->has_debug_break_slots()) {
+ // Nothing to do. Function code already had debug break slots.
+ continue;
+ }
+
Handle<SharedFunctionInfo> shared(function->shared());
// If recompilation is not possible just skip it.
if (shared->is_toplevel() ||
@@ -1838,20 +1997,16 @@ void Debug::PrepareForBreakPoints() {
// Make sure that the shared full code is compiled with debug
// break slots.
- Handle<Code> current_code(function->code());
- if (shared->code()->has_debug_break_slots()) {
- // if the code is already recompiled to have break slots skip
- // recompilation.
- ASSERT(!function->code()->has_debug_break_slots());
- } else {
+ if (!shared->code()->has_debug_break_slots()) {
// Try to compile the full code with debug break slots. If it
// fails just keep the current code.
- ASSERT(shared->code() == *current_code);
+ Handle<Code> current_code(function->shared()->code());
ZoneScope zone_scope(isolate_, DELETE_ON_EXIT);
shared->set_code(*lazy_compile);
bool prev_force_debugger_active =
isolate_->debugger()->force_debugger_active();
isolate_->debugger()->set_force_debugger_active(true);
+ ASSERT(current_code->kind() == Code::FUNCTION);
CompileFullCodeForDebugging(shared, current_code);
isolate_->debugger()->set_force_debugger_active(
prev_force_debugger_active);
@@ -1860,67 +2015,17 @@ void Debug::PrepareForBreakPoints() {
continue;
}
}
- Handle<Code> new_code(shared->code());
-
- // Find the function and patch return address.
- for (JavaScriptFrameIterator it(isolate_); !it.done(); it.Advance()) {
- JavaScriptFrame* frame = it.frame();
- // If the current frame is for this function in its
- // non-optimized form rewrite the return address to continue
- // in the newly compiled full code with debug break slots.
- if (frame->function()->IsJSFunction() &&
- frame->function() == *function &&
- frame->LookupCode()->kind() == Code::FUNCTION) {
- intptr_t delta = frame->pc() - current_code->instruction_start();
- int debug_break_slot_count = 0;
- int mask = RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT);
- for (RelocIterator it(*new_code, mask); !it.done(); it.next()) {
- // Check if the pc in the new code with debug break
- // slots is before this slot.
- RelocInfo* info = it.rinfo();
- int debug_break_slot_bytes =
- debug_break_slot_count * Assembler::kDebugBreakSlotLength;
- intptr_t new_delta =
- info->pc() -
- new_code->instruction_start() -
- debug_break_slot_bytes;
- if (new_delta > delta) {
- break;
- }
-
- // Passed a debug break slot in the full code with debug
- // break slots.
- debug_break_slot_count++;
- }
- int debug_break_slot_bytes =
- debug_break_slot_count * Assembler::kDebugBreakSlotLength;
- if (FLAG_trace_deopt) {
- PrintF("Replacing code %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
- "with %08" V8PRIxPTR " - %08" V8PRIxPTR " (%d) "
- "for debugging, "
- "changing pc from %08" V8PRIxPTR " to %08" V8PRIxPTR "\n",
- reinterpret_cast<intptr_t>(
- current_code->instruction_start()),
- reinterpret_cast<intptr_t>(
- current_code->instruction_start()) +
- current_code->instruction_size(),
- current_code->instruction_size(),
- reinterpret_cast<intptr_t>(new_code->instruction_start()),
- reinterpret_cast<intptr_t>(new_code->instruction_start()) +
- new_code->instruction_size(),
- new_code->instruction_size(),
- reinterpret_cast<intptr_t>(frame->pc()),
- reinterpret_cast<intptr_t>(new_code->instruction_start()) +
- delta + debug_break_slot_bytes);
- }
- // Patch the return address to return into the code with
- // debug break slots.
- frame->set_pc(
- new_code->instruction_start() + delta + debug_break_slot_bytes);
- }
- }
+ // Keep function code in sync with shared function info.
+ function->set_code(shared->code());
}
+
+ RedirectActivationsToRecompiledCodeOnThread(isolate_,
+ isolate_->thread_local_top());
+
+ ActiveFunctionsRedirector active_functions_redirector;
+ isolate_->thread_manager()->IterateArchivedThreads(
+ &active_functions_redirector);
}
}
@@ -2153,8 +2258,9 @@ void Debug::CreateScriptCache() {
// rid of all the cached script wrappers and the second gets rid of the
// scripts which are no longer referenced. The second also sweeps precisely,
// which saves us doing yet another GC to make the heap iterable.
- heap->CollectAllGarbage(Heap::kNoGCFlags);
- heap->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ heap->CollectAllGarbage(Heap::kNoGCFlags, "Debug::CreateScriptCache");
+ heap->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+ "Debug::CreateScriptCache");
ASSERT(script_cache_ == NULL);
script_cache_ = new ScriptCache();
@@ -2204,7 +2310,8 @@ Handle<FixedArray> Debug::GetLoadedScripts() {
// Perform GC to get unreferenced scripts evicted from the cache before
// returning the content.
- isolate_->heap()->CollectAllGarbage(Heap::kNoGCFlags);
+ isolate_->heap()->CollectAllGarbage(Heap::kNoGCFlags,
+ "Debug::GetLoadedScripts");
// Get the scripts from the cache.
return script_cache_->GetScripts();
@@ -2840,7 +2947,7 @@ void Debugger::NotifyMessageHandler(v8::DebugEvent event,
command.Dispose();
// Return from debug event processing if either the VM is put into the
- // runnning state (through a continue command) or auto continue is active
+ // running state (through a continue command) or auto continue is active
// and there are no more commands queued.
if (running && !HasCommands()) {
return;
@@ -3050,7 +3157,7 @@ bool Debugger::StartAgent(const char* name, int port,
v8::Debug::DebugBreak();
}
- if (Socket::Setup()) {
+ if (Socket::SetUp()) {
if (agent_ == NULL) {
agent_ = new DebuggerAgent(name, port);
agent_->Start();
@@ -3135,7 +3242,7 @@ EnterDebugger::~EnterDebugger() {
debug->SetBreak(break_frame_id_, break_id_);
// Check for leaving the debugger.
- if (prev_ == NULL) {
+ if (!load_failed_ && prev_ == NULL) {
// Clear mirror cache when leaving the debugger. Skip this if there is a
// pending exception as clearing the mirror cache calls back into
// JavaScript. This can happen if the v8::Debug::Call is used in which
diff --git a/src/3rdparty/v8/src/debug.h b/src/3rdparty/v8/src/debug.h
index 3c37186..474b90b 100644
--- a/src/3rdparty/v8/src/debug.h
+++ b/src/3rdparty/v8/src/debug.h
@@ -178,7 +178,9 @@ class ScriptCache : private HashMap {
private:
// Calculate the hash value from the key (script id).
- static uint32_t Hash(int key) { return ComputeIntegerHash(key); }
+ static uint32_t Hash(int key) {
+ return ComputeIntegerHash(key, v8::internal::kZeroHashSeed);
+ }
// Scripts match if their keys (script id) match.
static bool ScriptMatch(void* key1, void* key2) { return key1 == key2; }
@@ -222,7 +224,7 @@ class DebugInfoListNode {
// DebugInfo.
class Debug {
public:
- void Setup(bool create_heap_objects);
+ void SetUp(bool create_heap_objects);
bool Load();
void Unload();
bool IsLoaded() { return !debug_context_.is_null(); }
@@ -237,6 +239,7 @@ class Debug {
void ClearBreakPoint(Handle<Object> break_point_object);
void ClearAllBreakPoints();
void FloodWithOneShot(Handle<SharedFunctionInfo> shared);
+ void FloodBoundFunctionWithOneShot(Handle<JSFunction> function);
void FloodHandlerWithOneShot();
void ChangeBreakOnException(ExceptionBreakType type, bool enable);
bool IsBreakOnException(ExceptionBreakType type);
@@ -400,9 +403,11 @@ class Debug {
static void GenerateStoreICDebugBreak(MacroAssembler* masm);
static void GenerateKeyedLoadICDebugBreak(MacroAssembler* masm);
static void GenerateKeyedStoreICDebugBreak(MacroAssembler* masm);
- static void GenerateConstructCallDebugBreak(MacroAssembler* masm);
static void GenerateReturnDebugBreak(MacroAssembler* masm);
- static void GenerateStubNoRegistersDebugBreak(MacroAssembler* masm);
+ static void GenerateCallFunctionStubDebugBreak(MacroAssembler* masm);
+ static void GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm);
+ static void GenerateCallConstructStubDebugBreak(MacroAssembler* masm);
+ static void GenerateCallConstructStubRecordDebugBreak(MacroAssembler* masm);
static void GenerateSlotDebugBreak(MacroAssembler* masm);
static void GeneratePlainReturnLiveEdit(MacroAssembler* masm);
diff --git a/src/3rdparty/v8/src/deoptimizer.cc b/src/3rdparty/v8/src/deoptimizer.cc
index a83493d..2a30ddd 100644
--- a/src/3rdparty/v8/src/deoptimizer.cc
+++ b/src/3rdparty/v8/src/deoptimizer.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -104,10 +104,27 @@ Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
return result;
}
+
+int Deoptimizer::ConvertJSFrameIndexToFrameIndex(int jsframe_index) {
+ if (jsframe_index == 0) return 0;
+
+ int frame_index = 0;
+ while (jsframe_index >= 0) {
+ FrameDescription* frame = output_[frame_index];
+ if (frame->GetFrameType() == StackFrame::JAVA_SCRIPT) {
+ jsframe_index--;
+ }
+ frame_index++;
+ }
+
+ return frame_index - 1;
+}
+
+
#ifdef ENABLE_DEBUGGER_SUPPORT
DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
JavaScriptFrame* frame,
- int frame_index,
+ int jsframe_index,
Isolate* isolate) {
ASSERT(isolate == Isolate::Current());
ASSERT(frame->is_optimized());
@@ -116,25 +133,11 @@ DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
// Get the function and code from the frame.
JSFunction* function = JSFunction::cast(frame->function());
Code* code = frame->LookupCode();
- Address code_start_address = code->instruction_start();
// Locate the deoptimization point in the code. As we are at a call the
// return address must be at a place in the code with deoptimization support.
- int deoptimization_index = Safepoint::kNoDeoptimizationIndex;
- // Scope this as the safe point constructor will disallow allocation.
- {
- SafepointTable table(code);
- for (unsigned i = 0; i < table.length(); ++i) {
- Address address = code_start_address + table.GetPcOffset(i);
- if (address == frame->pc()) {
- SafepointEntry safepoint_entry = table.GetEntry(i);
- ASSERT(safepoint_entry.deoptimization_index() !=
- Safepoint::kNoDeoptimizationIndex);
- deoptimization_index = safepoint_entry.deoptimization_index();
- break;
- }
- }
- }
+ SafepointEntry safepoint_entry = code->GetSafepointEntry(frame->pc());
+ int deoptimization_index = safepoint_entry.deoptimization_index();
ASSERT(deoptimization_index != Safepoint::kNoDeoptimizationIndex);
// Always use the actual stack slots when calculating the fp to sp
@@ -157,22 +160,48 @@ DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
// Create the GC safe output frame information and register it for GC
// handling.
- ASSERT_LT(frame_index, deoptimizer->output_count());
- DeoptimizedFrameInfo* info =
- new DeoptimizedFrameInfo(deoptimizer, frame_index);
+ ASSERT_LT(jsframe_index, deoptimizer->jsframe_count());
+
+ // Convert JS frame index into frame index.
+ int frame_index = deoptimizer->ConvertJSFrameIndexToFrameIndex(jsframe_index);
+
+ bool has_arguments_adaptor =
+ frame_index > 0 &&
+ deoptimizer->output_[frame_index - 1]->GetFrameType() ==
+ StackFrame::ARGUMENTS_ADAPTOR;
+
+ int construct_offset = has_arguments_adaptor ? 2 : 1;
+ bool has_construct_stub =
+ frame_index >= construct_offset &&
+ deoptimizer->output_[frame_index - construct_offset]->GetFrameType() ==
+ StackFrame::CONSTRUCT;
+
+ DeoptimizedFrameInfo* info = new DeoptimizedFrameInfo(deoptimizer,
+ frame_index,
+ has_arguments_adaptor,
+ has_construct_stub);
isolate->deoptimizer_data()->deoptimized_frame_info_ = info;
// Get the "simulated" top and size for the requested frame.
- Address top =
- reinterpret_cast<Address>(deoptimizer->output_[frame_index]->GetTop());
- uint32_t size = deoptimizer->output_[frame_index]->GetFrameSize();
+ FrameDescription* parameters_frame =
+ deoptimizer->output_[
+ has_arguments_adaptor ? (frame_index - 1) : frame_index];
+
+ uint32_t parameters_size = (info->parameters_count() + 1) * kPointerSize;
+ Address parameters_top = reinterpret_cast<Address>(
+ parameters_frame->GetTop() + (parameters_frame->GetFrameSize() -
+ parameters_size));
+
+ uint32_t expressions_size = info->expression_count() * kPointerSize;
+ Address expressions_top = reinterpret_cast<Address>(
+ deoptimizer->output_[frame_index]->GetTop());
// Done with the GC-unsafe frame descriptions. This re-enables allocation.
deoptimizer->DeleteFrameDescriptions();
// Allocate a heap number for the doubles belonging to this frame.
deoptimizer->MaterializeHeapNumbersForDebuggerInspectableFrame(
- top, size, info);
+ parameters_top, parameters_size, expressions_top, expressions_size, info);
// Finished using the deoptimizer instance.
delete deoptimizer;
@@ -278,11 +307,16 @@ void Deoptimizer::VisitAllOptimizedFunctions(
AssertNoAllocation no_allocation;
// Run through the list of all global contexts and deoptimize.
- Object* global = Isolate::Current()->heap()->global_contexts_list();
- while (!global->IsUndefined()) {
- VisitAllOptimizedFunctionsForGlobalObject(Context::cast(global)->global(),
- visitor);
- global = Context::cast(global)->get(Context::NEXT_CONTEXT_LINK);
+ Object* context = Isolate::Current()->heap()->global_contexts_list();
+ while (!context->IsUndefined()) {
+ // GC can happen when the context is not fully initialized,
+ // so the global field of the context can be undefined.
+ Object* global = Context::cast(context)->get(Context::GLOBAL_INDEX);
+ if (!global->IsUndefined()) {
+ VisitAllOptimizedFunctionsForGlobalObject(JSObject::cast(global),
+ visitor);
+ }
+ context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
}
}
@@ -322,9 +356,8 @@ Deoptimizer::Deoptimizer(Isolate* isolate,
fp_to_sp_delta_(fp_to_sp_delta),
input_(NULL),
output_count_(0),
+ jsframe_count_(0),
output_(NULL),
- frame_alignment_marker_(isolate->heap()->frame_alignment_marker()),
- has_alignment_padding_(0),
deferred_heap_numbers_(0) {
if (FLAG_trace_deopt && type != OSR) {
if (type == DEBUGGER) {
@@ -386,9 +419,7 @@ Deoptimizer::Deoptimizer(Isolate* isolate,
ASSERT(HEAP->allow_allocation(false));
unsigned size = ComputeInputFrameSize();
input_ = new(size) FrameDescription(size, function);
-#ifdef DEBUG
- input_->SetKind(Code::OPTIMIZED_FUNCTION);
-#endif
+ input_->SetFrameType(StackFrame::JAVA_SCRIPT);
}
@@ -426,7 +457,7 @@ Address Deoptimizer::GetDeoptimizationEntry(int id, BailoutType type) {
base = data->lazy_deoptimization_entry_code_;
}
return
- static_cast<Address>(base->body()) + (id * table_entry_size_);
+ static_cast<Address>(base->area_start()) + (id * table_entry_size_);
}
@@ -439,14 +470,14 @@ int Deoptimizer::GetDeoptimizationId(Address addr, BailoutType type) {
base = data->lazy_deoptimization_entry_code_;
}
if (base == NULL ||
- addr < base->body() ||
- addr >= base->body() +
+ addr < base->area_start() ||
+ addr >= base->area_start() +
(kNumberOfEntries * table_entry_size_)) {
return kNotDeoptimizationEntry;
}
ASSERT_EQ(0,
- static_cast<int>(addr - base->body()) % table_entry_size_);
- return static_cast<int>(addr - base->body()) / table_entry_size_;
+ static_cast<int>(addr - base->area_start()) % table_entry_size_);
+ return static_cast<int>(addr - base->area_start()) / table_entry_size_;
}
@@ -524,6 +555,7 @@ void Deoptimizer::DoComputeOutputFrames() {
// Read the number of output frames and allocate an array for their
// descriptions.
int count = iterator.Next();
+ iterator.Next(); // Drop JS frames count.
ASSERT(output_ == NULL);
output_ = new FrameDescription*[count];
for (int i = 0; i < count; ++i) {
@@ -533,7 +565,24 @@ void Deoptimizer::DoComputeOutputFrames() {
// Translate each output frame.
for (int i = 0; i < count; ++i) {
- DoComputeFrame(&iterator, i);
+ // Read the ast node id, function, and frame height for this output frame.
+ Translation::Opcode opcode =
+ static_cast<Translation::Opcode>(iterator.Next());
+ switch (opcode) {
+ case Translation::JS_FRAME:
+ DoComputeJSFrame(&iterator, i);
+ jsframe_count_++;
+ break;
+ case Translation::ARGUMENTS_ADAPTOR_FRAME:
+ DoComputeArgumentsAdaptorFrame(&iterator, i);
+ break;
+ case Translation::CONSTRUCT_STUB_FRAME:
+ DoComputeConstructStubFrame(&iterator, i);
+ break;
+ default:
+ UNREACHABLE();
+ break;
+ }
}
// Print some helpful diagnostic information.
@@ -574,39 +623,52 @@ void Deoptimizer::MaterializeHeapNumbers() {
#ifdef ENABLE_DEBUGGER_SUPPORT
void Deoptimizer::MaterializeHeapNumbersForDebuggerInspectableFrame(
- Address top, uint32_t size, DeoptimizedFrameInfo* info) {
+ Address parameters_top,
+ uint32_t parameters_size,
+ Address expressions_top,
+ uint32_t expressions_size,
+ DeoptimizedFrameInfo* info) {
ASSERT_EQ(DEBUGGER, bailout_type_);
+ Address parameters_bottom = parameters_top + parameters_size;
+ Address expressions_bottom = expressions_top + expressions_size;
for (int i = 0; i < deferred_heap_numbers_.length(); i++) {
HeapNumberMaterializationDescriptor d = deferred_heap_numbers_[i];
// Check of the heap number to materialize actually belong to the frame
// being extracted.
Address slot = d.slot_address();
- if (top <= slot && slot < top + size) {
+ if (parameters_top <= slot && slot < parameters_bottom) {
Handle<Object> num = isolate_->factory()->NewNumber(d.value());
- // Calculate the index with the botton of the expression stack
- // at index 0, and the fixed part (including incoming arguments)
- // at negative indexes.
- int index = static_cast<int>(
- info->expression_count_ - (slot - top) / kPointerSize - 1);
+
+ int index = (info->parameters_count() - 1) -
+ static_cast<int>(slot - parameters_top) / kPointerSize;
+
if (FLAG_trace_deopt) {
PrintF("Materializing a new heap number %p [%e] in slot %p"
- "for stack index %d\n",
+ "for parameter slot #%d\n",
reinterpret_cast<void*>(*num),
d.value(),
d.slot_address(),
index);
}
- if (index >=0) {
- info->SetExpression(index, *num);
- } else {
- // Calculate parameter index subtracting one for the receiver.
- int parameter_index =
- index +
- static_cast<int>(size) / kPointerSize -
- info->expression_count_ - 1;
- info->SetParameter(parameter_index, *num);
+
+ info->SetParameter(index, *num);
+ } else if (expressions_top <= slot && slot < expressions_bottom) {
+ Handle<Object> num = isolate_->factory()->NewNumber(d.value());
+
+ int index = info->expression_count() - 1 -
+ static_cast<int>(slot - expressions_top) / kPointerSize;
+
+ if (FLAG_trace_deopt) {
+ PrintF("Materializing a new heap number %p [%e] in slot %p"
+ "for expression slot #%d\n",
+ reinterpret_cast<void*>(*num),
+ d.value(),
+ d.slot_address(),
+ index);
}
+
+ info->SetExpression(index, *num);
}
}
}
@@ -631,7 +693,9 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
switch (opcode) {
case Translation::BEGIN:
- case Translation::FRAME:
+ case Translation::JS_FRAME:
+ case Translation::ARGUMENTS_ADAPTOR_FRAME:
+ case Translation::CONSTRUCT_STUB_FRAME:
case Translation::DUPLICATE:
UNREACHABLE();
return;
@@ -700,7 +764,7 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
case Translation::STACK_SLOT: {
int input_slot_index = iterator->Next();
unsigned input_offset =
- input_->GetOffsetFromSlotIndex(this, input_slot_index);
+ input_->GetOffsetFromSlotIndex(input_slot_index);
intptr_t input_value = input_->GetFrameSlot(input_offset);
if (FLAG_trace_deopt) {
PrintF(" 0x%08" V8PRIxPTR ": ",
@@ -719,7 +783,7 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
case Translation::INT32_STACK_SLOT: {
int input_slot_index = iterator->Next();
unsigned input_offset =
- input_->GetOffsetFromSlotIndex(this, input_slot_index);
+ input_->GetOffsetFromSlotIndex(input_slot_index);
intptr_t value = input_->GetFrameSlot(input_offset);
bool is_smi = Smi::IsValid(value);
if (FLAG_trace_deopt) {
@@ -748,7 +812,7 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
case Translation::DOUBLE_STACK_SLOT: {
int input_slot_index = iterator->Next();
unsigned input_offset =
- input_->GetOffsetFromSlotIndex(this, input_slot_index);
+ input_->GetOffsetFromSlotIndex(input_slot_index);
double value = input_->GetDoubleFrameSlot(input_offset);
if (FLAG_trace_deopt) {
PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- %e ; [esp + %d]\n",
@@ -781,7 +845,6 @@ void Deoptimizer::DoTranslateCommand(TranslationIterator* iterator,
case Translation::ARGUMENTS_OBJECT: {
// Use the arguments marker value as a sentinel and fill in the arguments
// object after the deoptimized frame is built.
- ASSERT(frame_index == 0); // Only supported for first frame.
if (FLAG_trace_deopt) {
PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- ",
output_[frame_index]->GetTop() + output_offset,
@@ -817,7 +880,9 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
switch (opcode) {
case Translation::BEGIN:
- case Translation::FRAME:
+ case Translation::JS_FRAME:
+ case Translation::ARGUMENTS_ADAPTOR_FRAME:
+ case Translation::CONSTRUCT_STUB_FRAME:
case Translation::DUPLICATE:
UNREACHABLE(); // Malformed input.
return false;
@@ -880,7 +945,7 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
case Translation::STACK_SLOT: {
int output_index = iterator->Next();
unsigned output_offset =
- output->GetOffsetFromSlotIndex(this, output_index);
+ output->GetOffsetFromSlotIndex(output_index);
if (FLAG_trace_osr) {
PrintF(" [sp + %d] <- 0x%08" V8PRIxPTR " ; [sp + %d] ",
output_offset,
@@ -899,7 +964,7 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
int output_index = iterator->Next();
unsigned output_offset =
- output->GetOffsetFromSlotIndex(this, output_index);
+ output->GetOffsetFromSlotIndex(output_index);
int int32_value = input_object->IsSmi()
? Smi::cast(input_object)->value()
: DoubleToInt32(input_object->Number());
@@ -931,7 +996,7 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
int output_index = iterator->Next();
unsigned output_offset =
- output->GetOffsetFromSlotIndex(this, output_index);
+ output->GetOffsetFromSlotIndex(output_index);
double double_value = input_object->Number();
uint64_t int_value = BitCast<uint64_t, double>(double_value);
int32_t lower = static_cast<int32_t>(int_value);
@@ -1042,8 +1107,8 @@ unsigned Deoptimizer::ComputeInputFrameSize() const {
unsigned Deoptimizer::ComputeFixedSize(JSFunction* function) const {
// The fixed part of the frame consists of the return address, frame
// pointer, function, context, and all the incoming arguments.
- static const unsigned kFixedSlotSize = 4 * kPointerSize;
- return ComputeIncomingArgumentSize(function) + kFixedSlotSize;
+ return ComputeIncomingArgumentSize(function) +
+ StandardFrameConstants::kFixedFrameSize;
}
@@ -1097,11 +1162,12 @@ MemoryChunk* Deoptimizer::CreateCode(BailoutType type) {
Isolate::Current()->memory_allocator()->AllocateChunk(desc.instr_size,
EXECUTABLE,
NULL);
+ ASSERT(chunk->area_size() >= desc.instr_size);
if (chunk == NULL) {
V8::FatalProcessOutOfMemory("Not enough memory for deoptimization table");
}
- memcpy(chunk->body(), desc.buffer, desc.instr_size);
- CPU::FlushICache(chunk->body(), desc.instr_size);
+ memcpy(chunk->area_start(), desc.buffer, desc.instr_size);
+ CPU::FlushICache(chunk->area_start(), desc.instr_size);
return chunk;
}
@@ -1150,7 +1216,8 @@ FrameDescription::FrameDescription(uint32_t frame_size,
function_(function),
top_(kZapUint32),
pc_(kZapUint32),
- fp_(kZapUint32) {
+ fp_(kZapUint32),
+ context_(kZapUint32) {
// Zap all the registers.
for (int r = 0; r < Register::kNumRegisters; r++) {
SetRegister(r, kZapUint32);
@@ -1163,49 +1230,62 @@ FrameDescription::FrameDescription(uint32_t frame_size,
}
-unsigned FrameDescription::GetOffsetFromSlotIndex(Deoptimizer* deoptimizer,
- int slot_index) {
+int FrameDescription::ComputeFixedSize() {
+ return StandardFrameConstants::kFixedFrameSize +
+ (ComputeParametersCount() + 1) * kPointerSize;
+}
+
+
+unsigned FrameDescription::GetOffsetFromSlotIndex(int slot_index) {
if (slot_index >= 0) {
// Local or spill slots. Skip the fixed part of the frame
// including all arguments.
- unsigned base =
- GetFrameSize() - deoptimizer->ComputeFixedSize(GetFunction());
+ unsigned base = GetFrameSize() - ComputeFixedSize();
return base - ((slot_index + 1) * kPointerSize);
} else {
// Incoming parameter.
- unsigned base = GetFrameSize() -
- deoptimizer->ComputeIncomingArgumentSize(GetFunction());
+ int arg_size = (ComputeParametersCount() + 1) * kPointerSize;
+ unsigned base = GetFrameSize() - arg_size;
return base - ((slot_index + 1) * kPointerSize);
}
}
int FrameDescription::ComputeParametersCount() {
- return function_->shared()->formal_parameter_count();
+ switch (type_) {
+ case StackFrame::JAVA_SCRIPT:
+ return function_->shared()->formal_parameter_count();
+ case StackFrame::ARGUMENTS_ADAPTOR: {
+ // Last slot contains number of incomming arguments as a smi.
+ // Can't use GetExpression(0) because it would cause infinite recursion.
+ return reinterpret_cast<Smi*>(*GetFrameSlotPointer(0))->value();
+ }
+ default:
+ UNREACHABLE();
+ return 0;
+ }
}
-Object* FrameDescription::GetParameter(Deoptimizer* deoptimizer, int index) {
- ASSERT_EQ(Code::FUNCTION, kind_);
+Object* FrameDescription::GetParameter(int index) {
ASSERT(index >= 0);
ASSERT(index < ComputeParametersCount());
// The slot indexes for incoming arguments are negative.
- unsigned offset = GetOffsetFromSlotIndex(deoptimizer,
- index - ComputeParametersCount());
+ unsigned offset = GetOffsetFromSlotIndex(index - ComputeParametersCount());
return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
}
-unsigned FrameDescription::GetExpressionCount(Deoptimizer* deoptimizer) {
- ASSERT_EQ(Code::FUNCTION, kind_);
- unsigned size = GetFrameSize() - deoptimizer->ComputeFixedSize(GetFunction());
+unsigned FrameDescription::GetExpressionCount() {
+ ASSERT_EQ(StackFrame::JAVA_SCRIPT, type_);
+ unsigned size = GetFrameSize() - ComputeFixedSize();
return size / kPointerSize;
}
-Object* FrameDescription::GetExpression(Deoptimizer* deoptimizer, int index) {
- ASSERT_EQ(Code::FUNCTION, kind_);
- unsigned offset = GetOffsetFromSlotIndex(deoptimizer, index);
+Object* FrameDescription::GetExpression(int index) {
+ ASSERT_EQ(StackFrame::JAVA_SCRIPT, type_);
+ unsigned offset = GetOffsetFromSlotIndex(index);
return reinterpret_cast<Object*>(*GetFrameSlotPointer(offset));
}
@@ -1251,8 +1331,22 @@ Handle<ByteArray> TranslationBuffer::CreateByteArray() {
}
-void Translation::BeginFrame(int node_id, int literal_id, unsigned height) {
- buffer_->Add(FRAME);
+void Translation::BeginConstructStubFrame(int literal_id, unsigned height) {
+ buffer_->Add(CONSTRUCT_STUB_FRAME);
+ buffer_->Add(literal_id);
+ buffer_->Add(height);
+}
+
+
+void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) {
+ buffer_->Add(ARGUMENTS_ADAPTOR_FRAME);
+ buffer_->Add(literal_id);
+ buffer_->Add(height);
+}
+
+
+void Translation::BeginJSFrame(int node_id, int literal_id, unsigned height) {
+ buffer_->Add(JS_FRAME);
buffer_->Add(node_id);
buffer_->Add(literal_id);
buffer_->Add(height);
@@ -1316,7 +1410,6 @@ int Translation::NumberOfOperandsFor(Opcode opcode) {
case ARGUMENTS_OBJECT:
case DUPLICATE:
return 0;
- case BEGIN:
case REGISTER:
case INT32_REGISTER:
case DOUBLE_REGISTER:
@@ -1325,7 +1418,11 @@ int Translation::NumberOfOperandsFor(Opcode opcode) {
case DOUBLE_STACK_SLOT:
case LITERAL:
return 1;
- case FRAME:
+ case BEGIN:
+ case ARGUMENTS_ADAPTOR_FRAME:
+ case CONSTRUCT_STUB_FRAME:
+ return 2;
+ case JS_FRAME:
return 3;
}
UNREACHABLE();
@@ -1339,8 +1436,12 @@ const char* Translation::StringFor(Opcode opcode) {
switch (opcode) {
case BEGIN:
return "BEGIN";
- case FRAME:
- return "FRAME";
+ case JS_FRAME:
+ return "JS_FRAME";
+ case ARGUMENTS_ADAPTOR_FRAME:
+ return "ARGUMENTS_ADAPTOR_FRAME";
+ case CONSTRUCT_STUB_FRAME:
+ return "CONSTRUCT_STUB_FRAME";
case REGISTER:
return "REGISTER";
case INT32_REGISTER:
@@ -1394,7 +1495,9 @@ SlotRef SlotRef::ComputeSlotForNextArgument(TranslationIterator* iterator,
switch (opcode) {
case Translation::BEGIN:
- case Translation::FRAME:
+ case Translation::JS_FRAME:
+ case Translation::ARGUMENTS_ADAPTOR_FRAME:
+ case Translation::CONSTRUCT_STUB_FRAME:
// Peeled off before getting here.
break;
@@ -1440,9 +1543,27 @@ SlotRef SlotRef::ComputeSlotForNextArgument(TranslationIterator* iterator,
}
-void SlotRef::ComputeSlotMappingForArguments(JavaScriptFrame* frame,
- int inlined_frame_index,
- Vector<SlotRef>* args_slots) {
+void SlotRef::ComputeSlotsForArguments(Vector<SlotRef>* args_slots,
+ TranslationIterator* it,
+ DeoptimizationInputData* data,
+ JavaScriptFrame* frame) {
+ // Process the translation commands for the arguments.
+
+ // Skip the translation command for the receiver.
+ it->Skip(Translation::NumberOfOperandsFor(
+ static_cast<Translation::Opcode>(it->Next())));
+
+ // Compute slots for arguments.
+ for (int i = 0; i < args_slots->length(); ++i) {
+ (*args_slots)[i] = ComputeSlotForNextArgument(it, data, frame);
+ }
+}
+
+
+Vector<SlotRef> SlotRef::ComputeSlotMappingForArguments(
+ JavaScriptFrame* frame,
+ int inlined_jsframe_index,
+ int formal_parameter_count) {
AssertNoAllocation no_gc;
int deopt_index = AstNode::kNoNumber;
DeoptimizationInputData* data =
@@ -1451,51 +1572,81 @@ void SlotRef::ComputeSlotMappingForArguments(JavaScriptFrame* frame,
data->TranslationIndex(deopt_index)->value());
Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
ASSERT(opcode == Translation::BEGIN);
- int frame_count = it.Next();
- USE(frame_count);
- ASSERT(frame_count > inlined_frame_index);
- int frames_to_skip = inlined_frame_index;
+ it.Next(); // Drop frame count.
+ int jsframe_count = it.Next();
+ USE(jsframe_count);
+ ASSERT(jsframe_count > inlined_jsframe_index);
+ int jsframes_to_skip = inlined_jsframe_index;
while (true) {
opcode = static_cast<Translation::Opcode>(it.Next());
- // Skip over operands to advance to the next opcode.
- it.Skip(Translation::NumberOfOperandsFor(opcode));
- if (opcode == Translation::FRAME) {
- if (frames_to_skip == 0) {
+ if (opcode == Translation::ARGUMENTS_ADAPTOR_FRAME) {
+ if (jsframes_to_skip == 0) {
+ ASSERT(Translation::NumberOfOperandsFor(opcode) == 2);
+
+ it.Skip(1); // literal id
+ int height = it.Next();
+
+ // We reached the arguments adaptor frame corresponding to the
+ // inlined function in question. Number of arguments is height - 1.
+ Vector<SlotRef> args_slots =
+ Vector<SlotRef>::New(height - 1); // Minus receiver.
+ ComputeSlotsForArguments(&args_slots, &it, data, frame);
+ return args_slots;
+ }
+ } else if (opcode == Translation::JS_FRAME) {
+ if (jsframes_to_skip == 0) {
+ // Skip over operands to advance to the next opcode.
+ it.Skip(Translation::NumberOfOperandsFor(opcode));
+
// We reached the frame corresponding to the inlined function
// in question. Process the translation commands for the
- // arguments.
- //
- // Skip the translation command for the receiver.
- it.Skip(Translation::NumberOfOperandsFor(
- static_cast<Translation::Opcode>(it.Next())));
- // Compute slots for arguments.
- for (int i = 0; i < args_slots->length(); ++i) {
- (*args_slots)[i] = ComputeSlotForNextArgument(&it, data, frame);
- }
- return;
+ // arguments. Number of arguments is equal to the number of
+ // format parameter count.
+ Vector<SlotRef> args_slots =
+ Vector<SlotRef>::New(formal_parameter_count);
+ ComputeSlotsForArguments(&args_slots, &it, data, frame);
+ return args_slots;
}
- frames_to_skip--;
+ jsframes_to_skip--;
}
+
+ // Skip over operands to advance to the next opcode.
+ it.Skip(Translation::NumberOfOperandsFor(opcode));
}
UNREACHABLE();
+ return Vector<SlotRef>();
}
#ifdef ENABLE_DEBUGGER_SUPPORT
-DeoptimizedFrameInfo::DeoptimizedFrameInfo(
- Deoptimizer* deoptimizer, int frame_index) {
+DeoptimizedFrameInfo::DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
+ int frame_index,
+ bool has_arguments_adaptor,
+ bool has_construct_stub) {
FrameDescription* output_frame = deoptimizer->output_[frame_index];
- SetFunction(output_frame->GetFunction());
- expression_count_ = output_frame->GetExpressionCount(deoptimizer);
+ function_ = output_frame->GetFunction();
+ has_construct_stub_ = has_construct_stub;
+ expression_count_ = output_frame->GetExpressionCount();
+ expression_stack_ = new Object*[expression_count_];
+ // Get the source position using the unoptimized code.
+ Address pc = reinterpret_cast<Address>(output_frame->GetPc());
+ Code* code = Code::cast(Isolate::Current()->heap()->FindCodeObject(pc));
+ source_position_ = code->SourcePosition(pc);
+
+ for (int i = 0; i < expression_count_; i++) {
+ SetExpression(i, output_frame->GetExpression(i));
+ }
+
+ if (has_arguments_adaptor) {
+ output_frame = deoptimizer->output_[frame_index - 1];
+ ASSERT(output_frame->GetFrameType() == StackFrame::ARGUMENTS_ADAPTOR);
+ }
+
parameters_count_ = output_frame->ComputeParametersCount();
parameters_ = new Object*[parameters_count_];
for (int i = 0; i < parameters_count_; i++) {
- SetParameter(i, output_frame->GetParameter(deoptimizer, i));
- }
- expression_stack_ = new Object*[expression_count_];
- for (int i = 0; i < expression_count_; i++) {
- SetExpression(i, output_frame->GetExpression(deoptimizer, i));
+ SetParameter(i, output_frame->GetParameter(i));
}
}
@@ -1505,6 +1656,7 @@ DeoptimizedFrameInfo::~DeoptimizedFrameInfo() {
delete[] parameters_;
}
+
void DeoptimizedFrameInfo::Iterate(ObjectVisitor* v) {
v->VisitPointer(BitCast<Object**>(&function_));
v->VisitPointers(parameters_, parameters_ + parameters_count_);
diff --git a/src/3rdparty/v8/src/deoptimizer.h b/src/3rdparty/v8/src/deoptimizer.h
index 284676c..6bc4a51 100644
--- a/src/3rdparty/v8/src/deoptimizer.h
+++ b/src/3rdparty/v8/src/deoptimizer.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -119,6 +119,9 @@ class Deoptimizer : public Malloced {
int output_count() const { return output_count_; }
+ // Number of created JS frames. Not all created frames are necessarily JS.
+ int jsframe_count() const { return jsframe_count_; }
+
static Deoptimizer* New(JSFunction* function,
BailoutType type,
unsigned bailout_id,
@@ -131,7 +134,7 @@ class Deoptimizer : public Malloced {
// The returned object with information on the optimized frame needs to be
// freed before another one can be generated.
static DeoptimizedFrameInfo* DebuggerInspectableFrame(JavaScriptFrame* frame,
- int frame_index,
+ int jsframe_index,
Isolate* isolate);
static void DeleteDebuggerInspectableFrame(DeoptimizedFrameInfo* info,
Isolate* isolate);
@@ -196,7 +199,11 @@ class Deoptimizer : public Malloced {
void MaterializeHeapNumbers();
#ifdef ENABLE_DEBUGGER_SUPPORT
void MaterializeHeapNumbersForDebuggerInspectableFrame(
- Address top, uint32_t size, DeoptimizedFrameInfo* info);
+ Address parameters_top,
+ uint32_t parameters_size,
+ Address expressions_top,
+ uint32_t expressions_size,
+ DeoptimizedFrameInfo* info);
#endif
static void ComputeOutputFrames(Deoptimizer* deoptimizer);
@@ -213,11 +220,6 @@ class Deoptimizer : public Malloced {
return OFFSET_OF(Deoptimizer, output_count_);
}
static int output_offset() { return OFFSET_OF(Deoptimizer, output_); }
- static int frame_alignment_marker_offset() {
- return OFFSET_OF(Deoptimizer, frame_alignment_marker_); }
- static int has_alignment_padding_offset() {
- return OFFSET_OF(Deoptimizer, has_alignment_padding_);
- }
static int GetDeoptimizedCodeCount(Isolate* isolate);
@@ -257,8 +259,10 @@ class Deoptimizer : public Malloced {
int count_;
};
+ int ConvertJSFrameIndexToFrameIndex(int jsframe_index);
+
private:
- static const int kNumberOfEntries = 4096;
+ static const int kNumberOfEntries = 16384;
Deoptimizer(Isolate* isolate,
JSFunction* function,
@@ -271,7 +275,11 @@ class Deoptimizer : public Malloced {
void DoComputeOutputFrames();
void DoComputeOsrOutputFrame();
- void DoComputeFrame(TranslationIterator* iterator, int frame_index);
+ void DoComputeJSFrame(TranslationIterator* iterator, int frame_index);
+ void DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
+ int frame_index);
+ void DoComputeConstructStubFrame(TranslationIterator* iterator,
+ int frame_index);
void DoTranslateCommand(TranslationIterator* iterator,
int frame_index,
unsigned output_offset);
@@ -319,13 +327,11 @@ class Deoptimizer : public Malloced {
FrameDescription* input_;
// Number of output frames.
int output_count_;
+ // Number of output js frames.
+ int jsframe_count_;
// Array of output frame descriptions.
FrameDescription** output_;
- // Frames can be dynamically padded on ia32 to align untagged doubles.
- Object* frame_alignment_marker_;
- intptr_t has_alignment_padding_;
-
List<HeapNumberMaterializationDescriptor> deferred_heap_numbers_;
static const int table_entry_size_;
@@ -362,7 +368,7 @@ class FrameDescription {
JSFunction* GetFunction() const { return function_; }
- unsigned GetOffsetFromSlotIndex(Deoptimizer* deoptimizer, int slot_index);
+ unsigned GetOffsetFromSlotIndex(int slot_index);
intptr_t GetFrameSlot(unsigned offset) {
return *GetFrameSlotPointer(offset);
@@ -418,27 +424,28 @@ class FrameDescription {
intptr_t GetFp() const { return fp_; }
void SetFp(intptr_t fp) { fp_ = fp; }
+ intptr_t GetContext() const { return context_; }
+ void SetContext(intptr_t context) { context_ = context; }
+
Smi* GetState() const { return state_; }
void SetState(Smi* state) { state_ = state; }
void SetContinuation(intptr_t pc) { continuation_ = pc; }
-#ifdef DEBUG
- Code::Kind GetKind() const { return kind_; }
- void SetKind(Code::Kind kind) { kind_ = kind; }
-#endif
+ StackFrame::Type GetFrameType() const { return type_; }
+ void SetFrameType(StackFrame::Type type) { type_ = type; }
// Get the incoming arguments count.
int ComputeParametersCount();
// Get a parameter value for an unoptimized frame.
- Object* GetParameter(Deoptimizer* deoptimizer, int index);
+ Object* GetParameter(int index);
// Get the expression stack height for a unoptimized frame.
- unsigned GetExpressionCount(Deoptimizer* deoptimizer);
+ unsigned GetExpressionCount();
// Get the expression stack value for an unoptimized frame.
- Object* GetExpression(Deoptimizer* deoptimizer, int index);
+ Object* GetExpression(int index);
static int registers_offset() {
return OFFSET_OF(FrameDescription, registers_);
@@ -481,6 +488,8 @@ class FrameDescription {
intptr_t top_;
intptr_t pc_;
intptr_t fp_;
+ intptr_t context_;
+ StackFrame::Type type_;
Smi* state_;
#ifdef DEBUG
Code::Kind kind_;
@@ -499,6 +508,8 @@ class FrameDescription {
return reinterpret_cast<intptr_t*>(
reinterpret_cast<Address>(this) + frame_content_offset() + offset);
}
+
+ int ComputeFixedSize();
};
@@ -541,7 +552,9 @@ class Translation BASE_EMBEDDED {
public:
enum Opcode {
BEGIN,
- FRAME,
+ JS_FRAME,
+ CONSTRUCT_STUB_FRAME,
+ ARGUMENTS_ADAPTOR_FRAME,
REGISTER,
INT32_REGISTER,
DOUBLE_REGISTER,
@@ -556,17 +569,20 @@ class Translation BASE_EMBEDDED {
DUPLICATE
};
- Translation(TranslationBuffer* buffer, int frame_count)
+ Translation(TranslationBuffer* buffer, int frame_count, int jsframe_count)
: buffer_(buffer),
index_(buffer->CurrentIndex()) {
buffer_->Add(BEGIN);
buffer_->Add(frame_count);
+ buffer_->Add(jsframe_count);
}
int index() const { return index_; }
// Commands.
- void BeginFrame(int node_id, int literal_id, unsigned height);
+ void BeginJSFrame(int node_id, int literal_id, unsigned height);
+ void BeginArgumentsAdaptorFrame(int literal_id, unsigned height);
+ void BeginConstructStubFrame(int literal_id, unsigned height);
void StoreRegister(Register reg);
void StoreInt32Register(Register reg);
void StoreDoubleRegister(DoubleRegister reg);
@@ -656,9 +672,10 @@ class SlotRef BASE_EMBEDDED {
}
}
- static void ComputeSlotMappingForArguments(JavaScriptFrame* frame,
- int inlined_frame_index,
- Vector<SlotRef>* args_slots);
+ static Vector<SlotRef> ComputeSlotMappingForArguments(
+ JavaScriptFrame* frame,
+ int inlined_frame_index,
+ int formal_parameter_count);
private:
Address addr_;
@@ -678,6 +695,12 @@ class SlotRef BASE_EMBEDDED {
static SlotRef ComputeSlotForNextArgument(TranslationIterator* iterator,
DeoptimizationInputData* data,
JavaScriptFrame* frame);
+
+ static void ComputeSlotsForArguments(
+ Vector<SlotRef>* args_slots,
+ TranslationIterator* iterator,
+ DeoptimizationInputData* data,
+ JavaScriptFrame* frame);
};
@@ -686,9 +709,14 @@ class SlotRef BASE_EMBEDDED {
// needs to inspect a frame that is part of an optimized frame. The
// internally used FrameDescription objects are not GC safe so for use
// by the debugger frame information is copied to an object of this type.
+// Represents parameters in unadapted form so their number might mismatch
+// formal parameter count.
class DeoptimizedFrameInfo : public Malloced {
public:
- DeoptimizedFrameInfo(Deoptimizer* deoptimizer, int frame_index);
+ DeoptimizedFrameInfo(Deoptimizer* deoptimizer,
+ int frame_index,
+ bool has_arguments_adaptor,
+ bool has_construct_stub);
virtual ~DeoptimizedFrameInfo();
// GC support.
@@ -705,6 +733,12 @@ class DeoptimizedFrameInfo : public Malloced {
return function_;
}
+ // Check if this frame is preceded by construct stub frame. The bottom-most
+ // inlined frame might still be called by an uninlined construct stub.
+ bool HasConstructStub() {
+ return has_construct_stub_;
+ }
+
// Get an incoming argument.
Object* GetParameter(int index) {
ASSERT(0 <= index && index < parameters_count());
@@ -717,12 +751,11 @@ class DeoptimizedFrameInfo : public Malloced {
return expression_stack_[index];
}
- private:
- // Set the frame function.
- void SetFunction(JSFunction* function) {
- function_ = function;
+ int GetSourcePosition() {
+ return source_position_;
}
+ private:
// Set an incoming argument.
void SetParameter(int index, Object* obj) {
ASSERT(0 <= index && index < parameters_count());
@@ -736,10 +769,12 @@ class DeoptimizedFrameInfo : public Malloced {
}
JSFunction* function_;
+ bool has_construct_stub_;
int parameters_count_;
int expression_count_;
Object** parameters_;
Object** expression_stack_;
+ int source_position_;
friend class Deoptimizer;
};
diff --git a/src/3rdparty/v8/src/double.h b/src/3rdparty/v8/src/double.h
index 65eded9..16a3245 100644
--- a/src/3rdparty/v8/src/double.h
+++ b/src/3rdparty/v8/src/double.h
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -34,8 +34,8 @@ namespace v8 {
namespace internal {
// We assume that doubles and uint64_t have the same endianness.
-static uint64_t double_to_uint64(double d) { return BitCast<uint64_t>(d); }
-static double uint64_to_double(uint64_t d64) { return BitCast<double>(d64); }
+inline uint64_t double_to_uint64(double d) { return BitCast<uint64_t>(d); }
+inline double uint64_to_double(uint64_t d64) { return BitCast<double>(d64); }
// Helper functions for doubles.
class Double {
diff --git a/src/3rdparty/v8/src/dtoa.h b/src/3rdparty/v8/src/dtoa.h
index b3e79af..948a079 100644
--- a/src/3rdparty/v8/src/dtoa.h
+++ b/src/3rdparty/v8/src/dtoa.h
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -47,9 +47,9 @@ enum DtoaMode {
// The maximal length of digits a double can have in base 10.
// Note that DoubleToAscii null-terminates its input. So the given buffer should
// be at least kBase10MaximalLength + 1 characters long.
-static const int kBase10MaximalLength = 17;
+const int kBase10MaximalLength = 17;
-// Converts the given double 'v' to ascii.
+// Converts the given double 'v' to ASCII.
// The result should be interpreted as buffer * 10^(point-length).
//
// The output depends on the given mode:
diff --git a/src/3rdparty/v8/src/elements.cc b/src/3rdparty/v8/src/elements.cc
index 5e7a84e..26d3dc1 100644
--- a/src/3rdparty/v8/src/elements.cc
+++ b/src/3rdparty/v8/src/elements.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -31,14 +31,85 @@
#include "elements.h"
#include "utils.h"
+
+// Each concrete ElementsAccessor can handle exactly one ElementsKind,
+// several abstract ElementsAccessor classes are used to allow sharing
+// common code.
+//
+// Inheritance hierarchy:
+// - ElementsAccessorBase (abstract)
+// - FastElementsAccessor (abstract)
+// - FastObjectElementsAccessor
+// - FastDoubleElementsAccessor
+// - ExternalElementsAccessor (abstract)
+// - ExternalByteElementsAccessor
+// - ExternalUnsignedByteElementsAccessor
+// - ExternalShortElementsAccessor
+// - ExternalUnsignedShortElementsAccessor
+// - ExternalIntElementsAccessor
+// - ExternalUnsignedIntElementsAccessor
+// - ExternalFloatElementsAccessor
+// - ExternalDoubleElementsAccessor
+// - PixelElementsAccessor
+// - DictionaryElementsAccessor
+// - NonStrictArgumentsElementsAccessor
+
+
namespace v8 {
namespace internal {
+// First argument in list is the accessor class, the second argument is the
+// accessor ElementsKind, and the third is the backing store class. Use the
+// fast element handler for smi-only arrays. The implementation is currently
+// identical. Note that the order must match that of the ElementsKind enum for
+// the |accessor_array[]| below to work.
+#define ELEMENTS_LIST(V) \
+ V(FastObjectElementsAccessor, FAST_SMI_ONLY_ELEMENTS, FixedArray) \
+ V(FastObjectElementsAccessor, FAST_ELEMENTS, FixedArray) \
+ V(FastDoubleElementsAccessor, FAST_DOUBLE_ELEMENTS, FixedDoubleArray) \
+ V(DictionaryElementsAccessor, DICTIONARY_ELEMENTS, \
+ SeededNumberDictionary) \
+ V(NonStrictArgumentsElementsAccessor, NON_STRICT_ARGUMENTS_ELEMENTS, \
+ FixedArray) \
+ V(ExternalByteElementsAccessor, EXTERNAL_BYTE_ELEMENTS, \
+ ExternalByteArray) \
+ V(ExternalUnsignedByteElementsAccessor, \
+ EXTERNAL_UNSIGNED_BYTE_ELEMENTS, ExternalUnsignedByteArray) \
+ V(ExternalShortElementsAccessor, EXTERNAL_SHORT_ELEMENTS, \
+ ExternalShortArray) \
+ V(ExternalUnsignedShortElementsAccessor, \
+ EXTERNAL_UNSIGNED_SHORT_ELEMENTS, ExternalUnsignedShortArray) \
+ V(ExternalIntElementsAccessor, EXTERNAL_INT_ELEMENTS, \
+ ExternalIntArray) \
+ V(ExternalUnsignedIntElementsAccessor, \
+ EXTERNAL_UNSIGNED_INT_ELEMENTS, ExternalUnsignedIntArray) \
+ V(ExternalFloatElementsAccessor, \
+ EXTERNAL_FLOAT_ELEMENTS, ExternalFloatArray) \
+ V(ExternalDoubleElementsAccessor, \
+ EXTERNAL_DOUBLE_ELEMENTS, ExternalDoubleArray) \
+ V(PixelElementsAccessor, EXTERNAL_PIXEL_ELEMENTS, ExternalPixelArray)
+
+
+template<ElementsKind Kind> class ElementsKindTraits {
+ public:
+ typedef FixedArrayBase BackingStore;
+};
+
+#define ELEMENTS_TRAITS(Class, KindParam, Store) \
+template<> class ElementsKindTraits<KindParam> { \
+ public: \
+ static const ElementsKind Kind = KindParam; \
+ typedef Store BackingStore; \
+};
+ELEMENTS_LIST(ELEMENTS_TRAITS)
+#undef ELEMENTS_TRAITS
+
+
ElementsAccessor** ElementsAccessor::elements_accessors_;
-bool HasKey(FixedArray* array, Object* key) {
+static bool HasKey(FixedArray* array, Object* key) {
int len0 = array->length();
for (int i = 0; i < len0; i++) {
Object* element = array->get(i);
@@ -52,6 +123,258 @@ bool HasKey(FixedArray* array, Object* key) {
}
+static Failure* ThrowArrayLengthRangeError(Heap* heap) {
+ HandleScope scope(heap->isolate());
+ return heap->isolate()->Throw(
+ *heap->isolate()->factory()->NewRangeError("invalid_array_length",
+ HandleVector<Object>(NULL, 0)));
+}
+
+
+void CopyObjectToObjectElements(FixedArray* from,
+ ElementsKind from_kind,
+ uint32_t from_start,
+ FixedArray* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int raw_copy_size) {
+ ASSERT(to->map() != HEAP->fixed_cow_array_map());
+ ASSERT(from_kind == FAST_ELEMENTS || from_kind == FAST_SMI_ONLY_ELEMENTS);
+ ASSERT(to_kind == FAST_ELEMENTS || to_kind == FAST_SMI_ONLY_ELEMENTS);
+ int copy_size = raw_copy_size;
+ if (raw_copy_size < 0) {
+ ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd ||
+ raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
+ copy_size = Min(from->length() - from_start,
+ to->length() - to_start);
+#ifdef DEBUG
+ // FAST_ELEMENT arrays cannot be uninitialized. Ensure they are already
+ // marked with the hole.
+ if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
+ for (int i = to_start + copy_size; i < to->length(); ++i) {
+ ASSERT(to->get(i)->IsTheHole());
+ }
+ }
+#endif
+ }
+ ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() &&
+ (copy_size + static_cast<int>(from_start)) <= from->length());
+ if (copy_size == 0) return;
+ Address to_address = to->address() + FixedArray::kHeaderSize;
+ Address from_address = from->address() + FixedArray::kHeaderSize;
+ CopyWords(reinterpret_cast<Object**>(to_address) + to_start,
+ reinterpret_cast<Object**>(from_address) + from_start,
+ copy_size);
+ if (from_kind == FAST_ELEMENTS && to_kind == FAST_ELEMENTS) {
+ Heap* heap = from->GetHeap();
+ if (!heap->InNewSpace(to)) {
+ heap->RecordWrites(to->address(),
+ to->OffsetOfElementAt(to_start),
+ copy_size);
+ }
+ heap->incremental_marking()->RecordWrites(to);
+ }
+}
+
+
+static void CopyDictionaryToObjectElements(SeededNumberDictionary* from,
+ uint32_t from_start,
+ FixedArray* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int raw_copy_size) {
+ int copy_size = raw_copy_size;
+ Heap* heap = from->GetHeap();
+ if (raw_copy_size < 0) {
+ ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd ||
+ raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
+ copy_size = from->max_number_key() + 1 - from_start;
+#ifdef DEBUG
+ // FAST_ELEMENT arrays cannot be uninitialized. Ensure they are already
+ // marked with the hole.
+ if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
+ for (int i = to_start + copy_size; i < to->length(); ++i) {
+ ASSERT(to->get(i)->IsTheHole());
+ }
+ }
+#endif
+ }
+ ASSERT(to != from);
+ ASSERT(to_kind == FAST_ELEMENTS || to_kind == FAST_SMI_ONLY_ELEMENTS);
+ if (copy_size == 0) return;
+ uint32_t to_length = to->length();
+ if (to_start + copy_size > to_length) {
+ copy_size = to_length - to_start;
+ }
+ for (int i = 0; i < copy_size; i++) {
+ int entry = from->FindEntry(i + from_start);
+ if (entry != SeededNumberDictionary::kNotFound) {
+ Object* value = from->ValueAt(entry);
+ ASSERT(!value->IsTheHole());
+ to->set(i + to_start, value, SKIP_WRITE_BARRIER);
+ } else {
+ to->set_the_hole(i + to_start);
+ }
+ }
+ if (to_kind == FAST_ELEMENTS) {
+ if (!heap->InNewSpace(to)) {
+ heap->RecordWrites(to->address(),
+ to->OffsetOfElementAt(to_start),
+ copy_size);
+ }
+ heap->incremental_marking()->RecordWrites(to);
+ }
+}
+
+
+MUST_USE_RESULT static MaybeObject* CopyDoubleToObjectElements(
+ FixedDoubleArray* from,
+ uint32_t from_start,
+ FixedArray* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int raw_copy_size) {
+ ASSERT(to_kind == FAST_ELEMENTS || to_kind == FAST_SMI_ONLY_ELEMENTS);
+ int copy_size = raw_copy_size;
+ if (raw_copy_size < 0) {
+ ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd ||
+ raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
+ copy_size = Min(from->length() - from_start,
+ to->length() - to_start);
+#ifdef DEBUG
+ // FAST_ELEMENT arrays cannot be uninitialized. Ensure they are already
+ // marked with the hole.
+ if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
+ for (int i = to_start + copy_size; i < to->length(); ++i) {
+ ASSERT(to->get(i)->IsTheHole());
+ }
+ }
+#endif
+ }
+ ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() &&
+ (copy_size + static_cast<int>(from_start)) <= from->length());
+ if (copy_size == 0) return from;
+ for (int i = 0; i < copy_size; ++i) {
+ if (to_kind == FAST_SMI_ONLY_ELEMENTS) {
+ UNIMPLEMENTED();
+ return Failure::Exception();
+ } else {
+ MaybeObject* maybe_value = from->get(i + from_start);
+ Object* value;
+ ASSERT(to_kind == FAST_ELEMENTS);
+ // Because FAST_DOUBLE_ELEMENTS -> FAST_ELEMENT allocate HeapObjects
+ // iteratively, the allocate must succeed within a single GC cycle,
+ // otherwise the retry after the GC will also fail. In order to ensure
+ // that no GC is triggered, allocate HeapNumbers from old space if they
+ // can't be taken from new space.
+ if (!maybe_value->ToObject(&value)) {
+ ASSERT(maybe_value->IsRetryAfterGC() || maybe_value->IsOutOfMemory());
+ Heap* heap = from->GetHeap();
+ MaybeObject* maybe_value_object =
+ heap->AllocateHeapNumber(from->get_scalar(i + from_start),
+ TENURED);
+ if (!maybe_value_object->ToObject(&value)) return maybe_value_object;
+ }
+ to->set(i + to_start, value, UPDATE_WRITE_BARRIER);
+ }
+ }
+ return to;
+}
+
+
+static void CopyDoubleToDoubleElements(FixedDoubleArray* from,
+ uint32_t from_start,
+ FixedDoubleArray* to,
+ uint32_t to_start,
+ int raw_copy_size) {
+ int copy_size = raw_copy_size;
+ if (raw_copy_size < 0) {
+ ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd ||
+ raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
+ copy_size = Min(from->length() - from_start,
+ to->length() - to_start);
+ if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
+ for (int i = to_start + copy_size; i < to->length(); ++i) {
+ to->set_the_hole(i);
+ }
+ }
+ }
+ ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() &&
+ (copy_size + static_cast<int>(from_start)) <= from->length());
+ if (copy_size == 0) return;
+ Address to_address = to->address() + FixedDoubleArray::kHeaderSize;
+ Address from_address = from->address() + FixedDoubleArray::kHeaderSize;
+ to_address += kDoubleSize * to_start;
+ from_address += kDoubleSize * from_start;
+ int words_per_double = (kDoubleSize / kPointerSize);
+ CopyWords(reinterpret_cast<Object**>(to_address),
+ reinterpret_cast<Object**>(from_address),
+ words_per_double * copy_size);
+}
+
+
+static void CopyObjectToDoubleElements(FixedArray* from,
+ uint32_t from_start,
+ FixedDoubleArray* to,
+ uint32_t to_start,
+ int raw_copy_size) {
+ int copy_size = raw_copy_size;
+ if (raw_copy_size < 0) {
+ ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd ||
+ raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
+ copy_size = from->length() - from_start;
+ if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
+ for (int i = to_start + copy_size; i < to->length(); ++i) {
+ to->set_the_hole(i);
+ }
+ }
+ }
+ ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() &&
+ (copy_size + static_cast<int>(from_start)) <= from->length());
+ if (copy_size == 0) return;
+ for (int i = 0; i < copy_size; i++) {
+ Object* hole_or_object = from->get(i + from_start);
+ if (hole_or_object->IsTheHole()) {
+ to->set_the_hole(i + to_start);
+ } else {
+ to->set(i + to_start, hole_or_object->Number());
+ }
+ }
+}
+
+
+static void CopyDictionaryToDoubleElements(SeededNumberDictionary* from,
+ uint32_t from_start,
+ FixedDoubleArray* to,
+ uint32_t to_start,
+ int raw_copy_size) {
+ int copy_size = raw_copy_size;
+ if (copy_size < 0) {
+ ASSERT(copy_size == ElementsAccessor::kCopyToEnd ||
+ copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
+ copy_size = from->max_number_key() + 1 - from_start;
+ if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
+ for (int i = to_start + copy_size; i < to->length(); ++i) {
+ to->set_the_hole(i);
+ }
+ }
+ }
+ if (copy_size == 0) return;
+ uint32_t to_length = to->length();
+ if (to_start + copy_size > to_length) {
+ copy_size = to_length - to_start;
+ }
+ for (int i = 0; i < copy_size; i++) {
+ int entry = from->FindEntry(i + from_start);
+ if (entry != SeededNumberDictionary::kNotFound) {
+ to->set(i + to_start, from->ValueAt(entry)->Number());
+ } else {
+ to->set_the_hole(i + to_start);
+ }
+ }
+}
+
+
// Base class for element handler implementations. Contains the
// the common logic for objects with different ElementsKinds.
// Subclasses must specialize method for which the element
@@ -69,36 +392,119 @@ bool HasKey(FixedArray* array, Object* key) {
// http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern). We use
// CRTP to guarantee aggressive compile time optimizations (i.e. inlining and
// specialization of SomeElementsAccessor methods).
-template <typename ElementsAccessorSubclass, typename BackingStoreClass>
+template <typename ElementsAccessorSubclass,
+ typename ElementsTraitsParam>
class ElementsAccessorBase : public ElementsAccessor {
protected:
- ElementsAccessorBase() { }
- virtual MaybeObject* Get(FixedArrayBase* backing_store,
- uint32_t key,
- JSObject* obj,
- Object* receiver) {
- return ElementsAccessorSubclass::Get(
- BackingStoreClass::cast(backing_store), key, obj, receiver);
+ explicit ElementsAccessorBase(const char* name)
+ : ElementsAccessor(name) { }
+
+ typedef ElementsTraitsParam ElementsTraits;
+ typedef typename ElementsTraitsParam::BackingStore BackingStore;
+
+ virtual ElementsKind kind() const { return ElementsTraits::Kind; }
+
+ static bool HasElementImpl(Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ BackingStore* backing_store) {
+ MaybeObject* element =
+ ElementsAccessorSubclass::GetImpl(receiver, holder, key, backing_store);
+ return !element->IsTheHole();
}
- static MaybeObject* Get(BackingStoreClass* backing_store,
+ virtual bool HasElement(Object* receiver,
+ JSObject* holder,
uint32_t key,
- JSObject* obj,
- Object* receiver) {
- if (key < ElementsAccessorSubclass::GetCapacity(backing_store)) {
- return backing_store->get(key);
+ FixedArrayBase* backing_store) {
+ if (backing_store == NULL) {
+ backing_store = holder->elements();
}
- return backing_store->GetHeap()->the_hole_value();
+ return ElementsAccessorSubclass::HasElementImpl(
+ receiver, holder, key, BackingStore::cast(backing_store));
+ }
+
+ virtual MaybeObject* Get(Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ FixedArrayBase* backing_store) {
+ if (backing_store == NULL) {
+ backing_store = holder->elements();
+ }
+ return ElementsAccessorSubclass::GetImpl(
+ receiver, holder, key, BackingStore::cast(backing_store));
+ }
+
+ static MaybeObject* GetImpl(Object* receiver,
+ JSObject* obj,
+ uint32_t key,
+ BackingStore* backing_store) {
+ return (key < ElementsAccessorSubclass::GetCapacityImpl(backing_store))
+ ? backing_store->get(key)
+ : backing_store->GetHeap()->the_hole_value();
+ }
+
+ virtual MaybeObject* SetLength(JSArray* array,
+ Object* length) {
+ return ElementsAccessorSubclass::SetLengthImpl(
+ array, length, BackingStore::cast(array->elements()));
+ }
+
+ static MaybeObject* SetLengthImpl(JSObject* obj,
+ Object* length,
+ BackingStore* backing_store);
+
+ virtual MaybeObject* SetCapacityAndLength(JSArray* array,
+ int capacity,
+ int length) {
+ return ElementsAccessorSubclass::SetFastElementsCapacityAndLength(
+ array,
+ capacity,
+ length);
+ }
+
+ static MaybeObject* SetFastElementsCapacityAndLength(JSObject* obj,
+ int capacity,
+ int length) {
+ UNIMPLEMENTED();
+ return obj;
}
virtual MaybeObject* Delete(JSObject* obj,
uint32_t key,
JSReceiver::DeleteMode mode) = 0;
- virtual MaybeObject* AddElementsToFixedArray(FixedArrayBase* from,
- FixedArray* to,
+ static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
+ uint32_t from_start,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int copy_size) {
+ UNREACHABLE();
+ return NULL;
+ }
+
+ virtual MaybeObject* CopyElements(JSObject* from_holder,
+ uint32_t from_start,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int copy_size,
+ FixedArrayBase* from) {
+ if (from == NULL) {
+ from = from_holder->elements();
+ }
+ if (from->length() == 0) {
+ return from;
+ }
+ return ElementsAccessorSubclass::CopyElementsImpl(
+ from, from_start, to, to_kind, to_start, copy_size);
+ }
+
+ virtual MaybeObject* AddElementsToFixedArray(Object* receiver,
JSObject* holder,
- Object* receiver) {
+ FixedArray* to,
+ FixedArrayBase* from) {
int len0 = to->length();
#ifdef DEBUG
if (FLAG_enable_slow_asserts) {
@@ -107,24 +513,26 @@ class ElementsAccessorBase : public ElementsAccessor {
}
}
#endif
- BackingStoreClass* backing_store = BackingStoreClass::cast(from);
- uint32_t len1 = ElementsAccessorSubclass::GetCapacity(backing_store);
+ if (from == NULL) {
+ from = holder->elements();
+ }
+ BackingStore* backing_store = BackingStore::cast(from);
+ uint32_t len1 = ElementsAccessorSubclass::GetCapacityImpl(backing_store);
// Optimize if 'other' is empty.
// We cannot optimize if 'this' is empty, as other may have holes.
if (len1 == 0) return to;
// Compute how many elements are not in other.
- int extra = 0;
+ uint32_t extra = 0;
for (uint32_t y = 0; y < len1; y++) {
- if (ElementsAccessorSubclass::HasElementAtIndex(backing_store,
- y,
- holder,
- receiver)) {
- uint32_t key =
- ElementsAccessorSubclass::GetKeyForIndex(backing_store, y);
+ uint32_t key =
+ ElementsAccessorSubclass::GetKeyForIndexImpl(backing_store, y);
+ if (ElementsAccessorSubclass::HasElementImpl(
+ receiver, holder, key, backing_store)) {
MaybeObject* maybe_value =
- ElementsAccessorSubclass::Get(backing_store, key, holder, receiver);
+ ElementsAccessorSubclass::GetImpl(receiver, holder,
+ key, backing_store);
Object* value;
if (!maybe_value->ToObject(&value)) return maybe_value;
ASSERT(!value->IsTheHole());
@@ -153,16 +561,15 @@ class ElementsAccessorBase : public ElementsAccessor {
}
}
// Fill in the extra values.
- int index = 0;
+ uint32_t index = 0;
for (uint32_t y = 0; y < len1; y++) {
- if (ElementsAccessorSubclass::HasElementAtIndex(backing_store,
- y,
- holder,
- receiver)) {
- uint32_t key =
- ElementsAccessorSubclass::GetKeyForIndex(backing_store, y);
+ uint32_t key =
+ ElementsAccessorSubclass::GetKeyForIndexImpl(backing_store, y);
+ if (ElementsAccessorSubclass::HasElementImpl(
+ receiver, holder, key, backing_store)) {
MaybeObject* maybe_value =
- ElementsAccessorSubclass::Get(backing_store, key, holder, receiver);
+ ElementsAccessorSubclass::GetImpl(receiver, holder,
+ key, backing_store);
Object* value;
if (!maybe_value->ToObject(&value)) return maybe_value;
if (!value->IsTheHole() && !HasKey(to, value)) {
@@ -176,45 +583,24 @@ class ElementsAccessorBase : public ElementsAccessor {
}
protected:
- static uint32_t GetCapacity(BackingStoreClass* backing_store) {
+ static uint32_t GetCapacityImpl(BackingStore* backing_store) {
return backing_store->length();
}
virtual uint32_t GetCapacity(FixedArrayBase* backing_store) {
- return ElementsAccessorSubclass::GetCapacity(
- BackingStoreClass::cast(backing_store));
- }
-
- static bool HasElementAtIndex(BackingStoreClass* backing_store,
- uint32_t index,
- JSObject* holder,
- Object* receiver) {
- uint32_t key =
- ElementsAccessorSubclass::GetKeyForIndex(backing_store, index);
- MaybeObject* element = ElementsAccessorSubclass::Get(backing_store,
- key,
- holder,
- receiver);
- return !element->IsTheHole();
- }
-
- virtual bool HasElementAtIndex(FixedArrayBase* backing_store,
- uint32_t index,
- JSObject* holder,
- Object* receiver) {
- return ElementsAccessorSubclass::HasElementAtIndex(
- BackingStoreClass::cast(backing_store), index, holder, receiver);
+ return ElementsAccessorSubclass::GetCapacityImpl(
+ BackingStore::cast(backing_store));
}
- static uint32_t GetKeyForIndex(BackingStoreClass* backing_store,
- uint32_t index) {
+ static uint32_t GetKeyForIndexImpl(BackingStore* backing_store,
+ uint32_t index) {
return index;
}
virtual uint32_t GetKeyForIndex(FixedArrayBase* backing_store,
- uint32_t index) {
- return ElementsAccessorSubclass::GetKeyForIndex(
- BackingStoreClass::cast(backing_store), index);
+ uint32_t index) {
+ return ElementsAccessorSubclass::GetKeyForIndexImpl(
+ BackingStore::cast(backing_store), index);
}
private:
@@ -222,9 +608,82 @@ class ElementsAccessorBase : public ElementsAccessor {
};
+// Super class for all fast element arrays.
+template<typename FastElementsAccessorSubclass,
+ typename KindTraits,
+ int ElementSize>
class FastElementsAccessor
- : public ElementsAccessorBase<FastElementsAccessor, FixedArray> {
+ : public ElementsAccessorBase<FastElementsAccessorSubclass, KindTraits> {
public:
+ explicit FastElementsAccessor(const char* name)
+ : ElementsAccessorBase<FastElementsAccessorSubclass,
+ KindTraits>(name) {}
+ protected:
+ friend class ElementsAccessorBase<FastElementsAccessorSubclass, KindTraits>;
+
+ typedef typename KindTraits::BackingStore BackingStore;
+
+ // Adjusts the length of the fast backing store or returns the new length or
+ // undefined in case conversion to a slow backing store should be performed.
+ static MaybeObject* SetLengthWithoutNormalize(BackingStore* backing_store,
+ JSArray* array,
+ Object* length_object,
+ uint32_t length) {
+ uint32_t old_capacity = backing_store->length();
+
+ // Check whether the backing store should be shrunk.
+ if (length <= old_capacity) {
+ if (array->HasFastTypeElements()) {
+ MaybeObject* maybe_obj = array->EnsureWritableFastElements();
+ if (!maybe_obj->To(&backing_store)) return maybe_obj;
+ }
+ if (2 * length <= old_capacity) {
+ // If more than half the elements won't be used, trim the array.
+ if (length == 0) {
+ array->initialize_elements();
+ } else {
+ backing_store->set_length(length);
+ Address filler_start = backing_store->address() +
+ BackingStore::OffsetOfElementAt(length);
+ int filler_size = (old_capacity - length) * ElementSize;
+ array->GetHeap()->CreateFillerObjectAt(filler_start, filler_size);
+ }
+ } else {
+ // Otherwise, fill the unused tail with holes.
+ int old_length = FastD2I(array->length()->Number());
+ for (int i = length; i < old_length; i++) {
+ backing_store->set_the_hole(i);
+ }
+ }
+ return length_object;
+ }
+
+ // Check whether the backing store should be expanded.
+ uint32_t min = JSObject::NewElementsCapacity(old_capacity);
+ uint32_t new_capacity = length > min ? length : min;
+ if (!array->ShouldConvertToSlowElements(new_capacity)) {
+ MaybeObject* result = FastElementsAccessorSubclass::
+ SetFastElementsCapacityAndLength(array, new_capacity, length);
+ if (result->IsFailure()) return result;
+ return length_object;
+ }
+
+ // Request conversion to slow elements.
+ return array->GetHeap()->undefined_value();
+ }
+};
+
+
+class FastObjectElementsAccessor
+ : public FastElementsAccessor<FastObjectElementsAccessor,
+ ElementsKindTraits<FAST_ELEMENTS>,
+ kPointerSize> {
+ public:
+ explicit FastObjectElementsAccessor(const char* name)
+ : FastElementsAccessor<FastObjectElementsAccessor,
+ ElementsKindTraits<FAST_ELEMENTS>,
+ kPointerSize>(name) {}
+
static MaybeObject* DeleteCommon(JSObject* obj,
uint32_t key) {
ASSERT(obj->HasFastElements() ||
@@ -271,7 +730,49 @@ class FastElementsAccessor
return heap->true_value();
}
+ static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
+ uint32_t from_start,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int copy_size) {
+ switch (to_kind) {
+ case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_ELEMENTS: {
+ CopyObjectToObjectElements(
+ FixedArray::cast(from), ElementsTraits::Kind, from_start,
+ FixedArray::cast(to), to_kind, to_start, copy_size);
+ return from;
+ }
+ case FAST_DOUBLE_ELEMENTS:
+ CopyObjectToDoubleElements(
+ FixedArray::cast(from), from_start,
+ FixedDoubleArray::cast(to), to_start, copy_size);
+ return from;
+ default:
+ UNREACHABLE();
+ }
+ return to->GetHeap()->undefined_value();
+ }
+
+
+ static MaybeObject* SetFastElementsCapacityAndLength(JSObject* obj,
+ uint32_t capacity,
+ uint32_t length) {
+ JSObject::SetFastElementsCapacityMode set_capacity_mode =
+ obj->HasFastSmiOnlyElements()
+ ? JSObject::kAllowSmiOnlyElements
+ : JSObject::kDontAllowSmiOnlyElements;
+ return obj->SetFastElementsCapacityAndLength(capacity,
+ length,
+ set_capacity_mode);
+ }
+
protected:
+ friend class FastElementsAccessor<FastObjectElementsAccessor,
+ ElementsKindTraits<FAST_ELEMENTS>,
+ kPointerSize>;
+
virtual MaybeObject* Delete(JSObject* obj,
uint32_t key,
JSReceiver::DeleteMode mode) {
@@ -281,11 +782,50 @@ class FastElementsAccessor
class FastDoubleElementsAccessor
- : public ElementsAccessorBase<FastDoubleElementsAccessor,
- FixedDoubleArray> {
+ : public FastElementsAccessor<FastDoubleElementsAccessor,
+ ElementsKindTraits<FAST_DOUBLE_ELEMENTS>,
+ kDoubleSize> {
+ public:
+ explicit FastDoubleElementsAccessor(const char* name)
+ : FastElementsAccessor<FastDoubleElementsAccessor,
+ ElementsKindTraits<FAST_DOUBLE_ELEMENTS>,
+ kDoubleSize>(name) {}
+
+ static MaybeObject* SetFastElementsCapacityAndLength(JSObject* obj,
+ uint32_t capacity,
+ uint32_t length) {
+ return obj->SetFastDoubleElementsCapacityAndLength(capacity, length);
+ }
+
protected:
friend class ElementsAccessorBase<FastDoubleElementsAccessor,
- FixedDoubleArray>;
+ ElementsKindTraits<FAST_DOUBLE_ELEMENTS> >;
+ friend class FastElementsAccessor<FastDoubleElementsAccessor,
+ ElementsKindTraits<FAST_DOUBLE_ELEMENTS>,
+ kDoubleSize>;
+
+ static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
+ uint32_t from_start,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int copy_size) {
+ switch (to_kind) {
+ case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_ELEMENTS:
+ return CopyDoubleToObjectElements(
+ FixedDoubleArray::cast(from), from_start, FixedArray::cast(to),
+ to_kind, to_start, copy_size);
+ case FAST_DOUBLE_ELEMENTS:
+ CopyDoubleToDoubleElements(FixedDoubleArray::cast(from), from_start,
+ FixedDoubleArray::cast(to),
+ to_start, copy_size);
+ return from;
+ default:
+ UNREACHABLE();
+ }
+ return to->GetHeap()->undefined_value();
+ }
virtual MaybeObject* Delete(JSObject* obj,
uint32_t key,
@@ -299,34 +839,49 @@ class FastDoubleElementsAccessor
return obj->GetHeap()->true_value();
}
- static bool HasElementAtIndex(FixedDoubleArray* backing_store,
- uint32_t index,
- JSObject* holder,
- Object* receiver) {
- return !backing_store->is_the_hole(index);
+ static bool HasElementImpl(Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ FixedDoubleArray* backing_store) {
+ return key < static_cast<uint32_t>(backing_store->length()) &&
+ !backing_store->is_the_hole(key);
}
};
// Super class for all external element arrays.
template<typename ExternalElementsAccessorSubclass,
- typename ExternalArray>
+ ElementsKind Kind>
class ExternalElementsAccessor
: public ElementsAccessorBase<ExternalElementsAccessorSubclass,
- ExternalArray> {
+ ElementsKindTraits<Kind> > {
+ public:
+ explicit ExternalElementsAccessor(const char* name)
+ : ElementsAccessorBase<ExternalElementsAccessorSubclass,
+ ElementsKindTraits<Kind> >(name) {}
+
protected:
+ typedef typename ElementsKindTraits<Kind>::BackingStore BackingStore;
+
friend class ElementsAccessorBase<ExternalElementsAccessorSubclass,
- ExternalArray>;
+ ElementsKindTraits<Kind> >;
- static MaybeObject* Get(ExternalArray* backing_store,
- uint32_t key,
- JSObject* obj,
- Object* receiver) {
- if (key < ExternalElementsAccessorSubclass::GetCapacity(backing_store)) {
- return backing_store->get(key);
- } else {
- return backing_store->GetHeap()->undefined_value();
- }
+ static MaybeObject* GetImpl(Object* receiver,
+ JSObject* obj,
+ uint32_t key,
+ BackingStore* backing_store) {
+ return
+ key < ExternalElementsAccessorSubclass::GetCapacityImpl(backing_store)
+ ? backing_store->get(key)
+ : backing_store->GetHeap()->undefined_value();
+ }
+
+ static MaybeObject* SetLengthImpl(JSObject* obj,
+ Object* length,
+ BackingStore* backing_store) {
+ // External arrays do not support changing their length.
+ UNREACHABLE();
+ return obj;
}
virtual MaybeObject* Delete(JSObject* obj,
@@ -335,67 +890,173 @@ class ExternalElementsAccessor
// External arrays always ignore deletes.
return obj->GetHeap()->true_value();
}
+
+ static bool HasElementImpl(Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ BackingStore* backing_store) {
+ uint32_t capacity =
+ ExternalElementsAccessorSubclass::GetCapacityImpl(backing_store);
+ return key < capacity;
+ }
};
class ExternalByteElementsAccessor
: public ExternalElementsAccessor<ExternalByteElementsAccessor,
- ExternalByteArray> {
+ EXTERNAL_BYTE_ELEMENTS> {
+ public:
+ explicit ExternalByteElementsAccessor(const char* name)
+ : ExternalElementsAccessor<ExternalByteElementsAccessor,
+ EXTERNAL_BYTE_ELEMENTS>(name) {}
};
class ExternalUnsignedByteElementsAccessor
: public ExternalElementsAccessor<ExternalUnsignedByteElementsAccessor,
- ExternalUnsignedByteArray> {
+ EXTERNAL_UNSIGNED_BYTE_ELEMENTS> {
+ public:
+ explicit ExternalUnsignedByteElementsAccessor(const char* name)
+ : ExternalElementsAccessor<ExternalUnsignedByteElementsAccessor,
+ EXTERNAL_UNSIGNED_BYTE_ELEMENTS>(name) {}
};
class ExternalShortElementsAccessor
: public ExternalElementsAccessor<ExternalShortElementsAccessor,
- ExternalShortArray> {
+ EXTERNAL_SHORT_ELEMENTS> {
+ public:
+ explicit ExternalShortElementsAccessor(const char* name)
+ : ExternalElementsAccessor<ExternalShortElementsAccessor,
+ EXTERNAL_SHORT_ELEMENTS>(name) {}
};
class ExternalUnsignedShortElementsAccessor
: public ExternalElementsAccessor<ExternalUnsignedShortElementsAccessor,
- ExternalUnsignedShortArray> {
+ EXTERNAL_UNSIGNED_SHORT_ELEMENTS> {
+ public:
+ explicit ExternalUnsignedShortElementsAccessor(const char* name)
+ : ExternalElementsAccessor<ExternalUnsignedShortElementsAccessor,
+ EXTERNAL_UNSIGNED_SHORT_ELEMENTS>(name) {}
};
class ExternalIntElementsAccessor
: public ExternalElementsAccessor<ExternalIntElementsAccessor,
- ExternalIntArray> {
+ EXTERNAL_INT_ELEMENTS> {
+ public:
+ explicit ExternalIntElementsAccessor(const char* name)
+ : ExternalElementsAccessor<ExternalIntElementsAccessor,
+ EXTERNAL_INT_ELEMENTS>(name) {}
};
class ExternalUnsignedIntElementsAccessor
: public ExternalElementsAccessor<ExternalUnsignedIntElementsAccessor,
- ExternalUnsignedIntArray> {
+ EXTERNAL_UNSIGNED_INT_ELEMENTS> {
+ public:
+ explicit ExternalUnsignedIntElementsAccessor(const char* name)
+ : ExternalElementsAccessor<ExternalUnsignedIntElementsAccessor,
+ EXTERNAL_UNSIGNED_INT_ELEMENTS>(name) {}
};
class ExternalFloatElementsAccessor
: public ExternalElementsAccessor<ExternalFloatElementsAccessor,
- ExternalFloatArray> {
+ EXTERNAL_FLOAT_ELEMENTS> {
+ public:
+ explicit ExternalFloatElementsAccessor(const char* name)
+ : ExternalElementsAccessor<ExternalFloatElementsAccessor,
+ EXTERNAL_FLOAT_ELEMENTS>(name) {}
};
class ExternalDoubleElementsAccessor
: public ExternalElementsAccessor<ExternalDoubleElementsAccessor,
- ExternalDoubleArray> {
+ EXTERNAL_DOUBLE_ELEMENTS> {
+ public:
+ explicit ExternalDoubleElementsAccessor(const char* name)
+ : ExternalElementsAccessor<ExternalDoubleElementsAccessor,
+ EXTERNAL_DOUBLE_ELEMENTS>(name) {}
};
class PixelElementsAccessor
: public ExternalElementsAccessor<PixelElementsAccessor,
- ExternalPixelArray> {
+ EXTERNAL_PIXEL_ELEMENTS> {
+ public:
+ explicit PixelElementsAccessor(const char* name)
+ : ExternalElementsAccessor<PixelElementsAccessor,
+ EXTERNAL_PIXEL_ELEMENTS>(name) {}
};
class DictionaryElementsAccessor
: public ElementsAccessorBase<DictionaryElementsAccessor,
- NumberDictionary> {
+ ElementsKindTraits<DICTIONARY_ELEMENTS> > {
public:
+ explicit DictionaryElementsAccessor(const char* name)
+ : ElementsAccessorBase<DictionaryElementsAccessor,
+ ElementsKindTraits<DICTIONARY_ELEMENTS> >(name) {}
+
+ // Adjusts the length of the dictionary backing store and returns the new
+ // length according to ES5 section 15.4.5.2 behavior.
+ static MaybeObject* SetLengthWithoutNormalize(SeededNumberDictionary* dict,
+ JSArray* array,
+ Object* length_object,
+ uint32_t length) {
+ if (length == 0) {
+ // If the length of a slow array is reset to zero, we clear
+ // the array and flush backing storage. This has the added
+ // benefit that the array returns to fast mode.
+ Object* obj;
+ MaybeObject* maybe_obj = array->ResetElements();
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+ } else {
+ uint32_t new_length = length;
+ uint32_t old_length = static_cast<uint32_t>(array->length()->Number());
+ if (new_length < old_length) {
+ // Find last non-deletable element in range of elements to be
+ // deleted and adjust range accordingly.
+ Heap* heap = array->GetHeap();
+ int capacity = dict->Capacity();
+ for (int i = 0; i < capacity; i++) {
+ Object* key = dict->KeyAt(i);
+ if (key->IsNumber()) {
+ uint32_t number = static_cast<uint32_t>(key->Number());
+ if (new_length <= number && number < old_length) {
+ PropertyDetails details = dict->DetailsAt(i);
+ if (details.IsDontDelete()) new_length = number + 1;
+ }
+ }
+ }
+ if (new_length != length) {
+ MaybeObject* maybe_object = heap->NumberFromUint32(new_length);
+ if (!maybe_object->To(&length_object)) return maybe_object;
+ }
+
+ // Remove elements that should be deleted.
+ int removed_entries = 0;
+ Object* the_hole_value = heap->the_hole_value();
+ for (int i = 0; i < capacity; i++) {
+ Object* key = dict->KeyAt(i);
+ if (key->IsNumber()) {
+ uint32_t number = static_cast<uint32_t>(key->Number());
+ if (new_length <= number && number < old_length) {
+ dict->SetEntry(i, the_hole_value, the_hole_value);
+ removed_entries++;
+ }
+ }
+ }
+
+ // Update the number of elements.
+ dict->ElementsRemoved(removed_entries);
+ }
+ }
+ return length_object;
+ }
+
static MaybeObject* DeleteCommon(JSObject* obj,
uint32_t key,
JSReceiver::DeleteMode mode) {
@@ -407,9 +1068,10 @@ class DictionaryElementsAccessor
if (is_arguments) {
backing_store = FixedArray::cast(backing_store->get(1));
}
- NumberDictionary* dictionary = NumberDictionary::cast(backing_store);
+ SeededNumberDictionary* dictionary =
+ SeededNumberDictionary::cast(backing_store);
int entry = dictionary->FindEntry(key);
- if (entry != NumberDictionary::kNotFound) {
+ if (entry != SeededNumberDictionary::kNotFound) {
Object* result = dictionary->DeleteProperty(entry, mode);
if (result == heap->true_value()) {
MaybeObject* maybe_elements = dictionary->Shrink(key);
@@ -440,9 +1102,34 @@ class DictionaryElementsAccessor
return heap->true_value();
}
+ static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
+ uint32_t from_start,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int copy_size) {
+ switch (to_kind) {
+ case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_ELEMENTS:
+ CopyDictionaryToObjectElements(
+ SeededNumberDictionary::cast(from), from_start,
+ FixedArray::cast(to), to_kind, to_start, copy_size);
+ return from;
+ case FAST_DOUBLE_ELEMENTS:
+ CopyDictionaryToDoubleElements(
+ SeededNumberDictionary::cast(from), from_start,
+ FixedDoubleArray::cast(to), to_start, copy_size);
+ return from;
+ default:
+ UNREACHABLE();
+ }
+ return to->GetHeap()->undefined_value();
+ }
+
+
protected:
friend class ElementsAccessorBase<DictionaryElementsAccessor,
- NumberDictionary>;
+ ElementsKindTraits<DICTIONARY_ELEMENTS> >;
virtual MaybeObject* Delete(JSObject* obj,
uint32_t key,
@@ -450,12 +1137,12 @@ class DictionaryElementsAccessor
return DeleteCommon(obj, key, mode);
}
- static MaybeObject* Get(NumberDictionary* backing_store,
- uint32_t key,
- JSObject* obj,
- Object* receiver) {
+ static MaybeObject* GetImpl(Object* receiver,
+ JSObject* obj,
+ uint32_t key,
+ SeededNumberDictionary* backing_store) {
int entry = backing_store->FindEntry(key);
- if (entry != NumberDictionary::kNotFound) {
+ if (entry != SeededNumberDictionary::kNotFound) {
Object* element = backing_store->ValueAt(entry);
PropertyDetails details = backing_store->DetailsAt(entry);
if (details.type() == CALLBACKS) {
@@ -470,26 +1157,40 @@ class DictionaryElementsAccessor
return obj->GetHeap()->the_hole_value();
}
- static uint32_t GetKeyForIndex(NumberDictionary* dict,
- uint32_t index) {
+ static bool HasElementImpl(Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ SeededNumberDictionary* backing_store) {
+ return backing_store->FindEntry(key) !=
+ SeededNumberDictionary::kNotFound;
+ }
+
+ static uint32_t GetKeyForIndexImpl(SeededNumberDictionary* dict,
+ uint32_t index) {
Object* key = dict->KeyAt(index);
return Smi::cast(key)->value();
}
};
-class NonStrictArgumentsElementsAccessor
- : public ElementsAccessorBase<NonStrictArgumentsElementsAccessor,
- FixedArray> {
+class NonStrictArgumentsElementsAccessor : public ElementsAccessorBase<
+ NonStrictArgumentsElementsAccessor,
+ ElementsKindTraits<NON_STRICT_ARGUMENTS_ELEMENTS> > {
+ public:
+ explicit NonStrictArgumentsElementsAccessor(const char* name)
+ : ElementsAccessorBase<
+ NonStrictArgumentsElementsAccessor,
+ ElementsKindTraits<NON_STRICT_ARGUMENTS_ELEMENTS> >(name) {}
protected:
- friend class ElementsAccessorBase<NonStrictArgumentsElementsAccessor,
- FixedArray>;
+ friend class ElementsAccessorBase<
+ NonStrictArgumentsElementsAccessor,
+ ElementsKindTraits<NON_STRICT_ARGUMENTS_ELEMENTS> >;
- static MaybeObject* Get(FixedArray* parameter_map,
- uint32_t key,
- JSObject* obj,
- Object* receiver) {
- Object* probe = GetParameterMapArg(parameter_map, key);
+ static MaybeObject* GetImpl(Object* receiver,
+ JSObject* obj,
+ uint32_t key,
+ FixedArray* parameter_map) {
+ Object* probe = GetParameterMapArg(obj, parameter_map, key);
if (!probe->IsTheHole()) {
Context* context = Context::cast(parameter_map->get(0));
int context_index = Smi::cast(probe)->value();
@@ -498,19 +1199,37 @@ class NonStrictArgumentsElementsAccessor
} else {
// Object is not mapped, defer to the arguments.
FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
- return ElementsAccessor::ForArray(arguments)->Get(arguments,
- key,
- obj,
- receiver);
+ MaybeObject* maybe_result = ElementsAccessor::ForArray(arguments)->Get(
+ receiver, obj, key, arguments);
+ Object* result;
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ // Elements of the arguments object in slow mode might be slow aliases.
+ if (result->IsAliasedArgumentsEntry()) {
+ AliasedArgumentsEntry* entry = AliasedArgumentsEntry::cast(result);
+ Context* context = Context::cast(parameter_map->get(0));
+ int context_index = entry->aliased_context_slot();
+ ASSERT(!context->get(context_index)->IsTheHole());
+ return context->get(context_index);
+ } else {
+ return result;
+ }
}
}
+ static MaybeObject* SetLengthImpl(JSObject* obj,
+ Object* length,
+ FixedArray* parameter_map) {
+ // TODO(mstarzinger): This was never implemented but will be used once we
+ // correctly implement [[DefineOwnProperty]] on arrays.
+ UNIMPLEMENTED();
+ return obj;
+ }
+
virtual MaybeObject* Delete(JSObject* obj,
- uint32_t key
- ,
+ uint32_t key,
JSReceiver::DeleteMode mode) {
FixedArray* parameter_map = FixedArray::cast(obj->elements());
- Object* probe = GetParameterMapArg(parameter_map, key);
+ Object* probe = GetParameterMapArg(obj, parameter_map, key);
if (!probe->IsTheHole()) {
// TODO(kmillikin): We could check if this was the last aliased
// parameter, and revert to normal elements in that case. That
@@ -521,41 +1240,57 @@ class NonStrictArgumentsElementsAccessor
if (arguments->IsDictionary()) {
return DictionaryElementsAccessor::DeleteCommon(obj, key, mode);
} else {
- return FastElementsAccessor::DeleteCommon(obj, key);
+ return FastObjectElementsAccessor::DeleteCommon(obj, key);
}
}
return obj->GetHeap()->true_value();
}
- static uint32_t GetCapacity(FixedArray* parameter_map) {
+ static MaybeObject* CopyElementsImpl(FixedArrayBase* from,
+ uint32_t from_start,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int copy_size) {
+ FixedArray* parameter_map = FixedArray::cast(from);
+ FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
+ ElementsAccessor* accessor = ElementsAccessor::ForArray(arguments);
+ return accessor->CopyElements(NULL, from_start, to, to_kind,
+ to_start, copy_size, arguments);
+ }
+
+ static uint32_t GetCapacityImpl(FixedArray* parameter_map) {
FixedArrayBase* arguments = FixedArrayBase::cast(parameter_map->get(1));
return Max(static_cast<uint32_t>(parameter_map->length() - 2),
ForArray(arguments)->GetCapacity(arguments));
}
- static uint32_t GetKeyForIndex(FixedArray* dict,
- uint32_t index) {
+ static uint32_t GetKeyForIndexImpl(FixedArray* dict,
+ uint32_t index) {
return index;
}
- static bool HasElementAtIndex(FixedArray* parameter_map,
- uint32_t index,
- JSObject* holder,
- Object* receiver) {
- Object* probe = GetParameterMapArg(parameter_map, index);
+ static bool HasElementImpl(Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ FixedArray* parameter_map) {
+ Object* probe = GetParameterMapArg(holder, parameter_map, key);
if (!probe->IsTheHole()) {
return true;
} else {
FixedArrayBase* arguments = FixedArrayBase::cast(parameter_map->get(1));
ElementsAccessor* accessor = ElementsAccessor::ForArray(arguments);
- return !accessor->Get(arguments, index, holder, receiver)->IsTheHole();
+ return !accessor->Get(receiver, holder, key, arguments)->IsTheHole();
}
}
private:
- static Object* GetParameterMapArg(FixedArray* parameter_map,
+ static Object* GetParameterMapArg(JSObject* holder,
+ FixedArray* parameter_map,
uint32_t key) {
- uint32_t length = parameter_map->length();
+ uint32_t length = holder->IsJSArray()
+ ? Smi::cast(JSArray::cast(holder)->length())->value()
+ : parameter_map->length();
return key < (length - 2 )
? parameter_map->get(key + 2)
: parameter_map->GetHeap()->the_hole_value();
@@ -597,40 +1332,10 @@ ElementsAccessor* ElementsAccessor::ForArray(FixedArrayBase* array) {
void ElementsAccessor::InitializeOncePerProcess() {
- static struct ConcreteElementsAccessors {
- // Use the fast element handler for smi-only arrays. The implementation is
- // currently identical.
- FastElementsAccessor fast_smi_elements_handler;
- FastElementsAccessor fast_elements_handler;
- FastDoubleElementsAccessor fast_double_elements_handler;
- DictionaryElementsAccessor dictionary_elements_handler;
- NonStrictArgumentsElementsAccessor non_strict_arguments_elements_handler;
- ExternalByteElementsAccessor byte_elements_handler;
- ExternalUnsignedByteElementsAccessor unsigned_byte_elements_handler;
- ExternalShortElementsAccessor short_elements_handler;
- ExternalUnsignedShortElementsAccessor unsigned_short_elements_handler;
- ExternalIntElementsAccessor int_elements_handler;
- ExternalUnsignedIntElementsAccessor unsigned_int_elements_handler;
- ExternalFloatElementsAccessor float_elements_handler;
- ExternalDoubleElementsAccessor double_elements_handler;
- PixelElementsAccessor pixel_elements_handler;
- } element_accessors;
-
static ElementsAccessor* accessor_array[] = {
- &element_accessors.fast_smi_elements_handler,
- &element_accessors.fast_elements_handler,
- &element_accessors.fast_double_elements_handler,
- &element_accessors.dictionary_elements_handler,
- &element_accessors.non_strict_arguments_elements_handler,
- &element_accessors.byte_elements_handler,
- &element_accessors.unsigned_byte_elements_handler,
- &element_accessors.short_elements_handler,
- &element_accessors.unsigned_short_elements_handler,
- &element_accessors.int_elements_handler,
- &element_accessors.unsigned_int_elements_handler,
- &element_accessors.float_elements_handler,
- &element_accessors.double_elements_handler,
- &element_accessors.pixel_elements_handler
+#define ACCESSOR_ARRAY(Class, Kind, Store) new Class(#Kind),
+ ELEMENTS_LIST(ACCESSOR_ARRAY)
+#undef ACCESSOR_ARRAY
};
STATIC_ASSERT((sizeof(accessor_array) / sizeof(*accessor_array)) ==
@@ -640,4 +1345,73 @@ void ElementsAccessor::InitializeOncePerProcess() {
}
+void ElementsAccessor::TearDown() {
+#define ACCESSOR_DELETE(Class, Kind, Store) delete elements_accessors_[Kind];
+ ELEMENTS_LIST(ACCESSOR_DELETE)
+#undef ACCESSOR_DELETE
+ elements_accessors_ = NULL;
+}
+
+
+template <typename ElementsAccessorSubclass, typename ElementsKindTraits>
+MaybeObject* ElementsAccessorBase<ElementsAccessorSubclass,
+ ElementsKindTraits>::
+ SetLengthImpl(JSObject* obj,
+ Object* length,
+ typename ElementsKindTraits::BackingStore* backing_store) {
+ JSArray* array = JSArray::cast(obj);
+
+ // Fast case: The new length fits into a Smi.
+ MaybeObject* maybe_smi_length = length->ToSmi();
+ Object* smi_length = Smi::FromInt(0);
+ if (maybe_smi_length->ToObject(&smi_length) && smi_length->IsSmi()) {
+ const int value = Smi::cast(smi_length)->value();
+ if (value >= 0) {
+ Object* new_length;
+ MaybeObject* result = ElementsAccessorSubclass::
+ SetLengthWithoutNormalize(backing_store, array, smi_length, value);
+ if (!result->ToObject(&new_length)) return result;
+ ASSERT(new_length->IsSmi() || new_length->IsUndefined());
+ if (new_length->IsSmi()) {
+ array->set_length(Smi::cast(new_length));
+ return array;
+ }
+ } else {
+ return ThrowArrayLengthRangeError(array->GetHeap());
+ }
+ }
+
+ // Slow case: The new length does not fit into a Smi or conversion
+ // to slow elements is needed for other reasons.
+ if (length->IsNumber()) {
+ uint32_t value;
+ if (length->ToArrayIndex(&value)) {
+ SeededNumberDictionary* dictionary;
+ MaybeObject* maybe_object = array->NormalizeElements();
+ if (!maybe_object->To(&dictionary)) return maybe_object;
+ Object* new_length;
+ MaybeObject* result = DictionaryElementsAccessor::
+ SetLengthWithoutNormalize(dictionary, array, length, value);
+ if (!result->ToObject(&new_length)) return result;
+ ASSERT(new_length->IsNumber());
+ array->set_length(new_length);
+ return array;
+ } else {
+ return ThrowArrayLengthRangeError(array->GetHeap());
+ }
+ }
+
+ // Fall-back case: The new length is not a number so make the array
+ // size one and set only element to length.
+ FixedArray* new_backing_store;
+ MaybeObject* maybe_obj = array->GetHeap()->AllocateFixedArray(1);
+ if (!maybe_obj->To(&new_backing_store)) return maybe_obj;
+ new_backing_store->set(0, length);
+ { MaybeObject* result = array->SetContent(new_backing_store);
+ if (result->IsFailure()) return result;
+ }
+ return array;
+}
+
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/elements.h b/src/3rdparty/v8/src/elements.h
index 851c8c3..51d402d 100644
--- a/src/3rdparty/v8/src/elements.h
+++ b/src/3rdparty/v8/src/elements.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -29,6 +29,8 @@
#define V8_ELEMENTS_H_
#include "objects.h"
+#include "heap.h"
+#include "isolate.h"
namespace v8 {
namespace internal {
@@ -37,21 +39,88 @@ namespace internal {
// ElementsKinds.
class ElementsAccessor {
public:
- ElementsAccessor() { }
+ explicit ElementsAccessor(const char* name) : name_(name) { }
virtual ~ElementsAccessor() { }
- virtual MaybeObject* Get(FixedArrayBase* backing_store,
- uint32_t key,
- JSObject* holder,
- Object* receiver) = 0;
+ virtual ElementsKind kind() const = 0;
+ const char* name() const { return name_; }
+
+ // Returns true if a holder contains an element with the specified key
+ // without iterating up the prototype chain. The caller can optionally pass
+ // in the backing store to use for the check, which must be compatible with
+ // the ElementsKind of the ElementsAccessor. If backing_store is NULL, the
+ // holder->elements() is used as the backing store.
+ virtual bool HasElement(Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ FixedArrayBase* backing_store = NULL) = 0;
+
+ // Returns the element with the specified key or undefined if there is no such
+ // element. This method doesn't iterate up the prototype chain. The caller
+ // can optionally pass in the backing store to use for the check, which must
+ // be compatible with the ElementsKind of the ElementsAccessor. If
+ // backing_store is NULL, the holder->elements() is used as the backing store.
+ virtual MaybeObject* Get(Object* receiver,
+ JSObject* holder,
+ uint32_t key,
+ FixedArrayBase* backing_store = NULL) = 0;
+
+ // Modifies the length data property as specified for JSArrays and resizes the
+ // underlying backing store accordingly. The method honors the semantics of
+ // changing array sizes as defined in EcmaScript 5.1 15.4.5.2, i.e. array that
+ // have non-deletable elements can only be shrunk to the size of highest
+ // element that is non-deletable.
+ virtual MaybeObject* SetLength(JSArray* holder,
+ Object* new_length) = 0;
+
+ // Modifies both the length and capacity of a JSArray, resizing the underlying
+ // backing store as necessary. This method does NOT honor the semantics of
+ // EcmaScript 5.1 15.4.5.2, arrays can be shrunk beyond non-deletable
+ // elements. This method should only be called for array expansion OR by
+ // runtime JavaScript code that use InternalArrays and don't care about
+ // EcmaScript 5.1 semantics.
+ virtual MaybeObject* SetCapacityAndLength(JSArray* array,
+ int capacity,
+ int length) = 0;
+
+ // Deletes an element in an object, returning a new elements backing store.
virtual MaybeObject* Delete(JSObject* holder,
uint32_t key,
JSReceiver::DeleteMode mode) = 0;
- virtual MaybeObject* AddElementsToFixedArray(FixedArrayBase* from,
- FixedArray* to,
+ // If kCopyToEnd is specified as the copy_size to CopyElements, it copies all
+ // of elements from source after source_start to the destination array.
+ static const int kCopyToEnd = -1;
+ // If kCopyToEndAndInitializeToHole is specified as the copy_size to
+ // CopyElements, it copies all of elements from source after source_start to
+ // destination array, padding any remaining uninitialized elements in the
+ // destination array with the hole.
+ static const int kCopyToEndAndInitializeToHole = -2;
+
+ // Copy elements from one backing store to another. Typically, callers specify
+ // the source JSObject or JSArray in source_holder. If the holder's backing
+ // store is available, it can be passed in source and source_holder is
+ // ignored.
+ virtual MaybeObject* CopyElements(JSObject* source_holder,
+ uint32_t source_start,
+ FixedArrayBase* destination,
+ ElementsKind destination_kind,
+ uint32_t destination_start,
+ int copy_size,
+ FixedArrayBase* source = NULL) = 0;
+
+ MaybeObject* CopyElements(JSObject* from_holder,
+ FixedArrayBase* to,
+ ElementsKind to_kind,
+ FixedArrayBase* from = NULL) {
+ return CopyElements(from_holder, 0, to, to_kind, 0,
+ kCopyToEndAndInitializeToHole, from);
+ }
+
+ virtual MaybeObject* AddElementsToFixedArray(Object* receiver,
JSObject* holder,
- Object* receiver) = 0;
+ FixedArray* to,
+ FixedArrayBase* from = NULL) = 0;
// Returns a shared ElementsAccessor for the specified ElementsKind.
static ElementsAccessor* ForKind(ElementsKind elements_kind) {
@@ -62,34 +131,41 @@ class ElementsAccessor {
static ElementsAccessor* ForArray(FixedArrayBase* array);
static void InitializeOncePerProcess();
+ static void TearDown();
protected:
friend class NonStrictArgumentsElementsAccessor;
virtual uint32_t GetCapacity(FixedArrayBase* backing_store) = 0;
- virtual bool HasElementAtIndex(FixedArrayBase* backing_store,
- uint32_t index,
- JSObject* holder,
- Object* receiver) = 0;
-
- // Element handlers distinguish between indexes and keys when the manipulate
+ // Element handlers distinguish between indexes and keys when they manipulate
// elements. Indexes refer to elements in terms of their location in the
- // underlying storage's backing store representation, and are between 0
+ // underlying storage's backing store representation, and are between 0 and
// GetCapacity. Keys refer to elements in terms of the value that would be
- // specific in JavaScript to access the element. In most implementations, keys
- // are equivalent to indexes, and GetKeyForIndex returns the same value it is
- // passed. In the NumberDictionary ElementsAccessor, GetKeyForIndex maps the
- // index to a key using the KeyAt method on the NumberDictionary.
+ // specified in JavaScript to access the element. In most implementations,
+ // keys are equivalent to indexes, and GetKeyForIndex returns the same value
+ // it is passed. In the NumberDictionary ElementsAccessor, GetKeyForIndex maps
+ // the index to a key using the KeyAt method on the NumberDictionary.
virtual uint32_t GetKeyForIndex(FixedArrayBase* backing_store,
uint32_t index) = 0;
private:
static ElementsAccessor** elements_accessors_;
+ const char* name_;
DISALLOW_COPY_AND_ASSIGN(ElementsAccessor);
};
+
+void CopyObjectToObjectElements(FixedArray* from_obj,
+ ElementsKind from_kind,
+ uint32_t from_start,
+ FixedArray* to_obj,
+ ElementsKind to_kind,
+ uint32_t to_start,
+ int copy_size);
+
+
} } // namespace v8::internal
#endif // V8_ELEMENTS_H_
diff --git a/src/3rdparty/v8/src/execution.cc b/src/3rdparty/v8/src/execution.cc
index 9d26526..5618975 100644
--- a/src/3rdparty/v8/src/execution.cc
+++ b/src/3rdparty/v8/src/execution.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -71,8 +71,7 @@ static Handle<Object> Invoke(bool is_construct,
Handle<Object> receiver,
int argc,
Handle<Object> args[],
- bool* has_pending_exception,
- Handle<Object> qml) {
+ bool* has_pending_exception) {
Isolate* isolate = function->GetIsolate();
// Entering JavaScript.
@@ -103,12 +102,6 @@ static Handle<Object> Invoke(bool is_construct,
// make the current one is indeed a global object.
ASSERT(function->context()->global()->IsGlobalObject());
- Handle<JSObject> oldqml;
- if (!qml.is_null()) {
- oldqml = Handle<JSObject>(function->context()->qml_global());
- function->context()->set_qml_global(JSObject::cast(*qml));
- }
-
{
// Save and restore context around invocation and block the
// allocation of handles without explicit handle scopes.
@@ -125,9 +118,6 @@ static Handle<Object> Invoke(bool is_construct,
CALL_GENERATED_CODE(stub_entry, function_entry, func, recv, argc, argv);
}
- if (!qml.is_null())
- function->context()->set_qml_global(*oldqml);
-
#ifdef DEBUG
value->Verify();
#endif
@@ -156,18 +146,7 @@ Handle<Object> Execution::Call(Handle<Object> callable,
int argc,
Handle<Object> argv[],
bool* pending_exception,
- bool convert_receiver)
-{
- return Call(callable, receiver, argc, argv, pending_exception, convert_receiver, Handle<Object>());
-}
-
-Handle<Object> Execution::Call(Handle<Object> callable,
- Handle<Object> receiver,
- int argc,
- Handle<Object> argv[],
- bool* pending_exception,
- bool convert_receiver,
- Handle<Object> qml) {
+ bool convert_receiver) {
*pending_exception = false;
if (!callable->IsJSFunction()) {
@@ -178,7 +157,7 @@ Handle<Object> Execution::Call(Handle<Object> callable,
// In non-strict mode, convert receiver.
if (convert_receiver && !receiver->IsJSReceiver() &&
- !func->shared()->native() && !func->shared()->strict_mode()) {
+ !func->shared()->native() && func->shared()->is_classic_mode()) {
if (receiver->IsUndefined() || receiver->IsNull()) {
Object* global = func->context()->global()->global_receiver();
// Under some circumstances, 'global' can be the JSBuiltinsObject
@@ -191,7 +170,7 @@ Handle<Object> Execution::Call(Handle<Object> callable,
if (*pending_exception) return callable;
}
- return Invoke(false, func, receiver, argc, argv, pending_exception, qml);
+ return Invoke(false, func, receiver, argc, argv, pending_exception);
}
@@ -200,7 +179,7 @@ Handle<Object> Execution::New(Handle<JSFunction> func,
Handle<Object> argv[],
bool* pending_exception) {
return Invoke(true, func, Isolate::Current()->global(), argc, argv,
- pending_exception, Handle<Object>());
+ pending_exception);
}
@@ -219,7 +198,7 @@ Handle<Object> Execution::TryCall(Handle<JSFunction> func,
*caught_exception = false;
Handle<Object> result = Invoke(false, func, receiver, argc, args,
- caught_exception, Handle<Object>());
+ caught_exception);
if (*caught_exception) {
ASSERT(catcher.HasCaught());
@@ -377,7 +356,7 @@ void StackGuard::EnableInterrupts() {
void StackGuard::SetStackLimit(uintptr_t limit) {
ExecutionAccess access(isolate_);
- // If the current limits are special (eg due to a pending interrupt) then
+ // If the current limits are special (e.g. due to a pending interrupt) then
// leave them alone.
uintptr_t jslimit = SimulatorStack::JsLimitFromCLimit(isolate_, limit);
if (thread_local_.jslimit_ == thread_local_.real_jslimit_) {
@@ -397,6 +376,12 @@ void StackGuard::DisableInterrupts() {
}
+bool StackGuard::ShouldPostponeInterrupts() {
+ ExecutionAccess access(isolate_);
+ return should_postpone_interrupts(access);
+}
+
+
bool StackGuard::IsInterrupted() {
ExecutionAccess access(isolate_);
return (thread_local_.interrupt_flags_ & INTERRUPT) != 0;
@@ -841,6 +826,11 @@ Object* Execution::DebugBreakHelper() {
return isolate->heap()->undefined_value();
}
+ StackLimitCheck check(isolate);
+ if (check.HasOverflowed()) {
+ return isolate->heap()->undefined_value();
+ }
+
{
JavaScriptFrameIterator it(isolate);
ASSERT(!it.done());
@@ -866,17 +856,22 @@ Object* Execution::DebugBreakHelper() {
// Clear the debug break request flag.
isolate->stack_guard()->Continue(DEBUGBREAK);
- ProcessDebugMesssages(debug_command_only);
+ ProcessDebugMessages(debug_command_only);
// Return to continue execution.
return isolate->heap()->undefined_value();
}
-void Execution::ProcessDebugMesssages(bool debug_command_only) {
+void Execution::ProcessDebugMessages(bool debug_command_only) {
Isolate* isolate = Isolate::Current();
// Clear the debug command request flag.
isolate->stack_guard()->Continue(DEBUGCOMMAND);
+ StackLimitCheck check(isolate);
+ if (check.HasOverflowed()) {
+ return;
+ }
+
HandleScope scope(isolate);
// Enter the debugger. Just continue if we fail to enter the debugger.
EnterDebugger debugger;
@@ -893,17 +888,22 @@ void Execution::ProcessDebugMesssages(bool debug_command_only) {
#endif
-MaybeObject* Execution::HandleStackGuardInterrupt() {
- Isolate* isolate = Isolate::Current();
+MaybeObject* Execution::HandleStackGuardInterrupt(Isolate* isolate) {
StackGuard* stack_guard = isolate->stack_guard();
+ if (stack_guard->ShouldPostponeInterrupts()) {
+ return isolate->heap()->undefined_value();
+ }
if (stack_guard->IsGCRequest()) {
- isolate->heap()->CollectAllGarbage(false);
+ isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags,
+ "StackGuard GC request");
stack_guard->Continue(GC_REQUEST);
}
isolate->counters()->stack_interrupts()->Increment();
- if (stack_guard->IsRuntimeProfilerTick()) {
+ // If FLAG_count_based_interrupts, every interrupt is a profiler interrupt.
+ if (FLAG_count_based_interrupts ||
+ stack_guard->IsRuntimeProfilerTick()) {
isolate->counters()->runtime_profiler_ticks()->Increment();
stack_guard->Continue(RUNTIME_PROFILER_TICK);
isolate->runtime_profiler()->OptimizeNow();
@@ -925,4 +925,5 @@ MaybeObject* Execution::HandleStackGuardInterrupt() {
return isolate->heap()->undefined_value();
}
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/execution.h b/src/3rdparty/v8/src/execution.h
index 532e5d8..01e4b9d 100644
--- a/src/3rdparty/v8/src/execution.h
+++ b/src/3rdparty/v8/src/execution.h
@@ -45,6 +45,10 @@ enum InterruptFlag {
GC_REQUEST = 1 << 6
};
+
+class Isolate;
+
+
class Execution : public AllStatic {
public:
// Call a function, the caller supplies a receiver and an array
@@ -65,14 +69,6 @@ class Execution : public AllStatic {
bool* pending_exception,
bool convert_receiver = false);
- static Handle<Object> Call(Handle<Object> callable,
- Handle<Object> receiver,
- int argc,
- Handle<Object> argv[],
- bool* pending_exception,
- bool convert_receiver,
- Handle<Object> qml);
-
// Construct object from function, the caller supplies an array of
// arguments. Arguments are Object* type. After function returns,
// pointers in 'args' might be invalid.
@@ -144,12 +140,13 @@ class Execution : public AllStatic {
Handle<Object> is_global);
#ifdef ENABLE_DEBUGGER_SUPPORT
static Object* DebugBreakHelper();
- static void ProcessDebugMesssages(bool debug_command_only);
+ static void ProcessDebugMessages(bool debug_command_only);
#endif
// If the stack guard is triggered, but it is not an actual
// stack overflow, then handle the interruption accordingly.
- MUST_USE_RESULT static MaybeObject* HandleStackGuardInterrupt();
+ MUST_USE_RESULT static MaybeObject* HandleStackGuardInterrupt(
+ Isolate* isolate);
// Get a function delegate (or undefined) for the given non-function
// object. Used for support calling objects as functions.
@@ -166,7 +163,6 @@ class Execution : public AllStatic {
class ExecutionAccess;
-class Isolate;
// StackGuard contains the handling of the limits that are used to limit the
@@ -230,6 +226,7 @@ class StackGuard {
Address address_of_real_jslimit() {
return reinterpret_cast<Address>(&thread_local_.real_jslimit_);
}
+ bool ShouldPostponeInterrupts();
private:
StackGuard();
diff --git a/src/3rdparty/v8/src/extensions/experimental/break-iterator.cc b/src/3rdparty/v8/src/extensions/experimental/break-iterator.cc
deleted file mode 100644
index e695a3e..0000000
--- a/src/3rdparty/v8/src/extensions/experimental/break-iterator.cc
+++ /dev/null
@@ -1,252 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "src/extensions/experimental/break-iterator.h"
-
-#include <string.h>
-
-#include "unicode/brkiter.h"
-#include "unicode/locid.h"
-#include "unicode/rbbi.h"
-
-namespace v8 {
-namespace internal {
-
-v8::Persistent<v8::FunctionTemplate> BreakIterator::break_iterator_template_;
-
-icu::BreakIterator* BreakIterator::UnpackBreakIterator(
- v8::Handle<v8::Object> obj) {
- if (break_iterator_template_->HasInstance(obj)) {
- return static_cast<icu::BreakIterator*>(
- obj->GetPointerFromInternalField(0));
- }
-
- return NULL;
-}
-
-icu::UnicodeString* BreakIterator::ResetAdoptedText(
- v8::Handle<v8::Object> obj, v8::Handle<v8::Value> value) {
- // Get the previous value from the internal field.
- icu::UnicodeString* text = static_cast<icu::UnicodeString*>(
- obj->GetPointerFromInternalField(1));
- delete text;
-
- // Assign new value to the internal pointer.
- v8::String::Value text_value(value);
- text = new icu::UnicodeString(
- reinterpret_cast<const UChar*>(*text_value), text_value.length());
- obj->SetPointerInInternalField(1, text);
-
- // Return new unicode string pointer.
- return text;
-}
-
-void BreakIterator::DeleteBreakIterator(v8::Persistent<v8::Value> object,
- void* param) {
- v8::Persistent<v8::Object> persistent_object =
- v8::Persistent<v8::Object>::Cast(object);
-
- // First delete the hidden C++ object.
- // Unpacking should never return NULL here. That would only happen if
- // this method is used as the weak callback for persistent handles not
- // pointing to a break iterator.
- delete UnpackBreakIterator(persistent_object);
-
- delete static_cast<icu::UnicodeString*>(
- persistent_object->GetPointerFromInternalField(1));
-
- // Then dispose of the persistent handle to JS object.
- persistent_object.Dispose();
-}
-
-// Throws a JavaScript exception.
-static v8::Handle<v8::Value> ThrowUnexpectedObjectError() {
- // Returns undefined, and schedules an exception to be thrown.
- return v8::ThrowException(v8::Exception::Error(
- v8::String::New("BreakIterator method called on an object "
- "that is not a BreakIterator.")));
-}
-
-v8::Handle<v8::Value> BreakIterator::BreakIteratorAdoptText(
- const v8::Arguments& args) {
- if (args.Length() != 1 || !args[0]->IsString()) {
- return v8::ThrowException(v8::Exception::SyntaxError(
- v8::String::New("Text input is required.")));
- }
-
- icu::BreakIterator* break_iterator = UnpackBreakIterator(args.Holder());
- if (!break_iterator) {
- return ThrowUnexpectedObjectError();
- }
-
- break_iterator->setText(*ResetAdoptedText(args.Holder(), args[0]));
-
- return v8::Undefined();
-}
-
-v8::Handle<v8::Value> BreakIterator::BreakIteratorFirst(
- const v8::Arguments& args) {
- icu::BreakIterator* break_iterator = UnpackBreakIterator(args.Holder());
- if (!break_iterator) {
- return ThrowUnexpectedObjectError();
- }
-
- return v8::Int32::New(break_iterator->first());
-}
-
-v8::Handle<v8::Value> BreakIterator::BreakIteratorNext(
- const v8::Arguments& args) {
- icu::BreakIterator* break_iterator = UnpackBreakIterator(args.Holder());
- if (!break_iterator) {
- return ThrowUnexpectedObjectError();
- }
-
- return v8::Int32::New(break_iterator->next());
-}
-
-v8::Handle<v8::Value> BreakIterator::BreakIteratorCurrent(
- const v8::Arguments& args) {
- icu::BreakIterator* break_iterator = UnpackBreakIterator(args.Holder());
- if (!break_iterator) {
- return ThrowUnexpectedObjectError();
- }
-
- return v8::Int32::New(break_iterator->current());
-}
-
-v8::Handle<v8::Value> BreakIterator::BreakIteratorBreakType(
- const v8::Arguments& args) {
- icu::BreakIterator* break_iterator = UnpackBreakIterator(args.Holder());
- if (!break_iterator) {
- return ThrowUnexpectedObjectError();
- }
-
- // TODO(cira): Remove cast once ICU fixes base BreakIterator class.
- icu::RuleBasedBreakIterator* rule_based_iterator =
- static_cast<icu::RuleBasedBreakIterator*>(break_iterator);
- int32_t status = rule_based_iterator->getRuleStatus();
- // Keep return values in sync with JavaScript BreakType enum.
- if (status >= UBRK_WORD_NONE && status < UBRK_WORD_NONE_LIMIT) {
- return v8::Int32::New(UBRK_WORD_NONE);
- } else if (status >= UBRK_WORD_NUMBER && status < UBRK_WORD_NUMBER_LIMIT) {
- return v8::Int32::New(UBRK_WORD_NUMBER);
- } else if (status >= UBRK_WORD_LETTER && status < UBRK_WORD_LETTER_LIMIT) {
- return v8::Int32::New(UBRK_WORD_LETTER);
- } else if (status >= UBRK_WORD_KANA && status < UBRK_WORD_KANA_LIMIT) {
- return v8::Int32::New(UBRK_WORD_KANA);
- } else if (status >= UBRK_WORD_IDEO && status < UBRK_WORD_IDEO_LIMIT) {
- return v8::Int32::New(UBRK_WORD_IDEO);
- } else {
- return v8::Int32::New(-1);
- }
-}
-
-v8::Handle<v8::Value> BreakIterator::JSBreakIterator(
- const v8::Arguments& args) {
- v8::HandleScope handle_scope;
-
- if (args.Length() != 2 || !args[0]->IsString() || !args[1]->IsString()) {
- return v8::ThrowException(v8::Exception::SyntaxError(
- v8::String::New("Locale and iterator type are required.")));
- }
-
- v8::String::Utf8Value locale(args[0]);
- icu::Locale icu_locale(*locale);
-
- UErrorCode status = U_ZERO_ERROR;
- icu::BreakIterator* break_iterator = NULL;
- v8::String::Utf8Value type(args[1]);
- if (!strcmp(*type, "character")) {
- break_iterator =
- icu::BreakIterator::createCharacterInstance(icu_locale, status);
- } else if (!strcmp(*type, "word")) {
- break_iterator =
- icu::BreakIterator::createWordInstance(icu_locale, status);
- } else if (!strcmp(*type, "sentence")) {
- break_iterator =
- icu::BreakIterator::createSentenceInstance(icu_locale, status);
- } else if (!strcmp(*type, "line")) {
- break_iterator =
- icu::BreakIterator::createLineInstance(icu_locale, status);
- } else {
- return v8::ThrowException(v8::Exception::SyntaxError(
- v8::String::New("Invalid iterator type.")));
- }
-
- if (U_FAILURE(status)) {
- delete break_iterator;
- return v8::ThrowException(v8::Exception::Error(
- v8::String::New("Failed to create break iterator.")));
- }
-
- if (break_iterator_template_.IsEmpty()) {
- v8::Local<v8::FunctionTemplate> raw_template(v8::FunctionTemplate::New());
-
- raw_template->SetClassName(v8::String::New("v8Locale.v8BreakIterator"));
-
- // Define internal field count on instance template.
- v8::Local<v8::ObjectTemplate> object_template =
- raw_template->InstanceTemplate();
-
- // Set aside internal fields for icu break iterator and adopted text.
- object_template->SetInternalFieldCount(2);
-
- // Define all of the prototype methods on prototype template.
- v8::Local<v8::ObjectTemplate> proto = raw_template->PrototypeTemplate();
- proto->Set(v8::String::New("adoptText"),
- v8::FunctionTemplate::New(BreakIteratorAdoptText));
- proto->Set(v8::String::New("first"),
- v8::FunctionTemplate::New(BreakIteratorFirst));
- proto->Set(v8::String::New("next"),
- v8::FunctionTemplate::New(BreakIteratorNext));
- proto->Set(v8::String::New("current"),
- v8::FunctionTemplate::New(BreakIteratorCurrent));
- proto->Set(v8::String::New("breakType"),
- v8::FunctionTemplate::New(BreakIteratorBreakType));
-
- break_iterator_template_ =
- v8::Persistent<v8::FunctionTemplate>::New(raw_template);
- }
-
- // Create an empty object wrapper.
- v8::Local<v8::Object> local_object =
- break_iterator_template_->GetFunction()->NewInstance();
- v8::Persistent<v8::Object> wrapper =
- v8::Persistent<v8::Object>::New(local_object);
-
- // Set break iterator as internal field of the resulting JS object.
- wrapper->SetPointerInInternalField(0, break_iterator);
- // Make sure that the pointer to adopted text is NULL.
- wrapper->SetPointerInInternalField(1, NULL);
-
- // Make object handle weak so we can delete iterator once GC kicks in.
- wrapper.MakeWeak(NULL, DeleteBreakIterator);
-
- return wrapper;
-}
-
-} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/extensions/experimental/break-iterator.h b/src/3rdparty/v8/src/extensions/experimental/break-iterator.h
deleted file mode 100644
index 73b9bbd..0000000
--- a/src/3rdparty/v8/src/extensions/experimental/break-iterator.h
+++ /dev/null
@@ -1,89 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#ifndef V8_EXTENSIONS_EXPERIMENTAL_BREAK_ITERATOR_H_
-#define V8_EXTENSIONS_EXPERIMENTAL_BREAK_ITERATOR_H_
-
-#include "include/v8.h"
-
-#include "unicode/uversion.h"
-
-namespace U_ICU_NAMESPACE {
-class BreakIterator;
-class UnicodeString;
-}
-
-namespace v8 {
-namespace internal {
-
-class BreakIterator {
- public:
- static v8::Handle<v8::Value> JSBreakIterator(const v8::Arguments& args);
-
- // Helper methods for various bindings.
-
- // Unpacks break iterator object from corresponding JavaScript object.
- static icu::BreakIterator* UnpackBreakIterator(v8::Handle<v8::Object> obj);
-
- // Deletes the old value and sets the adopted text in
- // corresponding JavaScript object.
- static icu::UnicodeString* ResetAdoptedText(v8::Handle<v8::Object> obj,
- v8::Handle<v8::Value> text_value);
-
- // Release memory we allocated for the BreakIterator once the JS object that
- // holds the pointer gets garbage collected.
- static void DeleteBreakIterator(v8::Persistent<v8::Value> object,
- void* param);
-
- // Assigns new text to the iterator.
- static v8::Handle<v8::Value> BreakIteratorAdoptText(
- const v8::Arguments& args);
-
- // Moves iterator to the beginning of the string and returns new position.
- static v8::Handle<v8::Value> BreakIteratorFirst(const v8::Arguments& args);
-
- // Moves iterator to the next position and returns it.
- static v8::Handle<v8::Value> BreakIteratorNext(const v8::Arguments& args);
-
- // Returns current iterator's current position.
- static v8::Handle<v8::Value> BreakIteratorCurrent(
- const v8::Arguments& args);
-
- // Returns type of the item from current position.
- // This call is only valid for word break iterators. Others just return 0.
- static v8::Handle<v8::Value> BreakIteratorBreakType(
- const v8::Arguments& args);
-
- private:
- BreakIterator() {}
-
- static v8::Persistent<v8::FunctionTemplate> break_iterator_template_;
-};
-
-} } // namespace v8::internal
-
-#endif // V8_EXTENSIONS_EXPERIMENTAL_BREAK_ITERATOR_H_
diff --git a/src/3rdparty/v8/src/extensions/experimental/collator.cc b/src/3rdparty/v8/src/extensions/experimental/collator.cc
deleted file mode 100644
index 5cf2192..0000000
--- a/src/3rdparty/v8/src/extensions/experimental/collator.cc
+++ /dev/null
@@ -1,222 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "src/extensions/experimental/collator.h"
-
-#include "unicode/coll.h"
-#include "unicode/locid.h"
-#include "unicode/ucol.h"
-
-namespace v8 {
-namespace internal {
-
-v8::Persistent<v8::FunctionTemplate> Collator::collator_template_;
-
-icu::Collator* Collator::UnpackCollator(v8::Handle<v8::Object> obj) {
- if (collator_template_->HasInstance(obj)) {
- return static_cast<icu::Collator*>(obj->GetPointerFromInternalField(0));
- }
-
- return NULL;
-}
-
-void Collator::DeleteCollator(v8::Persistent<v8::Value> object, void* param) {
- v8::Persistent<v8::Object> persistent_object =
- v8::Persistent<v8::Object>::Cast(object);
-
- // First delete the hidden C++ object.
- // Unpacking should never return NULL here. That would only happen if
- // this method is used as the weak callback for persistent handles not
- // pointing to a collator.
- delete UnpackCollator(persistent_object);
-
- // Then dispose of the persistent handle to JS object.
- persistent_object.Dispose();
-}
-
-// Throws a JavaScript exception.
-static v8::Handle<v8::Value> ThrowUnexpectedObjectError() {
- // Returns undefined, and schedules an exception to be thrown.
- return v8::ThrowException(v8::Exception::Error(
- v8::String::New("Collator method called on an object "
- "that is not a Collator.")));
-}
-
-// Extract a boolean option named in |option| and set it to |result|.
-// Return true if it's specified. Otherwise, return false.
-static bool ExtractBooleanOption(const v8::Local<v8::Object>& options,
- const char* option,
- bool* result) {
- v8::HandleScope handle_scope;
- v8::TryCatch try_catch;
- v8::Handle<v8::Value> value = options->Get(v8::String::New(option));
- if (try_catch.HasCaught()) {
- return false;
- }
- // No need to check if |value| is empty because it's taken care of
- // by TryCatch above.
- if (!value->IsUndefined() && !value->IsNull()) {
- if (value->IsBoolean()) {
- *result = value->BooleanValue();
- return true;
- }
- }
- return false;
-}
-
-// When there's an ICU error, throw a JavaScript error with |message|.
-static v8::Handle<v8::Value> ThrowExceptionForICUError(const char* message) {
- return v8::ThrowException(v8::Exception::Error(v8::String::New(message)));
-}
-
-v8::Handle<v8::Value> Collator::CollatorCompare(const v8::Arguments& args) {
- if (args.Length() != 2 || !args[0]->IsString() || !args[1]->IsString()) {
- return v8::ThrowException(v8::Exception::SyntaxError(
- v8::String::New("Two string arguments are required.")));
- }
-
- icu::Collator* collator = UnpackCollator(args.Holder());
- if (!collator) {
- return ThrowUnexpectedObjectError();
- }
-
- v8::String::Value string_value1(args[0]);
- v8::String::Value string_value2(args[1]);
- const UChar* string1 = reinterpret_cast<const UChar*>(*string_value1);
- const UChar* string2 = reinterpret_cast<const UChar*>(*string_value2);
- UErrorCode status = U_ZERO_ERROR;
- UCollationResult result = collator->compare(
- string1, string_value1.length(), string2, string_value2.length(), status);
-
- if (U_FAILURE(status)) {
- return ThrowExceptionForICUError(
- "Unexpected failure in Collator.compare.");
- }
-
- return v8::Int32::New(result);
-}
-
-v8::Handle<v8::Value> Collator::JSCollator(const v8::Arguments& args) {
- v8::HandleScope handle_scope;
-
- if (args.Length() != 2 || !args[0]->IsString() || !args[1]->IsObject()) {
- return v8::ThrowException(v8::Exception::SyntaxError(
- v8::String::New("Locale and collation options are required.")));
- }
-
- v8::String::AsciiValue locale(args[0]);
- icu::Locale icu_locale(*locale);
-
- icu::Collator* collator = NULL;
- UErrorCode status = U_ZERO_ERROR;
- collator = icu::Collator::createInstance(icu_locale, status);
-
- if (U_FAILURE(status)) {
- delete collator;
- return ThrowExceptionForICUError("Failed to create collator.");
- }
-
- v8::Local<v8::Object> options(args[1]->ToObject());
-
- // Below, we change collation options that are explicitly specified
- // by a caller in JavaScript. Otherwise, we don't touch because
- // we don't want to change the locale-dependent default value.
- // The three options below are very likely to have the same default
- // across locales, but I haven't checked them all. Others we may add
- // in the future have certainly locale-dependent default (e.g.
- // caseFirst is upperFirst for Danish while is off for most other locales).
-
- bool ignore_case, ignore_accents, numeric;
-
- if (ExtractBooleanOption(options, "ignoreCase", &ignore_case)) {
- // We need to explicitly set the level to secondary to get case ignored.
- // The default L3 ignores UCOL_CASE_LEVEL == UCOL_OFF !
- if (ignore_case) {
- collator->setStrength(icu::Collator::SECONDARY);
- }
- collator->setAttribute(UCOL_CASE_LEVEL, ignore_case ? UCOL_OFF : UCOL_ON,
- status);
- if (U_FAILURE(status)) {
- delete collator;
- return ThrowExceptionForICUError("Failed to set ignoreCase.");
- }
- }
-
- // Accents are taken into account with strength secondary or higher.
- if (ExtractBooleanOption(options, "ignoreAccents", &ignore_accents)) {
- if (!ignore_accents) {
- collator->setStrength(icu::Collator::SECONDARY);
- } else {
- collator->setStrength(icu::Collator::PRIMARY);
- }
- }
-
- if (ExtractBooleanOption(options, "numeric", &numeric)) {
- collator->setAttribute(UCOL_NUMERIC_COLLATION,
- numeric ? UCOL_ON : UCOL_OFF, status);
- if (U_FAILURE(status)) {
- delete collator;
- return ThrowExceptionForICUError("Failed to set numeric sort option.");
- }
- }
-
- if (collator_template_.IsEmpty()) {
- v8::Local<v8::FunctionTemplate> raw_template(v8::FunctionTemplate::New());
- raw_template->SetClassName(v8::String::New("v8Locale.Collator"));
-
- // Define internal field count on instance template.
- v8::Local<v8::ObjectTemplate> object_template =
- raw_template->InstanceTemplate();
-
- // Set aside internal fields for icu collator.
- object_template->SetInternalFieldCount(1);
-
- // Define all of the prototype methods on prototype template.
- v8::Local<v8::ObjectTemplate> proto = raw_template->PrototypeTemplate();
- proto->Set(v8::String::New("compare"),
- v8::FunctionTemplate::New(CollatorCompare));
-
- collator_template_ =
- v8::Persistent<v8::FunctionTemplate>::New(raw_template);
- }
-
- // Create an empty object wrapper.
- v8::Local<v8::Object> local_object =
- collator_template_->GetFunction()->NewInstance();
- v8::Persistent<v8::Object> wrapper =
- v8::Persistent<v8::Object>::New(local_object);
-
- // Set collator as internal field of the resulting JS object.
- wrapper->SetPointerInInternalField(0, collator);
-
- // Make object handle weak so we can delete iterator once GC kicks in.
- wrapper.MakeWeak(NULL, DeleteCollator);
-
- return wrapper;
-}
-
-} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/extensions/experimental/datetime-format.cc b/src/3rdparty/v8/src/extensions/experimental/datetime-format.cc
deleted file mode 100644
index 94a29ac..0000000
--- a/src/3rdparty/v8/src/extensions/experimental/datetime-format.cc
+++ /dev/null
@@ -1,384 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "src/extensions/experimental/datetime-format.h"
-
-#include <string.h>
-
-#include "src/extensions/experimental/i18n-utils.h"
-#include "unicode/dtfmtsym.h"
-#include "unicode/dtptngen.h"
-#include "unicode/locid.h"
-#include "unicode/smpdtfmt.h"
-
-namespace v8 {
-namespace internal {
-
-v8::Persistent<v8::FunctionTemplate> DateTimeFormat::datetime_format_template_;
-
-static icu::DateFormat* CreateDateTimeFormat(v8::Handle<v8::String>,
- v8::Handle<v8::Object>);
-static v8::Handle<v8::Value> GetSymbols(
- const v8::Arguments&,
- const icu::UnicodeString*, int32_t,
- const icu::UnicodeString*, int32_t,
- const icu::UnicodeString*, int32_t);
-static v8::Handle<v8::Value> ThrowUnexpectedObjectError();
-static icu::DateFormat::EStyle GetDateTimeStyle(const icu::UnicodeString&);
-
-icu::SimpleDateFormat* DateTimeFormat::UnpackDateTimeFormat(
- v8::Handle<v8::Object> obj) {
- if (datetime_format_template_->HasInstance(obj)) {
- return static_cast<icu::SimpleDateFormat*>(
- obj->GetPointerFromInternalField(0));
- }
-
- return NULL;
-}
-
-void DateTimeFormat::DeleteDateTimeFormat(v8::Persistent<v8::Value> object,
- void* param) {
- v8::Persistent<v8::Object> persistent_object =
- v8::Persistent<v8::Object>::Cast(object);
-
- // First delete the hidden C++ object.
- // Unpacking should never return NULL here. That would only happen if
- // this method is used as the weak callback for persistent handles not
- // pointing to a date time formatter.
- delete UnpackDateTimeFormat(persistent_object);
-
- // Then dispose of the persistent handle to JS object.
- persistent_object.Dispose();
-}
-
-v8::Handle<v8::Value> DateTimeFormat::Format(const v8::Arguments& args) {
- v8::HandleScope handle_scope;
-
- double millis = 0.0;
- if (args.Length() != 1 || !args[0]->IsDate()) {
- // Create a new date.
- v8::TryCatch try_catch;
- v8::Local<v8::Script> date_script =
- v8::Script::Compile(v8::String::New("eval('new Date()')"));
- millis = date_script->Run()->NumberValue();
- if (try_catch.HasCaught()) {
- return try_catch.ReThrow();
- }
- } else {
- millis = v8::Date::Cast(*args[0])->NumberValue();
- }
-
- icu::SimpleDateFormat* date_format = UnpackDateTimeFormat(args.Holder());
- if (!date_format) {
- return ThrowUnexpectedObjectError();
- }
-
- icu::UnicodeString result;
- date_format->format(millis, result);
-
- return v8::String::New(
- reinterpret_cast<const uint16_t*>(result.getBuffer()), result.length());
-}
-
-v8::Handle<v8::Value> DateTimeFormat::GetMonths(const v8::Arguments& args) {
- icu::SimpleDateFormat* date_format = UnpackDateTimeFormat(args.Holder());
- if (!date_format) {
- return ThrowUnexpectedObjectError();
- }
-
- const icu::DateFormatSymbols* symbols = date_format->getDateFormatSymbols();
-
- int32_t narrow_count;
- const icu::UnicodeString* narrow = symbols->getMonths(
- narrow_count,
- icu::DateFormatSymbols::STANDALONE,
- icu::DateFormatSymbols::NARROW);
- int32_t abbrev_count;
- const icu::UnicodeString* abbrev = symbols->getMonths(
- abbrev_count,
- icu::DateFormatSymbols::STANDALONE,
- icu::DateFormatSymbols::ABBREVIATED);
- int32_t wide_count;
- const icu::UnicodeString* wide = symbols->getMonths(
- wide_count,
- icu::DateFormatSymbols::STANDALONE,
- icu::DateFormatSymbols::WIDE);
-
- return GetSymbols(
- args, narrow, narrow_count, abbrev, abbrev_count, wide, wide_count);
-}
-
-v8::Handle<v8::Value> DateTimeFormat::GetWeekdays(const v8::Arguments& args) {
- icu::SimpleDateFormat* date_format = UnpackDateTimeFormat(args.Holder());
- if (!date_format) {
- return ThrowUnexpectedObjectError();
- }
-
- const icu::DateFormatSymbols* symbols = date_format->getDateFormatSymbols();
-
- int32_t narrow_count;
- const icu::UnicodeString* narrow = symbols->getWeekdays(
- narrow_count,
- icu::DateFormatSymbols::STANDALONE,
- icu::DateFormatSymbols::NARROW);
- int32_t abbrev_count;
- const icu::UnicodeString* abbrev = symbols->getWeekdays(
- abbrev_count,
- icu::DateFormatSymbols::STANDALONE,
- icu::DateFormatSymbols::ABBREVIATED);
- int32_t wide_count;
- const icu::UnicodeString* wide = symbols->getWeekdays(
- wide_count,
- icu::DateFormatSymbols::STANDALONE,
- icu::DateFormatSymbols::WIDE);
-
- // getXXXWeekdays always returns 8 elements - ICU stable API.
- // We can't use ASSERT_EQ(8, narrow_count) because ASSERT is internal to v8.
- if (narrow_count != 8 || abbrev_count != 8 || wide_count != 8) {
- return v8::ThrowException(v8::Exception::Error(
- v8::String::New("Failed to get weekday information.")));
- }
-
- // ICU documentation says we should ignore element 0 of the returned array.
- return GetSymbols(args, narrow + 1, narrow_count - 1, abbrev + 1,
- abbrev_count -1 , wide + 1, wide_count - 1);
-}
-
-v8::Handle<v8::Value> DateTimeFormat::GetEras(const v8::Arguments& args) {
- icu::SimpleDateFormat* date_format = UnpackDateTimeFormat(args.Holder());
- if (!date_format) {
- return ThrowUnexpectedObjectError();
- }
-
- const icu::DateFormatSymbols* symbols = date_format->getDateFormatSymbols();
-
- int32_t narrow_count;
- const icu::UnicodeString* narrow = symbols->getNarrowEras(narrow_count);
- int32_t abbrev_count;
- const icu::UnicodeString* abbrev = symbols->getEras(abbrev_count);
- int32_t wide_count;
- const icu::UnicodeString* wide = symbols->getEraNames(wide_count);
-
- return GetSymbols(
- args, narrow, narrow_count, abbrev, abbrev_count, wide, wide_count);
-}
-
-v8::Handle<v8::Value> DateTimeFormat::GetAmPm(const v8::Arguments& args) {
- icu::SimpleDateFormat* date_format = UnpackDateTimeFormat(args.Holder());
- if (!date_format) {
- return ThrowUnexpectedObjectError();
- }
-
- const icu::DateFormatSymbols* symbols = date_format->getDateFormatSymbols();
-
- // In this case narrow == abbreviated == wide
- int32_t count;
- const icu::UnicodeString* wide = symbols->getAmPmStrings(count);
-
- return GetSymbols(args, wide, count, wide, count, wide, count);
-}
-
-v8::Handle<v8::Value> DateTimeFormat::JSDateTimeFormat(
- const v8::Arguments& args) {
- v8::HandleScope handle_scope;
-
- if (args.Length() != 2 || !args[0]->IsString() || !args[1]->IsObject()) {
- return v8::ThrowException(v8::Exception::SyntaxError(
- v8::String::New("Locale and date/time options are required.")));
- }
-
- icu::SimpleDateFormat* date_format = static_cast<icu::SimpleDateFormat*>(
- CreateDateTimeFormat(args[0]->ToString(), args[1]->ToObject()));
-
- if (datetime_format_template_.IsEmpty()) {
- v8::Local<v8::FunctionTemplate> raw_template(v8::FunctionTemplate::New());
-
- raw_template->SetClassName(v8::String::New("v8Locale.DateTimeFormat"));
-
- // Define internal field count on instance template.
- v8::Local<v8::ObjectTemplate> object_template =
- raw_template->InstanceTemplate();
-
- // Set aside internal field for icu date time formatter.
- object_template->SetInternalFieldCount(1);
-
- // Define all of the prototype methods on prototype template.
- v8::Local<v8::ObjectTemplate> proto = raw_template->PrototypeTemplate();
- proto->Set(v8::String::New("format"),
- v8::FunctionTemplate::New(Format));
- proto->Set(v8::String::New("getMonths"),
- v8::FunctionTemplate::New(GetMonths));
- proto->Set(v8::String::New("getWeekdays"),
- v8::FunctionTemplate::New(GetWeekdays));
- proto->Set(v8::String::New("getEras"),
- v8::FunctionTemplate::New(GetEras));
- proto->Set(v8::String::New("getAmPm"),
- v8::FunctionTemplate::New(GetAmPm));
-
- datetime_format_template_ =
- v8::Persistent<v8::FunctionTemplate>::New(raw_template);
- }
-
- // Create an empty object wrapper.
- v8::Local<v8::Object> local_object =
- datetime_format_template_->GetFunction()->NewInstance();
- v8::Persistent<v8::Object> wrapper =
- v8::Persistent<v8::Object>::New(local_object);
-
- // Set date time formatter as internal field of the resulting JS object.
- wrapper->SetPointerInInternalField(0, date_format);
-
- // Set resolved pattern in options.pattern.
- icu::UnicodeString pattern;
- date_format->toPattern(pattern);
- v8::Local<v8::Object> options = v8::Object::New();
- options->Set(v8::String::New("pattern"),
- v8::String::New(reinterpret_cast<const uint16_t*>(
- pattern.getBuffer()), pattern.length()));
- wrapper->Set(v8::String::New("options"), options);
-
- // Make object handle weak so we can delete iterator once GC kicks in.
- wrapper.MakeWeak(NULL, DeleteDateTimeFormat);
-
- return wrapper;
-}
-
-// Returns SimpleDateFormat.
-static icu::DateFormat* CreateDateTimeFormat(
- v8::Handle<v8::String> locale, v8::Handle<v8::Object> settings) {
- v8::HandleScope handle_scope;
-
- v8::String::AsciiValue ascii_locale(locale);
- icu::Locale icu_locale(*ascii_locale);
-
- // Make formatter from skeleton.
- icu::SimpleDateFormat* date_format = NULL;
- UErrorCode status = U_ZERO_ERROR;
- icu::UnicodeString skeleton;
- if (I18NUtils::ExtractStringSetting(settings, "skeleton", &skeleton)) {
- v8::Local<icu::DateTimePatternGenerator> generator(
- icu::DateTimePatternGenerator::createInstance(icu_locale, status));
- icu::UnicodeString pattern =
- generator->getBestPattern(skeleton, status);
-
- date_format = new icu::SimpleDateFormat(pattern, icu_locale, status);
- if (U_SUCCESS(status)) {
- return date_format;
- } else {
- delete date_format;
- }
- }
-
- // Extract date style and time style from settings.
- icu::UnicodeString date_style;
- icu::DateFormat::EStyle icu_date_style = icu::DateFormat::kNone;
- if (I18NUtils::ExtractStringSetting(settings, "dateStyle", &date_style)) {
- icu_date_style = GetDateTimeStyle(date_style);
- }
-
- icu::UnicodeString time_style;
- icu::DateFormat::EStyle icu_time_style = icu::DateFormat::kNone;
- if (I18NUtils::ExtractStringSetting(settings, "timeStyle", &time_style)) {
- icu_time_style = GetDateTimeStyle(time_style);
- }
-
- // Try all combinations of date/time styles.
- if (icu_date_style == icu::DateFormat::kNone &&
- icu_time_style == icu::DateFormat::kNone) {
- // Return default short date, short
- return icu::DateFormat::createDateTimeInstance(
- icu::DateFormat::kShort, icu::DateFormat::kShort, icu_locale);
- } else if (icu_date_style != icu::DateFormat::kNone &&
- icu_time_style != icu::DateFormat::kNone) {
- return icu::DateFormat::createDateTimeInstance(
- icu_date_style, icu_time_style, icu_locale);
- } else if (icu_date_style != icu::DateFormat::kNone) {
- return icu::DateFormat::createDateInstance(icu_date_style, icu_locale);
- } else {
- // icu_time_style != icu::DateFormat::kNone
- return icu::DateFormat::createTimeInstance(icu_time_style, icu_locale);
- }
-}
-
-// Creates a v8::Array of narrow, abbrev or wide symbols.
-static v8::Handle<v8::Value> GetSymbols(const v8::Arguments& args,
- const icu::UnicodeString* narrow,
- int32_t narrow_count,
- const icu::UnicodeString* abbrev,
- int32_t abbrev_count,
- const icu::UnicodeString* wide,
- int32_t wide_count) {
- v8::HandleScope handle_scope;
-
- // Make wide width default.
- const icu::UnicodeString* result = wide;
- int32_t count = wide_count;
-
- if (args.Length() == 1 && args[0]->IsString()) {
- v8::String::AsciiValue ascii_value(args[0]);
- if (strcmp(*ascii_value, "abbreviated") == 0) {
- result = abbrev;
- count = abbrev_count;
- } else if (strcmp(*ascii_value, "narrow") == 0) {
- result = narrow;
- count = narrow_count;
- }
- }
-
- v8::Handle<v8::Array> symbols = v8::Array::New();
- for (int32_t i = 0; i < count; ++i) {
- symbols->Set(i, v8::String::New(
- reinterpret_cast<const uint16_t*>(result[i].getBuffer()),
- result[i].length()));
- }
-
- return handle_scope.Close(symbols);
-}
-
-// Throws a JavaScript exception.
-static v8::Handle<v8::Value> ThrowUnexpectedObjectError() {
- // Returns undefined, and schedules an exception to be thrown.
- return v8::ThrowException(v8::Exception::Error(
- v8::String::New("DateTimeFormat method called on an object "
- "that is not a DateTimeFormat.")));
-}
-
-// Returns icu date/time style.
-static icu::DateFormat::EStyle GetDateTimeStyle(
- const icu::UnicodeString& type) {
- if (type == UNICODE_STRING_SIMPLE("medium")) {
- return icu::DateFormat::kMedium;
- } else if (type == UNICODE_STRING_SIMPLE("long")) {
- return icu::DateFormat::kLong;
- } else if (type == UNICODE_STRING_SIMPLE("full")) {
- return icu::DateFormat::kFull;
- }
-
- return icu::DateFormat::kShort;
-}
-
-} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/extensions/experimental/datetime-format.h b/src/3rdparty/v8/src/extensions/experimental/datetime-format.h
deleted file mode 100644
index a6a228c..0000000
--- a/src/3rdparty/v8/src/extensions/experimental/datetime-format.h
+++ /dev/null
@@ -1,83 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#ifndef V8_EXTENSIONS_EXPERIMENTAL_DATETIME_FORMAT_H_
-#define V8_EXTENSIONS_EXPERIMENTAL_DATETIME_FORMAT_H_
-
-#include "include/v8.h"
-
-#include "unicode/uversion.h"
-
-namespace U_ICU_NAMESPACE {
-class SimpleDateFormat;
-}
-
-namespace v8 {
-namespace internal {
-
-class DateTimeFormat {
- public:
- static v8::Handle<v8::Value> JSDateTimeFormat(const v8::Arguments& args);
-
- // Helper methods for various bindings.
-
- // Unpacks date format object from corresponding JavaScript object.
- static icu::SimpleDateFormat* UnpackDateTimeFormat(
- v8::Handle<v8::Object> obj);
-
- // Release memory we allocated for the DateFormat once the JS object that
- // holds the pointer gets garbage collected.
- static void DeleteDateTimeFormat(v8::Persistent<v8::Value> object,
- void* param);
-
- // Formats date and returns corresponding string.
- static v8::Handle<v8::Value> Format(const v8::Arguments& args);
-
- // All date time symbol methods below return stand-alone names in
- // either narrow, abbreviated or wide width.
-
- // Get list of months.
- static v8::Handle<v8::Value> GetMonths(const v8::Arguments& args);
-
- // Get list of weekdays.
- static v8::Handle<v8::Value> GetWeekdays(const v8::Arguments& args);
-
- // Get list of eras.
- static v8::Handle<v8::Value> GetEras(const v8::Arguments& args);
-
- // Get list of day periods.
- static v8::Handle<v8::Value> GetAmPm(const v8::Arguments& args);
-
- private:
- DateTimeFormat();
-
- static v8::Persistent<v8::FunctionTemplate> datetime_format_template_;
-};
-
-} } // namespace v8::internal
-
-#endif // V8_EXTENSIONS_EXPERIMENTAL_DATETIME_FORMAT_H_
diff --git a/src/3rdparty/v8/src/extensions/experimental/experimental.gyp b/src/3rdparty/v8/src/extensions/experimental/experimental.gyp
deleted file mode 100644
index 24fb683..0000000
--- a/src/3rdparty/v8/src/extensions/experimental/experimental.gyp
+++ /dev/null
@@ -1,105 +0,0 @@
-# Copyright 2011 the V8 project authors. All rights reserved.
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following
-# disclaimer in the documentation and/or other materials provided
-# with the distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived
-# from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-{
- 'variables': {
- # TODO(cira): Find out how to pass this value for arbitrary embedder.
- # Chromium sets it in common.gypi and does force include of that file for
- # all sub projects.
- 'icu_src_dir%': '../../../../third_party/icu',
- },
- 'targets': [
- {
- 'target_name': 'i18n_api',
- 'type': 'static_library',
- 'sources': [
- 'break-iterator.cc',
- 'break-iterator.h',
- 'collator.cc',
- 'collator.h',
- 'datetime-format.cc',
- 'datetime-format.h',
- 'i18n-extension.cc',
- 'i18n-extension.h',
- 'i18n-locale.cc',
- 'i18n-locale.h',
- 'i18n-natives.h',
- 'i18n-utils.cc',
- 'i18n-utils.h',
- 'language-matcher.cc',
- 'language-matcher.h',
- 'number-format.cc',
- 'number-format.h',
- '<(SHARED_INTERMEDIATE_DIR)/i18n-js.cc',
- ],
- 'include_dirs': [
- '<(icu_src_dir)/public/common',
- # v8/ is root for all includes.
- '../../..'
- ],
- 'dependencies': [
- '<(icu_src_dir)/icu.gyp:*',
- 'js2c_i18n#host',
- '../../../tools/gyp/v8.gyp:v8',
- ],
- 'direct_dependent_settings': {
- # Adds -Iv8 for embedders.
- 'include_dirs': [
- '../../..'
- ],
- },
- },
- {
- 'target_name': 'js2c_i18n',
- 'type': 'none',
- 'toolsets': ['host'],
- 'variables': {
- 'js_files': [
- 'i18n.js'
- ],
- },
- 'actions': [
- {
- 'action_name': 'js2c_i18n',
- 'inputs': [
- 'i18n-js2c.py',
- '<@(js_files)',
- ],
- 'outputs': [
- '<(SHARED_INTERMEDIATE_DIR)/i18n-js.cc',
- ],
- 'action': [
- 'python',
- 'i18n-js2c.py',
- '<@(_outputs)',
- '<@(js_files)'
- ],
- },
- ],
- },
- ], # targets
-}
diff --git a/src/3rdparty/v8/src/extensions/experimental/i18n-extension.cc b/src/3rdparty/v8/src/extensions/experimental/i18n-extension.cc
deleted file mode 100644
index c5afcf0..0000000
--- a/src/3rdparty/v8/src/extensions/experimental/i18n-extension.cc
+++ /dev/null
@@ -1,74 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "src/extensions/experimental/i18n-extension.h"
-
-#include "src/extensions/experimental/break-iterator.h"
-#include "src/extensions/experimental/collator.h"
-#include "src/extensions/experimental/datetime-format.h"
-#include "src/extensions/experimental/i18n-locale.h"
-#include "src/extensions/experimental/i18n-natives.h"
-#include "src/extensions/experimental/number-format.h"
-
-namespace v8 {
-namespace internal {
-
-I18NExtension* I18NExtension::extension_ = NULL;
-
-I18NExtension::I18NExtension()
- : v8::Extension("v8/i18n", I18Natives::GetScriptSource()) {
-}
-
-v8::Handle<v8::FunctionTemplate> I18NExtension::GetNativeFunction(
- v8::Handle<v8::String> name) {
- if (name->Equals(v8::String::New("NativeJSLocale"))) {
- return v8::FunctionTemplate::New(I18NLocale::JSLocale);
- } else if (name->Equals(v8::String::New("NativeJSBreakIterator"))) {
- return v8::FunctionTemplate::New(BreakIterator::JSBreakIterator);
- } else if (name->Equals(v8::String::New("NativeJSCollator"))) {
- return v8::FunctionTemplate::New(Collator::JSCollator);
- } else if (name->Equals(v8::String::New("NativeJSDateTimeFormat"))) {
- return v8::FunctionTemplate::New(DateTimeFormat::JSDateTimeFormat);
- } else if (name->Equals(v8::String::New("NativeJSNumberFormat"))) {
- return v8::FunctionTemplate::New(NumberFormat::JSNumberFormat);
- }
-
- return v8::Handle<v8::FunctionTemplate>();
-}
-
-I18NExtension* I18NExtension::get() {
- if (!extension_) {
- extension_ = new I18NExtension();
- }
- return extension_;
-}
-
-void I18NExtension::Register() {
- static v8::DeclareExtension i18n_extension_declaration(I18NExtension::get());
-}
-
-} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/extensions/experimental/i18n-js2c.py b/src/3rdparty/v8/src/extensions/experimental/i18n-js2c.py
deleted file mode 100644
index 9c3128b..0000000
--- a/src/3rdparty/v8/src/extensions/experimental/i18n-js2c.py
+++ /dev/null
@@ -1,126 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2011 the V8 project authors. All rights reserved.
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following
-# disclaimer in the documentation and/or other materials provided
-# with the distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived
-# from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# This is a utility for converting I18N JavaScript source code into C-style
-# char arrays. It is used for embedded JavaScript code in the V8
-# library.
-# This is a pared down copy of v8/tools/js2c.py that avoids use of
-# v8/src/natives.h and produces different cc template.
-
-import os, re, sys, string
-
-
-def ToCArray(lines):
- result = []
- for chr in lines:
- value = ord(chr)
- assert value < 128
- result.append(str(value))
- result.append("0")
- return ", ".join(result)
-
-
-def RemoveCommentsAndTrailingWhitespace(lines):
- lines = re.sub(r'//.*\n', '\n', lines) # end-of-line comments
- lines = re.sub(re.compile(r'/\*.*?\*/', re.DOTALL), '', lines) # comments.
- lines = re.sub(r'\s+\n+', '\n', lines) # trailing whitespace
- return lines
-
-
-def ReadFile(filename):
- file = open(filename, "rt")
- try:
- lines = file.read()
- finally:
- file.close()
- return lines
-
-
-EVAL_PATTERN = re.compile(r'\beval\s*\(');
-WITH_PATTERN = re.compile(r'\bwith\s*\(');
-
-
-def Validate(lines, file):
- lines = RemoveCommentsAndTrailingWhitespace(lines)
- # Because of simplified context setup, eval and with is not
- # allowed in the natives files.
- eval_match = EVAL_PATTERN.search(lines)
- if eval_match:
- raise ("Eval disallowed in natives: %s" % file)
- with_match = WITH_PATTERN.search(lines)
- if with_match:
- raise ("With statements disallowed in natives: %s" % file)
-
-
-HEADER_TEMPLATE = """\
-// Copyright 2011 Google Inc. All Rights Reserved.
-
-// This file was generated from .js source files by gyp. If you
-// want to make changes to this file you should either change the
-// javascript source files or the i18n-js2c.py script.
-
-#include "src/extensions/experimental/i18n-natives.h"
-
-namespace v8 {
-namespace internal {
-
-// static
-const char* I18Natives::GetScriptSource() {
- // JavaScript source gets injected here.
- static const char i18n_source[] = {%s};
-
- return i18n_source;
-}
-
-} // internal
-} // v8
-"""
-
-
-def JS2C(source, target):
- filename = str(source)
-
- lines = ReadFile(filename)
- Validate(lines, filename)
- data = ToCArray(lines)
-
- # Emit result
- output = open(target, "w")
- output.write(HEADER_TEMPLATE % data)
- output.close()
-
-
-def main():
- target = sys.argv[1]
- source = sys.argv[2]
- JS2C(source, target)
-
-
-if __name__ == "__main__":
- main()
diff --git a/src/3rdparty/v8/src/extensions/experimental/i18n-locale.cc b/src/3rdparty/v8/src/extensions/experimental/i18n-locale.cc
deleted file mode 100644
index 46a5f87..0000000
--- a/src/3rdparty/v8/src/extensions/experimental/i18n-locale.cc
+++ /dev/null
@@ -1,111 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "src/extensions/experimental/i18n-locale.h"
-
-#include "src/extensions/experimental/i18n-utils.h"
-#include "src/extensions/experimental/language-matcher.h"
-#include "unicode/locid.h"
-#include "unicode/uloc.h"
-
-namespace v8 {
-namespace internal {
-
-const char* const I18NLocale::kLocaleID = "localeID";
-const char* const I18NLocale::kRegionID = "regionID";
-const char* const I18NLocale::kICULocaleID = "icuLocaleID";
-
-v8::Handle<v8::Value> I18NLocale::JSLocale(const v8::Arguments& args) {
- v8::HandleScope handle_scope;
-
- if (args.Length() != 1 || !args[0]->IsObject()) {
- return v8::Undefined();
- }
-
- v8::Local<v8::Object> settings = args[0]->ToObject();
-
- // Get best match for locale.
- v8::TryCatch try_catch;
- v8::Handle<v8::Value> locale_id = settings->Get(v8::String::New(kLocaleID));
- if (try_catch.HasCaught()) {
- return v8::Undefined();
- }
-
- LocaleIDMatch result;
- if (locale_id->IsArray()) {
- LanguageMatcher::GetBestMatchForPriorityList(
- v8::Handle<v8::Array>::Cast(locale_id), &result);
- } else if (locale_id->IsString()) {
- LanguageMatcher::GetBestMatchForString(locale_id->ToString(), &result);
- } else {
- LanguageMatcher::GetBestMatchForString(v8::String::New(""), &result);
- }
-
- // Get best match for region.
- char region_id[ULOC_COUNTRY_CAPACITY];
- I18NUtils::StrNCopy(region_id, ULOC_COUNTRY_CAPACITY, "");
-
- v8::Handle<v8::Value> region = settings->Get(v8::String::New(kRegionID));
- if (try_catch.HasCaught()) {
- return v8::Undefined();
- }
-
- if (!GetBestMatchForRegionID(result.icu_id, region, region_id)) {
- // Set region id to empty string because region couldn't be inferred.
- I18NUtils::StrNCopy(region_id, ULOC_COUNTRY_CAPACITY, "");
- }
-
- // Build JavaScript object that contains bcp and icu locale ID and region ID.
- v8::Handle<v8::Object> locale = v8::Object::New();
- locale->Set(v8::String::New(kLocaleID), v8::String::New(result.bcp47_id));
- locale->Set(v8::String::New(kICULocaleID), v8::String::New(result.icu_id));
- locale->Set(v8::String::New(kRegionID), v8::String::New(region_id));
-
- return handle_scope.Close(locale);
-}
-
-bool I18NLocale::GetBestMatchForRegionID(
- const char* locale_id, v8::Handle<v8::Value> region_id, char* result) {
- if (region_id->IsString() && region_id->ToString()->Length() != 0) {
- icu::Locale user_locale(
- icu::Locale("und", *v8::String::Utf8Value(region_id->ToString())));
- I18NUtils::StrNCopy(
- result, ULOC_COUNTRY_CAPACITY, user_locale.getCountry());
- return true;
- }
- // Maximize locale_id to infer the region (e.g. expand "de" to "de-Latn-DE"
- // and grab "DE" from the result).
- UErrorCode status = U_ZERO_ERROR;
- char maximized_locale[ULOC_FULLNAME_CAPACITY];
- uloc_addLikelySubtags(
- locale_id, maximized_locale, ULOC_FULLNAME_CAPACITY, &status);
- uloc_getCountry(maximized_locale, result, ULOC_COUNTRY_CAPACITY, &status);
-
- return !U_FAILURE(status);
-}
-
-} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/extensions/experimental/i18n-utils.cc b/src/3rdparty/v8/src/extensions/experimental/i18n-utils.cc
deleted file mode 100644
index dc2be1a..0000000
--- a/src/3rdparty/v8/src/extensions/experimental/i18n-utils.cc
+++ /dev/null
@@ -1,87 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "src/extensions/experimental/i18n-utils.h"
-
-#include <string.h>
-
-#include "unicode/unistr.h"
-
-namespace v8 {
-namespace internal {
-
-// static
-void I18NUtils::StrNCopy(char* dest, int length, const char* src) {
- if (!dest || !src) return;
-
- strncpy(dest, src, length);
- dest[length - 1] = '\0';
-}
-
-// static
-bool I18NUtils::ExtractStringSetting(const v8::Handle<v8::Object>& settings,
- const char* setting,
- icu::UnicodeString* result) {
- if (!setting || !result) return false;
-
- v8::HandleScope handle_scope;
- v8::TryCatch try_catch;
- v8::Handle<v8::Value> value = settings->Get(v8::String::New(setting));
- if (try_catch.HasCaught()) {
- return false;
- }
- // No need to check if |value| is empty because it's taken care of
- // by TryCatch above.
- if (!value->IsUndefined() && !value->IsNull() && value->IsString()) {
- v8::String::Utf8Value utf8_value(value);
- if (*utf8_value == NULL) return false;
- result->setTo(icu::UnicodeString::fromUTF8(*utf8_value));
- return true;
- }
- return false;
-}
-
-// static
-void I18NUtils::AsciiToUChar(const char* source,
- int32_t source_length,
- UChar* target,
- int32_t target_length) {
- int32_t length =
- source_length < target_length ? source_length : target_length;
-
- if (length <= 0) {
- return;
- }
-
- for (int32_t i = 0; i < length - 1; ++i) {
- target[i] = static_cast<UChar>(source[i]);
- }
-
- target[length - 1] = 0x0u;
-}
-
-} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/extensions/experimental/i18n.js b/src/3rdparty/v8/src/extensions/experimental/i18n.js
deleted file mode 100644
index 56bcf9e..0000000
--- a/src/3rdparty/v8/src/extensions/experimental/i18n.js
+++ /dev/null
@@ -1,380 +0,0 @@
-// Copyright 2006-2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// TODO(cira): Rename v8Locale into LocaleInfo once we have stable API.
-/**
- * LocaleInfo class is an aggregate class of all i18n API calls.
- * @param {Object} settings - localeID and regionID to create LocaleInfo from.
- * {Array.<string>|string} settings.localeID -
- * Unicode identifier of the locale.
- * See http://unicode.org/reports/tr35/#BCP_47_Conformance
- * {string} settings.regionID - ISO3166 region ID with addition of
- * invalid, undefined and reserved region codes.
- * @constructor
- */
-v8Locale = function(settings) {
- native function NativeJSLocale();
-
- // Assume user wanted to do v8Locale("sr");
- if (typeof(settings) === "string") {
- settings = {'localeID': settings};
- }
-
- var properties = NativeJSLocale(
- v8Locale.__createSettingsOrDefault(settings, {'localeID': 'root'}));
-
- // Keep the resolved ICU locale ID around to avoid resolving localeID to
- // ICU locale ID every time BreakIterator, Collator and so forth are called.
- this.__icuLocaleID = properties.icuLocaleID;
- this.options = {'localeID': properties.localeID,
- 'regionID': properties.regionID};
-};
-
-/**
- * Clones existing locale with possible overrides for some of the options.
- * @param {!Object} settings - overrides for current locale settings.
- * @returns {Object} - new LocaleInfo object.
- */
-v8Locale.prototype.derive = function(settings) {
- return new v8Locale(
- v8Locale.__createSettingsOrDefault(settings, this.options));
-};
-
-/**
- * v8BreakIterator class implements locale aware segmenatation.
- * It is not part of EcmaScript proposal.
- * @param {Object} locale - locale object to pass to break
- * iterator implementation.
- * @param {string} type - type of segmenatation:
- * - character
- * - word
- * - sentence
- * - line
- * @private
- * @constructor
- */
-v8Locale.v8BreakIterator = function(locale, type) {
- native function NativeJSBreakIterator();
-
- locale = v8Locale.__createLocaleOrDefault(locale);
- // BCP47 ID would work in this case, but we use ICU locale for consistency.
- var iterator = NativeJSBreakIterator(locale.__icuLocaleID, type);
- iterator.type = type;
- return iterator;
-};
-
-/**
- * Type of the break we encountered during previous iteration.
- * @type{Enum}
- */
-v8Locale.v8BreakIterator.BreakType = {
- 'unknown': -1,
- 'none': 0,
- 'number': 100,
- 'word': 200,
- 'kana': 300,
- 'ideo': 400
-};
-
-/**
- * Creates new v8BreakIterator based on current locale.
- * @param {string} - type of segmentation. See constructor.
- * @returns {Object} - new v8BreakIterator object.
- */
-v8Locale.prototype.v8CreateBreakIterator = function(type) {
- return new v8Locale.v8BreakIterator(this, type);
-};
-
-// TODO(jungshik): Set |collator.options| to actually recognized / resolved
-// values.
-/**
- * Collator class implements locale-aware sort.
- * @param {Object} locale - locale object to pass to collator implementation.
- * @param {Object} settings - collation flags:
- * - ignoreCase
- * - ignoreAccents
- * - numeric
- * @private
- * @constructor
- */
-v8Locale.Collator = function(locale, settings) {
- native function NativeJSCollator();
-
- locale = v8Locale.__createLocaleOrDefault(locale);
- var collator = NativeJSCollator(
- locale.__icuLocaleID, v8Locale.__createSettingsOrDefault(settings, {}));
- return collator;
-};
-
-/**
- * Creates new Collator based on current locale.
- * @param {Object} - collation flags. See constructor.
- * @returns {Object} - new Collator object.
- */
-v8Locale.prototype.createCollator = function(settings) {
- return new v8Locale.Collator(this, settings);
-};
-
-/**
- * DateTimeFormat class implements locale-aware date and time formatting.
- * Constructor is not part of public API.
- * @param {Object} locale - locale object to pass to formatter.
- * @param {Object} settings - formatting flags:
- * - skeleton
- * - dateStyle
- * - timeStyle
- * @private
- * @constructor
- */
-v8Locale.__DateTimeFormat = function(locale, settings) {
- native function NativeJSDateTimeFormat();
-
- settings = v8Locale.__createSettingsOrDefault(settings, {});
-
- var cleanSettings = {};
- if (settings.hasOwnProperty('skeleton')) {
- cleanSettings['skeleton'] = settings['skeleton'];
- } else {
- cleanSettings = {};
- if (settings.hasOwnProperty('dateStyle')) {
- var ds = settings['dateStyle'];
- if (!/^(short|medium|long|full)$/.test(ds)) ds = 'short';
- cleanSettings['dateStyle'] = ds;
- } else if (settings.hasOwnProperty('dateType')) {
- // Obsolete. New spec requires dateStyle, but we'll keep this around
- // for current users.
- // TODO(cira): Remove when all internal users switch to dateStyle.
- var dt = settings['dateType'];
- if (!/^(short|medium|long|full)$/.test(dt)) dt = 'short';
- cleanSettings['dateStyle'] = dt;
- }
-
- if (settings.hasOwnProperty('timeStyle')) {
- var ts = settings['timeStyle'];
- if (!/^(short|medium|long|full)$/.test(ts)) ts = 'short';
- cleanSettings['timeStyle'] = ts;
- } else if (settings.hasOwnProperty('timeType')) {
- // TODO(cira): Remove when all internal users switch to timeStyle.
- var tt = settings['timeType'];
- if (!/^(short|medium|long|full)$/.test(tt)) tt = 'short';
- cleanSettings['timeStyle'] = tt;
- }
- }
-
- // Default is to show short date and time.
- if (!cleanSettings.hasOwnProperty('skeleton') &&
- !cleanSettings.hasOwnProperty('dateStyle') &&
- !cleanSettings.hasOwnProperty('timeStyle')) {
- cleanSettings = {'dateStyle': 'short',
- 'timeStyle': 'short'};
- }
-
- locale = v8Locale.__createLocaleOrDefault(locale);
- var formatter = NativeJSDateTimeFormat(locale.__icuLocaleID, cleanSettings);
-
- // NativeJSDateTimeFormat creates formatter.options for us, we just need
- // to append actual settings to it.
- for (key in cleanSettings) {
- formatter.options[key] = cleanSettings[key];
- }
-
- /**
- * Clones existing date time format with possible overrides for some
- * of the options.
- * @param {!Object} overrideSettings - overrides for current format settings.
- * @returns {Object} - new DateTimeFormat object.
- * @public
- */
- formatter.derive = function(overrideSettings) {
- // To remove a setting user can specify undefined as its value. We'll remove
- // it from the map in that case.
- for (var prop in overrideSettings) {
- if (settings.hasOwnProperty(prop) && !overrideSettings[prop]) {
- delete settings[prop];
- }
- }
- return new v8Locale.__DateTimeFormat(
- locale, v8Locale.__createSettingsOrDefault(overrideSettings, settings));
- };
-
- return formatter;
-};
-
-/**
- * Creates new DateTimeFormat based on current locale.
- * @param {Object} - formatting flags. See constructor.
- * @returns {Object} - new DateTimeFormat object.
- */
-v8Locale.prototype.createDateTimeFormat = function(settings) {
- return new v8Locale.__DateTimeFormat(this, settings);
-};
-
-/**
- * NumberFormat class implements locale-aware number formatting.
- * Constructor is not part of public API.
- * @param {Object} locale - locale object to pass to formatter.
- * @param {Object} settings - formatting flags:
- * - skeleton
- * - pattern
- * - style - decimal, currency, percent or scientific
- * - currencyCode - ISO 4217 3-letter currency code
- * @private
- * @constructor
- */
-v8Locale.__NumberFormat = function(locale, settings) {
- native function NativeJSNumberFormat();
-
- settings = v8Locale.__createSettingsOrDefault(settings, {});
-
- var cleanSettings = {};
- if (settings.hasOwnProperty('skeleton')) {
- // Assign skeleton to cleanSettings and fix invalid currency pattern
- // if present - 'ooxo' becomes 'o'.
- cleanSettings['skeleton'] =
- settings['skeleton'].replace(/\u00a4+[^\u00a4]+\u00a4+/g, '\u00a4');
- } else if (settings.hasOwnProperty('pattern')) {
- cleanSettings['pattern'] = settings['pattern'];
- } else if (settings.hasOwnProperty('style')) {
- var style = settings['style'];
- if (!/^(decimal|currency|percent|scientific)$/.test(style)) {
- style = 'decimal';
- }
- cleanSettings['style'] = style;
- }
-
- // Default is to show decimal style.
- if (!cleanSettings.hasOwnProperty('skeleton') &&
- !cleanSettings.hasOwnProperty('pattern') &&
- !cleanSettings.hasOwnProperty('style')) {
- cleanSettings = {'style': 'decimal'};
- }
-
- // Add currency code if available and valid (3-letter ASCII code).
- if (settings.hasOwnProperty('currencyCode') &&
- /^[a-zA-Z]{3}$/.test(settings['currencyCode'])) {
- cleanSettings['currencyCode'] = settings['currencyCode'].toUpperCase();
- }
-
- locale = v8Locale.__createLocaleOrDefault(locale);
- // Pass in region ID for proper currency detection. Use ZZ if region is empty.
- var region = locale.options.regionID !== '' ? locale.options.regionID : 'ZZ';
- var formatter = NativeJSNumberFormat(
- locale.__icuLocaleID, 'und_' + region, cleanSettings);
-
- // ICU doesn't always uppercase the currency code.
- if (formatter.options.hasOwnProperty('currencyCode')) {
- formatter.options['currencyCode'] =
- formatter.options['currencyCode'].toUpperCase();
- }
-
- for (key in cleanSettings) {
- // Don't overwrite keys that are alredy in.
- if (formatter.options.hasOwnProperty(key)) continue;
-
- formatter.options[key] = cleanSettings[key];
- }
-
- /**
- * Clones existing number format with possible overrides for some
- * of the options.
- * @param {!Object} overrideSettings - overrides for current format settings.
- * @returns {Object} - new or cached NumberFormat object.
- * @public
- */
- formatter.derive = function(overrideSettings) {
- // To remove a setting user can specify undefined as its value. We'll remove
- // it from the map in that case.
- for (var prop in overrideSettings) {
- if (settings.hasOwnProperty(prop) && !overrideSettings[prop]) {
- delete settings[prop];
- }
- }
- return new v8Locale.__NumberFormat(
- locale, v8Locale.__createSettingsOrDefault(overrideSettings, settings));
- };
-
- return formatter;
-};
-
-/**
- * Creates new NumberFormat based on current locale.
- * @param {Object} - formatting flags. See constructor.
- * @returns {Object} - new or cached NumberFormat object.
- */
-v8Locale.prototype.createNumberFormat = function(settings) {
- return new v8Locale.__NumberFormat(this, settings);
-};
-
-/**
- * Merges user settings and defaults.
- * Settings that are not of object type are rejected.
- * Actual property values are not validated, but whitespace is trimmed if they
- * are strings.
- * @param {!Object} settings - user provided settings.
- * @param {!Object} defaults - default values for this type of settings.
- * @returns {Object} - valid settings object.
- * @private
- */
-v8Locale.__createSettingsOrDefault = function(settings, defaults) {
- if (!settings || typeof(settings) !== 'object' ) {
- return defaults;
- }
- for (var key in defaults) {
- if (!settings.hasOwnProperty(key)) {
- settings[key] = defaults[key];
- }
- }
- // Clean up settings.
- for (var key in settings) {
- // Trim whitespace.
- if (typeof(settings[key]) === "string") {
- settings[key] = settings[key].trim();
- }
- // Remove all properties that are set to undefined/null. This allows
- // derive method to remove a setting we don't need anymore.
- if (!settings[key]) {
- delete settings[key];
- }
- }
-
- return settings;
-};
-
-/**
- * If locale is valid (defined and of v8Locale type) we return it. If not
- * we create default locale and return it.
- * @param {!Object} locale - user provided locale.
- * @returns {Object} - v8Locale object.
- * @private
- */
-v8Locale.__createLocaleOrDefault = function(locale) {
- if (!locale || !(locale instanceof v8Locale)) {
- return new v8Locale();
- } else {
- return locale;
- }
-};
diff --git a/src/3rdparty/v8/src/extensions/experimental/language-matcher.cc b/src/3rdparty/v8/src/extensions/experimental/language-matcher.cc
deleted file mode 100644
index 127e571..0000000
--- a/src/3rdparty/v8/src/extensions/experimental/language-matcher.cc
+++ /dev/null
@@ -1,252 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// TODO(cira): Remove LanguageMatcher from v8 when ICU implements
-// language matching API.
-
-#include "src/extensions/experimental/language-matcher.h"
-
-#include <string.h>
-
-#include "src/extensions/experimental/i18n-utils.h"
-#include "unicode/datefmt.h" // For getAvailableLocales
-#include "unicode/locid.h"
-#include "unicode/uloc.h"
-
-namespace v8 {
-namespace internal {
-
-const unsigned int LanguageMatcher::kLanguageWeight = 75;
-const unsigned int LanguageMatcher::kScriptWeight = 20;
-const unsigned int LanguageMatcher::kRegionWeight = 5;
-const unsigned int LanguageMatcher::kThreshold = 50;
-const unsigned int LanguageMatcher::kPositionBonus = 1;
-const char* const LanguageMatcher::kDefaultLocale = "root";
-
-static const char* GetLanguageException(const char*);
-static bool BCP47ToICUFormat(const char*, char*);
-static int CompareLocaleSubtags(const char*, const char*);
-static bool BuildLocaleName(const char*, const char*, LocaleIDMatch*);
-
-LocaleIDMatch::LocaleIDMatch()
- : score(-1) {
- I18NUtils::StrNCopy(
- bcp47_id, ULOC_FULLNAME_CAPACITY, LanguageMatcher::kDefaultLocale);
-
- I18NUtils::StrNCopy(
- icu_id, ULOC_FULLNAME_CAPACITY, LanguageMatcher::kDefaultLocale);
-}
-
-LocaleIDMatch& LocaleIDMatch::operator=(const LocaleIDMatch& rhs) {
- I18NUtils::StrNCopy(this->bcp47_id, ULOC_FULLNAME_CAPACITY, rhs.bcp47_id);
- I18NUtils::StrNCopy(this->icu_id, ULOC_FULLNAME_CAPACITY, rhs.icu_id);
- this->score = rhs.score;
-
- return *this;
-}
-
-// static
-void LanguageMatcher::GetBestMatchForPriorityList(
- v8::Handle<v8::Array> locales, LocaleIDMatch* result) {
- v8::HandleScope handle_scope;
-
- unsigned int position_bonus = locales->Length() * kPositionBonus;
-
- int max_score = 0;
- LocaleIDMatch match;
- for (unsigned int i = 0; i < locales->Length(); ++i) {
- position_bonus -= kPositionBonus;
-
- v8::TryCatch try_catch;
- v8::Local<v8::Value> locale_id = locales->Get(v8::Integer::New(i));
-
- // Return default if exception is raised when reading parameter.
- if (try_catch.HasCaught()) break;
-
- // JavaScript arrays can be heterogenous so check each item
- // if it's a string.
- if (!locale_id->IsString()) continue;
-
- if (!CompareToSupportedLocaleIDList(locale_id->ToString(), &match)) {
- continue;
- }
-
- // Skip items under threshold.
- if (match.score < kThreshold) continue;
-
- match.score += position_bonus;
- if (match.score > max_score) {
- *result = match;
-
- max_score = match.score;
- }
- }
-}
-
-// static
-void LanguageMatcher::GetBestMatchForString(
- v8::Handle<v8::String> locale, LocaleIDMatch* result) {
- LocaleIDMatch match;
-
- if (CompareToSupportedLocaleIDList(locale, &match) &&
- match.score >= kThreshold) {
- *result = match;
- }
-}
-
-// static
-bool LanguageMatcher::CompareToSupportedLocaleIDList(
- v8::Handle<v8::String> locale_id, LocaleIDMatch* result) {
- static int32_t available_count = 0;
- // Depending on how ICU data is built, locales returned by
- // Locale::getAvailableLocale() are not guaranteed to support DateFormat,
- // Collation and other services. We can call getAvailableLocale() of all the
- // services we want to support and take the intersection of them all, but
- // using DateFormat::getAvailableLocales() should suffice.
- // TODO(cira): Maybe make this thread-safe?
- static const icu::Locale* available_locales =
- icu::DateFormat::getAvailableLocales(available_count);
-
- // Skip this locale_id if it's not in ASCII.
- static LocaleIDMatch default_match;
- v8::String::AsciiValue ascii_value(locale_id);
- if (*ascii_value == NULL) return false;
-
- char locale[ULOC_FULLNAME_CAPACITY];
- if (!BCP47ToICUFormat(*ascii_value, locale)) return false;
-
- icu::Locale input_locale(locale);
-
- // Position of the best match locale in list of available locales.
- int position = -1;
- const char* language = GetLanguageException(input_locale.getLanguage());
- const char* script = input_locale.getScript();
- const char* region = input_locale.getCountry();
- for (int32_t i = 0; i < available_count; ++i) {
- int current_score = 0;
- int sign =
- CompareLocaleSubtags(language, available_locales[i].getLanguage());
- current_score += sign * kLanguageWeight;
-
- sign = CompareLocaleSubtags(script, available_locales[i].getScript());
- current_score += sign * kScriptWeight;
-
- sign = CompareLocaleSubtags(region, available_locales[i].getCountry());
- current_score += sign * kRegionWeight;
-
- if (current_score >= kThreshold && current_score > result->score) {
- result->score = current_score;
- position = i;
- }
- }
-
- // Didn't find any good matches so use defaults.
- if (position == -1) return false;
-
- return BuildLocaleName(available_locales[position].getBaseName(),
- input_locale.getName(), result);
-}
-
-// For some unsupported language subtags it is better to fallback to related
-// language that is supported than to default.
-static const char* GetLanguageException(const char* language) {
- // Serbo-croatian to Serbian.
- if (!strcmp(language, "sh")) return "sr";
-
- // Norweigan to Norweiaan to Norwegian Bokmal.
- if (!strcmp(language, "no")) return "nb";
-
- // Moldavian to Romanian.
- if (!strcmp(language, "mo")) return "ro";
-
- // Tagalog to Filipino.
- if (!strcmp(language, "tl")) return "fil";
-
- return language;
-}
-
-// Converts user input from BCP47 locale id format to ICU compatible format.
-// Returns false if uloc_forLanguageTag call fails or if extension is too long.
-static bool BCP47ToICUFormat(const char* locale_id, char* result) {
- UErrorCode status = U_ZERO_ERROR;
- int32_t locale_size = 0;
-
- char locale[ULOC_FULLNAME_CAPACITY];
- I18NUtils::StrNCopy(locale, ULOC_FULLNAME_CAPACITY, locale_id);
-
- // uloc_forLanguageTag has a bug where long extension can crash the code.
- // We need to check if extension part of language id conforms to the length.
- // ICU bug: http://bugs.icu-project.org/trac/ticket/8519
- const char* extension = strstr(locale_id, "-u-");
- if (extension != NULL &&
- strlen(extension) > ULOC_KEYWORD_AND_VALUES_CAPACITY) {
- // Truncate to get non-crashing string, but still preserve base language.
- int base_length = strlen(locale_id) - strlen(extension);
- locale[base_length] = '\0';
- }
-
- uloc_forLanguageTag(locale, result, ULOC_FULLNAME_CAPACITY,
- &locale_size, &status);
- return !U_FAILURE(status);
-}
-
-// Compares locale id subtags.
-// Returns 1 for match or -1 for mismatch.
-static int CompareLocaleSubtags(const char* lsubtag, const char* rsubtag) {
- return strcmp(lsubtag, rsubtag) == 0 ? 1 : -1;
-}
-
-// Builds a BCP47 compliant locale id from base name of matched locale and
-// full user specified locale.
-// Returns false if uloc_toLanguageTag failed to convert locale id.
-// Example:
-// base_name of matched locale (ICU ID): de_DE
-// input_locale_name (ICU ID): de_AT@collation=phonebk
-// result (ICU ID): de_DE@collation=phonebk
-// result (BCP47 ID): de-DE-u-co-phonebk
-static bool BuildLocaleName(const char* base_name,
- const char* input_locale_name,
- LocaleIDMatch* result) {
- I18NUtils::StrNCopy(result->icu_id, ULOC_LANG_CAPACITY, base_name);
-
- // Get extensions (if any) from the original locale.
- const char* extension = strchr(input_locale_name, ULOC_KEYWORD_SEPARATOR);
- if (extension != NULL) {
- I18NUtils::StrNCopy(result->icu_id + strlen(base_name),
- ULOC_KEYWORD_AND_VALUES_CAPACITY, extension);
- } else {
- I18NUtils::StrNCopy(result->icu_id, ULOC_LANG_CAPACITY, base_name);
- }
-
- // Convert ICU locale name into BCP47 format.
- UErrorCode status = U_ZERO_ERROR;
- uloc_toLanguageTag(result->icu_id, result->bcp47_id,
- ULOC_FULLNAME_CAPACITY, false, &status);
- return !U_FAILURE(status);
-}
-
-} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/extensions/experimental/language-matcher.h b/src/3rdparty/v8/src/extensions/experimental/language-matcher.h
deleted file mode 100644
index dd29304..0000000
--- a/src/3rdparty/v8/src/extensions/experimental/language-matcher.h
+++ /dev/null
@@ -1,95 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#ifndef V8_EXTENSIONS_EXPERIMENTAL_LANGUAGE_MATCHER_H_
-#define V8_EXTENSIONS_EXPERIMENTAL_LANGUAGE_MATCHER_H_
-
-#include "include/v8.h"
-
-#include "unicode/uloc.h"
-
-namespace v8 {
-namespace internal {
-
-struct LocaleIDMatch {
- LocaleIDMatch();
-
- LocaleIDMatch& operator=(const LocaleIDMatch& rhs);
-
- // Bcp47 locale id - "de-Latn-DE-u-co-phonebk".
- char bcp47_id[ULOC_FULLNAME_CAPACITY];
-
- // ICU locale id - "de_Latn_DE@collation=phonebk".
- char icu_id[ULOC_FULLNAME_CAPACITY];
-
- // Score for this locale.
- int score;
-};
-
-class LanguageMatcher {
- public:
- // Default locale.
- static const char* const kDefaultLocale;
-
- // Finds best supported locale for a given a list of locale identifiers.
- // It preserves the extension for the locale id.
- static void GetBestMatchForPriorityList(
- v8::Handle<v8::Array> locale_list, LocaleIDMatch* result);
-
- // Finds best supported locale for a single locale identifier.
- // It preserves the extension for the locale id.
- static void GetBestMatchForString(
- v8::Handle<v8::String> locale_id, LocaleIDMatch* result);
-
- private:
- // If langauge subtags match add this amount to the score.
- static const unsigned int kLanguageWeight;
-
- // If script subtags match add this amount to the score.
- static const unsigned int kScriptWeight;
-
- // If region subtags match add this amount to the score.
- static const unsigned int kRegionWeight;
-
- // LocaleID match score has to be over this number to accept the match.
- static const unsigned int kThreshold;
-
- // For breaking ties in priority queue.
- static const unsigned int kPositionBonus;
-
- LanguageMatcher();
-
- // Compares locale_id to the supported list of locales and returns best
- // match.
- // Returns false if it fails to convert locale id from ICU to BCP47 format.
- static bool CompareToSupportedLocaleIDList(v8::Handle<v8::String> locale_id,
- LocaleIDMatch* result);
-};
-
-} } // namespace v8::internal
-
-#endif // V8_EXTENSIONS_EXPERIMENTAL_LANGUAGE_MATCHER_H_
diff --git a/src/3rdparty/v8/src/extensions/experimental/number-format.cc b/src/3rdparty/v8/src/extensions/experimental/number-format.cc
deleted file mode 100644
index 2932c52..0000000
--- a/src/3rdparty/v8/src/extensions/experimental/number-format.cc
+++ /dev/null
@@ -1,374 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "src/extensions/experimental/number-format.h"
-
-#include <string.h>
-
-#include "src/extensions/experimental/i18n-utils.h"
-#include "unicode/dcfmtsym.h"
-#include "unicode/decimfmt.h"
-#include "unicode/locid.h"
-#include "unicode/numfmt.h"
-#include "unicode/uchar.h"
-#include "unicode/ucurr.h"
-#include "unicode/unum.h"
-#include "unicode/uversion.h"
-
-namespace v8 {
-namespace internal {
-
-const int NumberFormat::kCurrencyCodeLength = 4;
-
-v8::Persistent<v8::FunctionTemplate> NumberFormat::number_format_template_;
-
-static icu::DecimalFormat* CreateNumberFormat(v8::Handle<v8::String>,
- v8::Handle<v8::String>,
- v8::Handle<v8::Object>);
-static icu::DecimalFormat* CreateFormatterFromSkeleton(
- const icu::Locale&, const icu::UnicodeString&, UErrorCode*);
-static icu::DecimalFormatSymbols* GetFormatSymbols(const icu::Locale&);
-static bool GetCurrencyCode(const icu::Locale&,
- const char* const,
- v8::Handle<v8::Object>,
- UChar*);
-static v8::Handle<v8::Value> ThrowUnexpectedObjectError();
-
-icu::DecimalFormat* NumberFormat::UnpackNumberFormat(
- v8::Handle<v8::Object> obj) {
- if (number_format_template_->HasInstance(obj)) {
- return static_cast<icu::DecimalFormat*>(
- obj->GetPointerFromInternalField(0));
- }
-
- return NULL;
-}
-
-void NumberFormat::DeleteNumberFormat(v8::Persistent<v8::Value> object,
- void* param) {
- v8::Persistent<v8::Object> persistent_object =
- v8::Persistent<v8::Object>::Cast(object);
-
- // First delete the hidden C++ object.
- // Unpacking should never return NULL here. That would only happen if
- // this method is used as the weak callback for persistent handles not
- // pointing to a number formatter.
- delete UnpackNumberFormat(persistent_object);
-
- // Then dispose of the persistent handle to JS object.
- persistent_object.Dispose();
-}
-
-v8::Handle<v8::Value> NumberFormat::Format(const v8::Arguments& args) {
- v8::HandleScope handle_scope;
-
- if (args.Length() != 1 || !args[0]->IsNumber()) {
- // Just return NaN on invalid input.
- return v8::String::New("NaN");
- }
-
- icu::DecimalFormat* number_format = UnpackNumberFormat(args.Holder());
- if (!number_format) {
- return ThrowUnexpectedObjectError();
- }
-
- // ICU will handle actual NaN value properly and return NaN string.
- icu::UnicodeString result;
- number_format->format(args[0]->NumberValue(), result);
-
- return v8::String::New(
- reinterpret_cast<const uint16_t*>(result.getBuffer()), result.length());
-}
-
-v8::Handle<v8::Value> NumberFormat::JSNumberFormat(const v8::Arguments& args) {
- v8::HandleScope handle_scope;
-
- // Expect locale id, region id and settings.
- if (args.Length() != 3 ||
- !args[0]->IsString() || !args[1]->IsString() || !args[2]->IsObject()) {
- return v8::ThrowException(v8::Exception::SyntaxError(
- v8::String::New("Locale, region and number settings are required.")));
- }
-
- icu::DecimalFormat* number_format = CreateNumberFormat(
- args[0]->ToString(), args[1]->ToString(), args[2]->ToObject());
-
- if (number_format_template_.IsEmpty()) {
- v8::Local<v8::FunctionTemplate> raw_template(v8::FunctionTemplate::New());
-
- raw_template->SetClassName(v8::String::New("v8Locale.NumberFormat"));
-
- // Define internal field count on instance template.
- v8::Local<v8::ObjectTemplate> object_template =
- raw_template->InstanceTemplate();
-
- // Set aside internal field for icu number formatter.
- object_template->SetInternalFieldCount(1);
-
- // Define all of the prototype methods on prototype template.
- v8::Local<v8::ObjectTemplate> proto = raw_template->PrototypeTemplate();
- proto->Set(v8::String::New("format"),
- v8::FunctionTemplate::New(Format));
-
- number_format_template_ =
- v8::Persistent<v8::FunctionTemplate>::New(raw_template);
- }
-
- // Create an empty object wrapper.
- v8::Local<v8::Object> local_object =
- number_format_template_->GetFunction()->NewInstance();
- v8::Persistent<v8::Object> wrapper =
- v8::Persistent<v8::Object>::New(local_object);
-
- // Set number formatter as internal field of the resulting JS object.
- wrapper->SetPointerInInternalField(0, number_format);
-
- // Create options key.
- v8::Local<v8::Object> options = v8::Object::New();
-
- // Show what ICU decided to use for easier problem tracking.
- // Keep it as v8 specific extension.
- icu::UnicodeString pattern;
- number_format->toPattern(pattern);
- options->Set(v8::String::New("v8ResolvedPattern"),
- v8::String::New(reinterpret_cast<const uint16_t*>(
- pattern.getBuffer()), pattern.length()));
-
- // Set resolved currency code in options.currency if not empty.
- icu::UnicodeString currency(number_format->getCurrency());
- if (!currency.isEmpty()) {
- options->Set(v8::String::New("currencyCode"),
- v8::String::New(reinterpret_cast<const uint16_t*>(
- currency.getBuffer()), currency.length()));
- }
-
- wrapper->Set(v8::String::New("options"), options);
-
- // Make object handle weak so we can delete iterator once GC kicks in.
- wrapper.MakeWeak(NULL, DeleteNumberFormat);
-
- return wrapper;
-}
-
-// Returns DecimalFormat.
-static icu::DecimalFormat* CreateNumberFormat(v8::Handle<v8::String> locale,
- v8::Handle<v8::String> region,
- v8::Handle<v8::Object> settings) {
- v8::HandleScope handle_scope;
-
- v8::String::AsciiValue ascii_locale(locale);
- icu::Locale icu_locale(*ascii_locale);
-
- // Make formatter from skeleton.
- icu::DecimalFormat* number_format = NULL;
- UErrorCode status = U_ZERO_ERROR;
- icu::UnicodeString setting;
-
- if (I18NUtils::ExtractStringSetting(settings, "skeleton", &setting)) {
- // TODO(cira): Use ICU skeleton once
- // http://bugs.icu-project.org/trac/ticket/8610 is resolved.
- number_format = CreateFormatterFromSkeleton(icu_locale, setting, &status);
- } else if (I18NUtils::ExtractStringSetting(settings, "pattern", &setting)) {
- number_format =
- new icu::DecimalFormat(setting, GetFormatSymbols(icu_locale), status);
- } else if (I18NUtils::ExtractStringSetting(settings, "style", &setting)) {
- if (setting == UNICODE_STRING_SIMPLE("currency")) {
- number_format = static_cast<icu::DecimalFormat*>(
- icu::NumberFormat::createCurrencyInstance(icu_locale, status));
- } else if (setting == UNICODE_STRING_SIMPLE("percent")) {
- number_format = static_cast<icu::DecimalFormat*>(
- icu::NumberFormat::createPercentInstance(icu_locale, status));
- } else if (setting == UNICODE_STRING_SIMPLE("scientific")) {
- number_format = static_cast<icu::DecimalFormat*>(
- icu::NumberFormat::createScientificInstance(icu_locale, status));
- } else {
- // Make it decimal in any other case.
- number_format = static_cast<icu::DecimalFormat*>(
- icu::NumberFormat::createInstance(icu_locale, status));
- }
- }
-
- if (U_FAILURE(status)) {
- delete number_format;
- status = U_ZERO_ERROR;
- number_format = static_cast<icu::DecimalFormat*>(
- icu::NumberFormat::createInstance(icu_locale, status));
- }
-
- // Attach appropriate currency code to the formatter.
- // It affects currency formatters only.
- // Region is full language identifier in form 'und_' + region id.
- v8::String::AsciiValue ascii_region(region);
-
- UChar currency_code[NumberFormat::kCurrencyCodeLength];
- if (GetCurrencyCode(icu_locale, *ascii_region, settings, currency_code)) {
- number_format->setCurrency(currency_code, status);
- }
-
- return number_format;
-}
-
-// Generates ICU number format pattern from given skeleton.
-// TODO(cira): Remove once ICU includes equivalent method
-// (see http://bugs.icu-project.org/trac/ticket/8610).
-static icu::DecimalFormat* CreateFormatterFromSkeleton(
- const icu::Locale& icu_locale,
- const icu::UnicodeString& skeleton,
- UErrorCode* status) {
- icu::DecimalFormat skeleton_format(
- skeleton, GetFormatSymbols(icu_locale), *status);
-
- // Find out if skeleton contains currency or percent symbol and create
- // proper instance to tweak.
- icu::DecimalFormat* base_format = NULL;
-
- // UChar representation of U+00A4 currency symbol.
- const UChar currency_symbol = 0xA4u;
-
- int32_t index = skeleton.indexOf(currency_symbol);
- if (index != -1) {
- // Find how many U+00A4 are there. There is at least one.
- // Case of non-consecutive U+00A4 is taken care of in i18n.js.
- int32_t end_index = skeleton.lastIndexOf(currency_symbol, index);
-
-#if (U_ICU_VERSION_MAJOR_NUM == 4) && (U_ICU_VERSION_MINOR_NUM <= 6)
- icu::NumberFormat::EStyles style;
- switch (end_index - index) {
- case 0:
- style = icu::NumberFormat::kCurrencyStyle;
- break;
- case 1:
- style = icu::NumberFormat::kIsoCurrencyStyle;
- break;
- default:
- style = icu::NumberFormat::kPluralCurrencyStyle;
- }
-#else // ICU version is 4.8 or above (we ignore versions below 4.0).
- UNumberFormatStyle style;
- switch (end_index - index) {
- case 0:
- style = UNUM_CURRENCY;
- break;
- case 1:
- style = UNUM_CURRENCY_ISO;
- break;
- default:
- style = UNUM_CURRENCY_PLURAL;
- }
-#endif
-
- base_format = static_cast<icu::DecimalFormat*>(
- icu::NumberFormat::createInstance(icu_locale, style, *status));
- } else if (skeleton.indexOf('%') != -1) {
- base_format = static_cast<icu::DecimalFormat*>(
- icu::NumberFormat::createPercentInstance(icu_locale, *status));
- } else {
- // TODO(cira): Handle scientific skeleton.
- base_format = static_cast<icu::DecimalFormat*>(
- icu::NumberFormat::createInstance(icu_locale, *status));
- }
-
- if (U_FAILURE(*status)) {
- delete base_format;
- return NULL;
- }
-
- // Copy important information from skeleton to the new formatter.
- // TODO(cira): copy rounding information from skeleton?
- base_format->setGroupingUsed(skeleton_format.isGroupingUsed());
-
- base_format->setMinimumIntegerDigits(
- skeleton_format.getMinimumIntegerDigits());
-
- base_format->setMinimumFractionDigits(
- skeleton_format.getMinimumFractionDigits());
-
- base_format->setMaximumFractionDigits(
- skeleton_format.getMaximumFractionDigits());
-
- return base_format;
-}
-
-// Gets decimal symbols for a locale.
-static icu::DecimalFormatSymbols* GetFormatSymbols(
- const icu::Locale& icu_locale) {
- UErrorCode status = U_ZERO_ERROR;
- icu::DecimalFormatSymbols* symbols =
- new icu::DecimalFormatSymbols(icu_locale, status);
-
- if (U_FAILURE(status)) {
- delete symbols;
- // Use symbols from default locale.
- symbols = new icu::DecimalFormatSymbols(status);
- }
-
- return symbols;
-}
-
-// Gets currency ISO 4217 3-letter code.
-// Check currencyCode setting first, then @currency=code and in the end
-// try to infer currency code from locale in the form 'und_' + region id.
-// Returns false in case of error.
-static bool GetCurrencyCode(const icu::Locale& icu_locale,
- const char* const und_region_locale,
- v8::Handle<v8::Object> settings,
- UChar* code) {
- UErrorCode status = U_ZERO_ERROR;
-
- // If there is user specified currency code, use it.
- icu::UnicodeString currency;
- if (I18NUtils::ExtractStringSetting(settings, "currencyCode", &currency)) {
- currency.extract(code, NumberFormat::kCurrencyCodeLength, status);
- return true;
- }
-
- // If ICU locale has -cu- currency code use it.
- char currency_code[NumberFormat::kCurrencyCodeLength];
- int32_t length = icu_locale.getKeywordValue(
- "currency", currency_code, NumberFormat::kCurrencyCodeLength, status);
- if (length != 0) {
- I18NUtils::AsciiToUChar(currency_code, length + 1,
- code, NumberFormat::kCurrencyCodeLength);
- return true;
- }
-
- // Otherwise infer currency code from the region id.
- ucurr_forLocale(
- und_region_locale, code, NumberFormat::kCurrencyCodeLength, &status);
-
- return !!U_SUCCESS(status);
-}
-
-// Throws a JavaScript exception.
-static v8::Handle<v8::Value> ThrowUnexpectedObjectError() {
- // Returns undefined, and schedules an exception to be thrown.
- return v8::ThrowException(v8::Exception::Error(
- v8::String::New("NumberFormat method called on an object "
- "that is not a NumberFormat.")));
-}
-
-} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/extensions/externalize-string-extension.cc b/src/3rdparty/v8/src/extensions/externalize-string-extension.cc
index 9fbf329..50d8761 100644
--- a/src/3rdparty/v8/src/extensions/externalize-string-extension.cc
+++ b/src/3rdparty/v8/src/extensions/externalize-string-extension.cc
@@ -133,11 +133,8 @@ v8::Handle<v8::Value> ExternalizeStringExtension::IsAscii(
void ExternalizeStringExtension::Register() {
- static ExternalizeStringExtension* externalize_extension = NULL;
- if (externalize_extension == NULL)
- externalize_extension = new ExternalizeStringExtension;
- static v8::DeclareExtension externalize_extension_declaration(
- externalize_extension);
+ static ExternalizeStringExtension externalize_extension;
+ static v8::DeclareExtension declaration(&externalize_extension);
}
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/extensions/gc-extension.cc b/src/3rdparty/v8/src/extensions/gc-extension.cc
index 48e8c42..f921552 100644
--- a/src/3rdparty/v8/src/extensions/gc-extension.cc
+++ b/src/3rdparty/v8/src/extensions/gc-extension.cc
@@ -40,14 +40,14 @@ v8::Handle<v8::FunctionTemplate> GCExtension::GetNativeFunction(
v8::Handle<v8::Value> GCExtension::GC(const v8::Arguments& args) {
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags, "gc extension");
return v8::Undefined();
}
void GCExtension::Register() {
static GCExtension gc_extension;
- static v8::DeclareExtension gc_extension_declaration(&gc_extension);
+ static v8::DeclareExtension declaration(&gc_extension);
}
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/factory.cc b/src/3rdparty/v8/src/factory.cc
index 76ca69d..e8a9f26 100644
--- a/src/3rdparty/v8/src/factory.cc
+++ b/src/3rdparty/v8/src/factory.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -77,11 +77,21 @@ Handle<StringDictionary> Factory::NewStringDictionary(int at_least_space_for) {
}
-Handle<NumberDictionary> Factory::NewNumberDictionary(int at_least_space_for) {
+Handle<SeededNumberDictionary> Factory::NewSeededNumberDictionary(
+ int at_least_space_for) {
ASSERT(0 <= at_least_space_for);
CALL_HEAP_FUNCTION(isolate(),
- NumberDictionary::Allocate(at_least_space_for),
- NumberDictionary);
+ SeededNumberDictionary::Allocate(at_least_space_for),
+ SeededNumberDictionary);
+}
+
+
+Handle<UnseededNumberDictionary> Factory::NewUnseededNumberDictionary(
+ int at_least_space_for) {
+ ASSERT(0 <= at_least_space_for);
+ CALL_HEAP_FUNCTION(isolate(),
+ UnseededNumberDictionary::Allocate(at_least_space_for),
+ UnseededNumberDictionary);
}
@@ -131,6 +141,20 @@ Handle<DeoptimizationOutputData> Factory::NewDeoptimizationOutputData(
}
+Handle<AccessorPair> Factory::NewAccessorPair() {
+ CALL_HEAP_FUNCTION(isolate(),
+ isolate()->heap()->AllocateAccessorPair(),
+ AccessorPair);
+}
+
+
+Handle<TypeFeedbackInfo> Factory::NewTypeFeedbackInfo() {
+ CALL_HEAP_FUNCTION(isolate(),
+ isolate()->heap()->AllocateTypeFeedbackInfo(),
+ TypeFeedbackInfo);
+}
+
+
// Symbols are created in the old generation (data space).
Handle<String> Factory::LookupSymbol(Vector<const char> string) {
CALL_HEAP_FUNCTION(isolate(),
@@ -303,7 +327,7 @@ Handle<Context> Factory::NewWithContext(Handle<JSFunction> function,
Handle<Context> Factory::NewBlockContext(
Handle<JSFunction> function,
Handle<Context> previous,
- Handle<SerializedScopeInfo> scope_info) {
+ Handle<ScopeInfo> scope_info) {
CALL_HEAP_FUNCTION(
isolate(),
isolate()->heap()->AllocateBlockContext(*function,
@@ -358,6 +382,8 @@ Handle<Script> Factory::NewScript(Handle<String> source) {
script->set_context_data(heap->undefined_value());
script->set_type(Smi::FromInt(Script::TYPE_NORMAL));
script->set_compilation_type(Smi::FromInt(Script::COMPILATION_TYPE_HOST));
+ script->set_compilation_state(
+ Smi::FromInt(Script::COMPILATION_STATE_INITIAL));
script->set_wrapper(*wrapper);
script->set_line_ends(heap->undefined_value());
script->set_eval_from_shared(heap->undefined_value());
@@ -468,8 +494,9 @@ Handle<Map> Factory::CopyMapDropTransitions(Handle<Map> src) {
Handle<Map> Factory::GetElementsTransitionMap(
Handle<JSObject> src,
ElementsKind elements_kind) {
- CALL_HEAP_FUNCTION(isolate(),
- src->GetElementsTransitionMap(elements_kind),
+ Isolate* i = isolate();
+ CALL_HEAP_FUNCTION(i,
+ src->GetElementsTransitionMap(i, elements_kind),
Map);
}
@@ -505,11 +532,15 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
PretenureFlag pretenure) {
Handle<JSFunction> result = BaseNewFunctionFromSharedFunctionInfo(
function_info,
- function_info->strict_mode()
- ? isolate()->strict_mode_function_map()
- : isolate()->function_map(),
+ function_info->is_classic_mode()
+ ? isolate()->function_map()
+ : isolate()->strict_mode_function_map(),
pretenure);
+ if (function_info->ic_age() != isolate()->heap()->global_ic_age()) {
+ function_info->ResetForNewContext(isolate()->heap()->global_ic_age());
+ }
+
result->set_context(*context);
if (!function_info->bound()) {
int number_of_literals = function_info->num_literals();
@@ -522,16 +553,13 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
context->global_context());
}
result->set_literals(*literals);
- } else {
- result->set_function_bindings(isolate()->heap()->empty_fixed_array());
}
- result->set_next_function_link(isolate()->heap()->undefined_value());
-
if (V8::UseCrankshaft() &&
FLAG_always_opt &&
result->is_compiled() &&
!function_info->is_toplevel() &&
- function_info->allows_lazy_compilation()) {
+ function_info->allows_lazy_compilation() &&
+ !function_info->optimization_disabled()) {
result->MarkForLazyRecompilation();
}
return result;
@@ -546,17 +574,19 @@ Handle<Object> Factory::NewNumber(double value,
}
-Handle<Object> Factory::NewNumberFromInt(int value) {
+Handle<Object> Factory::NewNumberFromInt(int32_t value,
+ PretenureFlag pretenure) {
CALL_HEAP_FUNCTION(
isolate(),
- isolate()->heap()->NumberFromInt32(value), Object);
+ isolate()->heap()->NumberFromInt32(value, pretenure), Object);
}
-Handle<Object> Factory::NewNumberFromUint(uint32_t value) {
+Handle<Object> Factory::NewNumberFromUint(uint32_t value,
+ PretenureFlag pretenure) {
CALL_HEAP_FUNCTION(
isolate(),
- isolate()->heap()->NumberFromUint32(value), Object);
+ isolate()->heap()->NumberFromUint32(value, pretenure), Object);
}
@@ -696,7 +726,7 @@ Handle<JSFunction> Factory::NewFunction(Handle<String> name,
// Allocate the function
Handle<JSFunction> function = NewFunction(name, the_hole_value());
- // Setup the code pointer in both the shared function info and in
+ // Set up the code pointer in both the shared function info and in
// the function itself.
function->shared()->set_code(*code);
function->set_code(*code);
@@ -727,7 +757,7 @@ Handle<JSFunction> Factory::NewFunctionWithPrototype(Handle<String> name,
// Allocate the function.
Handle<JSFunction> function = NewFunction(name, prototype);
- // Setup the code pointer in both the shared function info and in
+ // Set up the code pointer in both the shared function info and in
// the function itself.
function->shared()->set_code(*code);
function->set_code(*code);
@@ -735,12 +765,9 @@ Handle<JSFunction> Factory::NewFunctionWithPrototype(Handle<String> name,
if (force_initial_map ||
type != JS_OBJECT_TYPE ||
instance_size != JSObject::kHeaderSize) {
- ElementsKind default_elements_kind = FLAG_smi_only_arrays
- ? FAST_SMI_ONLY_ELEMENTS
- : FAST_ELEMENTS;
Handle<Map> initial_map = NewMap(type,
instance_size,
- default_elements_kind);
+ FAST_SMI_ONLY_ELEMENTS);
function->set_initial_map(*initial_map);
initial_map->set_constructor(*function);
}
@@ -749,7 +776,10 @@ Handle<JSFunction> Factory::NewFunctionWithPrototype(Handle<String> name,
// property that refers to the function.
SetPrototypeProperty(function, prototype);
// Currently safe because it is only invoked from Genesis.
- SetLocalPropertyNoThrow(prototype, constructor_symbol(), function, DONT_ENUM);
+ CHECK_NOT_EMPTY_HANDLE(isolate(),
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ prototype, constructor_symbol(),
+ function, DONT_ENUM));
return function;
}
@@ -757,7 +787,7 @@ Handle<JSFunction> Factory::NewFunctionWithPrototype(Handle<String> name,
Handle<JSFunction> Factory::NewFunctionWithoutPrototype(Handle<String> name,
Handle<Code> code) {
Handle<JSFunction> function = NewFunctionWithoutPrototype(name,
- kNonStrictMode);
+ CLASSIC_MODE);
function->shared()->set_code(*code);
function->set_code(*code);
ASSERT(!function->has_initial_map());
@@ -766,11 +796,11 @@ Handle<JSFunction> Factory::NewFunctionWithoutPrototype(Handle<String> name,
}
-Handle<SerializedScopeInfo> Factory::NewSerializedScopeInfo(int length) {
+Handle<ScopeInfo> Factory::NewScopeInfo(int length) {
CALL_HEAP_FUNCTION(
isolate(),
- isolate()->heap()->AllocateSerializedScopeInfo(length),
- SerializedScopeInfo);
+ isolate()->heap()->AllocateScopeInfo(length),
+ ScopeInfo);
}
@@ -844,8 +874,8 @@ Handle<DescriptorArray> Factory::CopyAppendCallbackDescriptors(
// Copy the descriptors from the array.
for (int i = 0; i < array->number_of_descriptors(); i++) {
- if (array->GetType(i) != NULL_DESCRIPTOR) {
- result->CopyFrom(descriptor_count++, *array, i, witness);
+ if (!array->IsNullDescriptor(i)) {
+ DescriptorArray::CopyFrom(result, descriptor_count++, array, i, witness);
}
}
@@ -879,7 +909,7 @@ Handle<DescriptorArray> Factory::CopyAppendCallbackDescriptors(
Handle<DescriptorArray> new_result =
NewDescriptorArray(number_of_descriptors);
for (int i = 0; i < number_of_descriptors; i++) {
- new_result->CopyFrom(i, *result, i, witness);
+ DescriptorArray::CopyFrom(new_result, i, result, i, witness);
}
result = new_result;
}
@@ -916,36 +946,62 @@ Handle<JSObject> Factory::NewJSObjectFromMap(Handle<Map> map) {
Handle<JSArray> Factory::NewJSArray(int capacity,
+ ElementsKind elements_kind,
PretenureFlag pretenure) {
- Handle<JSObject> obj = NewJSObject(isolate()->array_function(), pretenure);
CALL_HEAP_FUNCTION(isolate(),
- Handle<JSArray>::cast(obj)->Initialize(capacity),
+ isolate()->heap()->AllocateJSArrayAndStorage(
+ elements_kind,
+ 0,
+ capacity,
+ INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE,
+ pretenure),
JSArray);
}
-Handle<JSArray> Factory::NewJSArrayWithElements(Handle<FixedArray> elements,
+Handle<JSArray> Factory::NewJSArrayWithElements(Handle<FixedArrayBase> elements,
+ ElementsKind elements_kind,
PretenureFlag pretenure) {
- Handle<JSArray> result =
- Handle<JSArray>::cast(NewJSObject(isolate()->array_function(),
- pretenure));
- SetContent(result, elements);
- return result;
+ CALL_HEAP_FUNCTION(
+ isolate(),
+ isolate()->heap()->AllocateJSArrayWithElements(*elements,
+ elements_kind,
+ pretenure),
+ JSArray);
+}
+
+
+void Factory::SetElementsCapacityAndLength(Handle<JSArray> array,
+ int capacity,
+ int length) {
+ ElementsAccessor* accessor = array->GetElementsAccessor();
+ CALL_HEAP_FUNCTION_VOID(
+ isolate(),
+ accessor->SetCapacityAndLength(*array, capacity, length));
}
void Factory::SetContent(Handle<JSArray> array,
- Handle<FixedArray> elements) {
+ Handle<FixedArrayBase> elements) {
CALL_HEAP_FUNCTION_VOID(
isolate(),
array->SetContent(*elements));
}
-void Factory::EnsureCanContainNonSmiElements(Handle<JSArray> array) {
+void Factory::EnsureCanContainHeapObjectElements(Handle<JSArray> array) {
CALL_HEAP_FUNCTION_VOID(
isolate(),
- array->EnsureCanContainNonSmiElements());
+ array->EnsureCanContainHeapObjectElements());
+}
+
+
+void Factory::EnsureCanContainElements(Handle<JSArray> array,
+ Handle<FixedArrayBase> elements,
+ EnsureElementsMode mode) {
+ CALL_HEAP_FUNCTION_VOID(
+ isolate(),
+ array->EnsureCanContainElements(*elements, mode));
}
@@ -985,7 +1041,7 @@ Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo(
Handle<String> name,
int number_of_literals,
Handle<Code> code,
- Handle<SerializedScopeInfo> scope_info) {
+ Handle<ScopeInfo> scope_info) {
Handle<SharedFunctionInfo> shared = NewSharedFunctionInfo(name);
shared->set_code(*code);
shared->set_scope_info(*scope_info);
@@ -1039,13 +1095,23 @@ Handle<String> Factory::Uint32ToString(uint32_t value) {
}
-Handle<NumberDictionary> Factory::DictionaryAtNumberPut(
- Handle<NumberDictionary> dictionary,
+Handle<SeededNumberDictionary> Factory::DictionaryAtNumberPut(
+ Handle<SeededNumberDictionary> dictionary,
+ uint32_t key,
+ Handle<Object> value) {
+ CALL_HEAP_FUNCTION(isolate(),
+ dictionary->AtNumberPut(key, *value),
+ SeededNumberDictionary);
+}
+
+
+Handle<UnseededNumberDictionary> Factory::DictionaryAtNumberPut(
+ Handle<UnseededNumberDictionary> dictionary,
uint32_t key,
Handle<Object> value) {
CALL_HEAP_FUNCTION(isolate(),
dictionary->AtNumberPut(key, *value),
- NumberDictionary);
+ UnseededNumberDictionary);
}
@@ -1071,11 +1137,11 @@ Handle<JSFunction> Factory::NewFunction(Handle<String> name,
Handle<JSFunction> Factory::NewFunctionWithoutPrototypeHelper(
Handle<String> name,
- StrictModeFlag strict_mode) {
+ LanguageMode language_mode) {
Handle<SharedFunctionInfo> function_share = NewSharedFunctionInfo(name);
- Handle<Map> map = strict_mode == kStrictMode
- ? isolate()->strict_mode_function_without_prototype_map()
- : isolate()->function_without_prototype_map();
+ Handle<Map> map = (language_mode == CLASSIC_MODE)
+ ? isolate()->function_without_prototype_map()
+ : isolate()->strict_mode_function_without_prototype_map();
CALL_HEAP_FUNCTION(isolate(),
isolate()->heap()->AllocateFunction(
*map,
@@ -1087,8 +1153,9 @@ Handle<JSFunction> Factory::NewFunctionWithoutPrototypeHelper(
Handle<JSFunction> Factory::NewFunctionWithoutPrototype(
Handle<String> name,
- StrictModeFlag strict_mode) {
- Handle<JSFunction> fun = NewFunctionWithoutPrototypeHelper(name, strict_mode);
+ LanguageMode language_mode) {
+ Handle<JSFunction> fun =
+ NewFunctionWithoutPrototypeHelper(name, language_mode);
fun->set_context(isolate()->context()->global_context());
return fun;
}
@@ -1152,24 +1219,15 @@ Handle<JSFunction> Factory::CreateApiFunction(
Handle<Code> construct_stub = isolate()->builtins()->JSConstructStubApi();
int internal_field_count = 0;
- bool has_external_resource = false;
- bool use_user_object_comparison = false;
-
if (!obj->instance_template()->IsUndefined()) {
Handle<ObjectTemplateInfo> instance_template =
Handle<ObjectTemplateInfo>(
ObjectTemplateInfo::cast(obj->instance_template()));
internal_field_count =
Smi::cast(instance_template->internal_field_count())->value();
- has_external_resource =
- !instance_template->has_external_resource()->IsUndefined();
- use_user_object_comparison =
- !instance_template->use_user_object_comparison()->IsUndefined();
}
int instance_size = kPointerSize * internal_field_count;
- if (has_external_resource) instance_size += kPointerSize;
-
InstanceType type = INVALID_TYPE;
switch (instance_type) {
case JavaScriptObject:
@@ -1204,16 +1262,6 @@ Handle<JSFunction> Factory::CreateApiFunction(
Handle<Map> map = Handle<Map>(result->initial_map());
- // Mark as having external data object if needed
- if (has_external_resource) {
- map->set_has_external_resource(true);
- }
-
- // Mark as using user object comparison if needed
- if (use_user_object_comparison) {
- map->set_use_user_object_comparison(true);
- }
-
// Mark as undetectable if needed.
if (obj->undetectable()) {
map->set_is_undetectable();
@@ -1232,9 +1280,6 @@ Handle<JSFunction> Factory::CreateApiFunction(
// Set interceptor information in the map.
if (!obj->named_property_handler()->IsUndefined()) {
map->set_has_named_interceptor();
- InterceptorInfo *nph = InterceptorInfo::cast(obj->named_property_handler());
- bool is_fallback = nph->is_fallback()->IsUndefined()?false:nph->is_fallback()->value();
- map->set_named_interceptor_is_fallback(is_fallback);
}
if (!obj->indexed_property_handler()->IsUndefined()) {
map->set_has_indexed_interceptor();
diff --git a/src/3rdparty/v8/src/factory.h b/src/3rdparty/v8/src/factory.h
index 2073ce3..786d4a9 100644
--- a/src/3rdparty/v8/src/factory.h
+++ b/src/3rdparty/v8/src/factory.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -54,7 +54,11 @@ class Factory {
int size,
PretenureFlag pretenure = NOT_TENURED);
- Handle<NumberDictionary> NewNumberDictionary(int at_least_space_for);
+ Handle<SeededNumberDictionary> NewSeededNumberDictionary(
+ int at_least_space_for);
+
+ Handle<UnseededNumberDictionary> NewUnseededNumberDictionary(
+ int at_least_space_for);
Handle<StringDictionary> NewStringDictionary(int at_least_space_for);
@@ -69,6 +73,10 @@ class Factory {
Handle<DeoptimizationOutputData> NewDeoptimizationOutputData(
int deopt_entry_count,
PretenureFlag pretenure);
+ // Allocates a pre-tenured empty AccessorPair.
+ Handle<AccessorPair> NewAccessorPair();
+
+ Handle<TypeFeedbackInfo> NewTypeFeedbackInfo();
Handle<String> LookupSymbol(Vector<const char> str);
Handle<String> LookupSymbol(Handle<String> str);
@@ -172,7 +180,7 @@ class Factory {
// Create a 'block' context.
Handle<Context> NewBlockContext(Handle<JSFunction> function,
Handle<Context> previous,
- Handle<SerializedScopeInfo> scope_info);
+ Handle<ScopeInfo> scope_info);
// Return the Symbol matching the passed in string.
Handle<String> SymbolFromString(Handle<String> value);
@@ -227,12 +235,14 @@ class Factory {
Handle<FixedDoubleArray> CopyFixedDoubleArray(
Handle<FixedDoubleArray> array);
- // Numbers (eg, literals) are pretenured by the parser.
+ // Numbers (e.g. literals) are pretenured by the parser.
Handle<Object> NewNumber(double value,
PretenureFlag pretenure = NOT_TENURED);
- Handle<Object> NewNumberFromInt(int value);
- Handle<Object> NewNumberFromUint(uint32_t value);
+ Handle<Object> NewNumberFromInt(int32_t value,
+ PretenureFlag pretenure = NOT_TENURED);
+ Handle<Object> NewNumberFromUint(uint32_t value,
+ PretenureFlag pretenure = NOT_TENURED);
// These objects are used by the api to create env-independent data
// structures in the heap.
@@ -254,15 +264,24 @@ class Factory {
// JS arrays are pretenured when allocated by the parser.
Handle<JSArray> NewJSArray(int capacity,
+ ElementsKind elements_kind = FAST_ELEMENTS,
PretenureFlag pretenure = NOT_TENURED);
Handle<JSArray> NewJSArrayWithElements(
- Handle<FixedArray> elements,
+ Handle<FixedArrayBase> elements,
+ ElementsKind elements_kind = FAST_ELEMENTS,
PretenureFlag pretenure = NOT_TENURED);
- void SetContent(Handle<JSArray> array, Handle<FixedArray> elements);
+ void SetElementsCapacityAndLength(Handle<JSArray> array,
+ int capacity,
+ int length);
+
+ void SetContent(Handle<JSArray> array, Handle<FixedArrayBase> elements);
- void EnsureCanContainNonSmiElements(Handle<JSArray> array);
+ void EnsureCanContainHeapObjectElements(Handle<JSArray> array);
+ void EnsureCanContainElements(Handle<JSArray> array,
+ Handle<FixedArrayBase> elements,
+ EnsureElementsMode mode);
Handle<JSProxy> NewJSProxy(Handle<Object> handler, Handle<Object> prototype);
@@ -277,7 +296,7 @@ class Factory {
Handle<JSFunction> NewFunctionWithoutPrototype(
Handle<String> name,
- StrictModeFlag strict_mode);
+ LanguageMode language_mode);
Handle<JSFunction> NewFunction(Handle<Object> super, bool is_global);
@@ -291,7 +310,7 @@ class Factory {
Handle<Context> context,
PretenureFlag pretenure = TENURED);
- Handle<SerializedScopeInfo> NewSerializedScopeInfo(int length);
+ Handle<ScopeInfo> NewScopeInfo(int length);
Handle<Code> NewCode(const CodeDesc& desc,
Code::Flags flags,
@@ -409,7 +428,7 @@ class Factory {
Handle<String> name,
int number_of_literals,
Handle<Code> code,
- Handle<SerializedScopeInfo> scope_info);
+ Handle<ScopeInfo> scope_info);
Handle<SharedFunctionInfo> NewSharedFunctionInfo(Handle<String> name);
Handle<JSMessageObject> NewJSMessageObject(
@@ -421,8 +440,13 @@ class Factory {
Handle<Object> stack_trace,
Handle<Object> stack_frames);
- Handle<NumberDictionary> DictionaryAtNumberPut(
- Handle<NumberDictionary>,
+ Handle<SeededNumberDictionary> DictionaryAtNumberPut(
+ Handle<SeededNumberDictionary>,
+ uint32_t key,
+ Handle<Object> value);
+
+ Handle<UnseededNumberDictionary> DictionaryAtNumberPut(
+ Handle<UnseededNumberDictionary>,
uint32_t key,
Handle<Object> value);
@@ -467,7 +491,7 @@ class Factory {
Handle<JSFunction> NewFunctionWithoutPrototypeHelper(
Handle<String> name,
- StrictModeFlag strict_mode);
+ LanguageMode language_mode);
Handle<DescriptorArray> CopyAppendCallbackDescriptors(
Handle<DescriptorArray> array,
diff --git a/src/3rdparty/v8/src/fast-dtoa.h b/src/3rdparty/v8/src/fast-dtoa.h
index 94c22ec..ef28557 100644
--- a/src/3rdparty/v8/src/fast-dtoa.h
+++ b/src/3rdparty/v8/src/fast-dtoa.h
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -43,7 +43,7 @@ enum FastDtoaMode {
// FastDtoa will produce at most kFastDtoaMaximalLength digits. This does not
// include the terminating '\0' character.
-static const int kFastDtoaMaximalLength = 17;
+const int kFastDtoaMaximalLength = 17;
// Provides a decimal representation of v.
// The result should be interpreted as buffer * 10^(point - length).
diff --git a/src/3rdparty/v8/src/flag-definitions.h b/src/3rdparty/v8/src/flag-definitions.h
index 3cd0b72..75697a8 100644
--- a/src/3rdparty/v8/src/flag-definitions.h
+++ b/src/3rdparty/v8/src/flag-definitions.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -41,6 +41,7 @@
extern ctype FLAG_##nam;
#define FLAG_READONLY(ftype, ctype, nam, def, cmt) \
static ctype const FLAG_##nam = def;
+#define DEFINE_implication(whenflag, thenflag)
// We want to supply the actual storage and value for the flag variable in the
// .cc file. We only do this for writable flags.
@@ -48,6 +49,7 @@
#define FLAG_FULL(ftype, ctype, nam, def, cmt) \
ctype FLAG_##nam = def;
#define FLAG_READONLY(ftype, ctype, nam, def, cmt)
+#define DEFINE_implication(whenflag, thenflag)
// We need to define all of our default values so that the Flag structure can
// access them by pointer. These are just used internally inside of one .cc,
@@ -56,7 +58,7 @@
#define FLAG_FULL(ftype, ctype, nam, def, cmt) \
static ctype const FLAGDEFAULT_##nam = def;
#define FLAG_READONLY(ftype, ctype, nam, def, cmt)
-
+#define DEFINE_implication(whenflag, thenflag)
// We want to write entries into our meta data table, for internal parsing and
// printing / etc in the flag parser code. We only do this for writable flags.
@@ -64,6 +66,14 @@
#define FLAG_FULL(ftype, ctype, nam, def, cmt) \
{ Flag::TYPE_##ftype, #nam, &FLAG_##nam, &FLAGDEFAULT_##nam, cmt, false },
#define FLAG_READONLY(ftype, ctype, nam, def, cmt)
+#define DEFINE_implication(whenflag, thenflag)
+
+// We produce the code to set flags when it is implied by another flag.
+#elif defined(FLAG_MODE_DEFINE_IMPLICATIONS)
+#define FLAG_FULL(ftype, ctype, nam, def, cmt)
+#define FLAG_READONLY(ftype, ctype, nam, def, cmt)
+#define DEFINE_implication(whenflag, thenflag) \
+ if (FLAG_##whenflag) FLAG_##thenflag = true;
#else
#error No mode supplied when including flags.defs
@@ -71,17 +81,41 @@
#ifdef FLAG_MODE_DECLARE
// Structure used to hold a collection of arguments to the JavaScript code.
+#define JSARGUMENTS_INIT {{}}
struct JSArguments {
public:
- JSArguments();
- JSArguments(int argc, const char** argv);
- int argc() const;
- const char** argv();
- const char*& operator[](int idx);
- JSArguments& operator=(JSArguments args);
+ inline int argc() const {
+ return static_cast<int>(storage_[0]);
+ }
+ inline const char** argv() const {
+ return reinterpret_cast<const char**>(storage_[1]);
+ }
+ inline const char*& operator[] (int idx) const {
+ return argv()[idx];
+ }
+ inline JSArguments& operator=(JSArguments args) {
+ set_argc(args.argc());
+ set_argv(args.argv());
+ return *this;
+ }
+ static JSArguments Create(int argc, const char** argv) {
+ JSArguments args;
+ args.set_argc(argc);
+ args.set_argv(argv);
+ return args;
+ }
private:
- int argc_;
- const char** argv_;
+ void set_argc(int argc) {
+ storage_[0] = argc;
+ }
+ void set_argv(const char** argv) {
+ storage_[1] = reinterpret_cast<AtomicWord>(argv);
+ }
+public:
+ // Contains argc and argv. Unfortunately we have to store these two fields
+ // into a single one to avoid making the initialization macro (which would be
+ // "{ 0, NULL }") contain a coma.
+ AtomicWord storage_[2];
};
#endif
@@ -96,40 +130,49 @@ private:
//
#define FLAG FLAG_FULL
-// Flags for experimental language features.
+// Flags for language modes and experimental language features.
+DEFINE_bool(use_strict, false, "enforce strict mode")
+
DEFINE_bool(harmony_typeof, false, "enable harmony semantics for typeof")
DEFINE_bool(harmony_scoping, false, "enable harmony block scoping")
+DEFINE_bool(harmony_modules, false,
+ "enable harmony modules (implies block scoping)")
DEFINE_bool(harmony_proxies, false, "enable harmony proxies")
DEFINE_bool(harmony_collections, false,
"enable harmony collections (sets, maps, and weak maps)")
-DEFINE_bool(harmony, false, "enable all harmony features")
+DEFINE_bool(harmony, false, "enable all harmony features (except typeof)")
+DEFINE_implication(harmony, harmony_scoping)
+DEFINE_implication(harmony, harmony_modules)
+DEFINE_implication(harmony, harmony_proxies)
+DEFINE_implication(harmony, harmony_collections)
+DEFINE_implication(harmony_modules, harmony_scoping)
// Flags for experimental implementation features.
-DEFINE_bool(unbox_double_arrays, true, "automatically unbox arrays of doubles")
-DEFINE_bool(smi_only_arrays, false, "tracks arrays with only smi values")
-DEFINE_bool(string_slices, true, "use string slices")
-
+DEFINE_bool(smi_only_arrays, true, "tracks arrays with only smi values")
DEFINE_bool(clever_optimizations,
true,
"Optimize object size, Array shift, DOM strings and string +")
+// Flags for data representation optimizations
+DEFINE_bool(unbox_double_arrays, true, "automatically unbox arrays of doubles")
+DEFINE_bool(string_slices, true, "use string slices")
+
// Flags for Crankshaft.
DEFINE_bool(crankshaft, true, "use crankshaft")
-DEFINE_string(hydrogen_filter, "", "hydrogen use/trace filter")
-DEFINE_bool(use_hydrogen, true, "use generated hydrogen for compilation")
-DEFINE_bool(build_lithium, true, "use lithium chunk builder")
-DEFINE_bool(alloc_lithium, true, "use lithium register allocator")
-DEFINE_bool(use_lithium, true, "use lithium code generator")
+DEFINE_string(hydrogen_filter, "", "optimization filter")
DEFINE_bool(use_range, true, "use hydrogen range analysis")
DEFINE_bool(eliminate_dead_phis, true, "eliminate dead phis")
DEFINE_bool(use_gvn, true, "use hydrogen global value numbering")
DEFINE_bool(use_canonicalizing, true, "use hydrogen instruction canonicalizing")
DEFINE_bool(use_inlining, true, "use function inlining")
DEFINE_bool(limit_inlining, true, "limit code size growth from inlining")
-DEFINE_bool(eliminate_empty_blocks, true, "eliminate empty blocks")
DEFINE_bool(loop_invariant_code_motion, true, "loop invariant code motion")
+DEFINE_bool(collect_megamorphic_maps_from_stub_cache,
+ true,
+ "crankshaft harvests type feedback from stub cache")
DEFINE_bool(hydrogen_stats, false, "print statistics for hydrogen")
DEFINE_bool(trace_hydrogen, false, "trace generated hydrogen to file")
+DEFINE_string(trace_phase, "Z", "trace generated IR for specified phases")
DEFINE_bool(trace_inlining, false, "trace inlining decisions")
DEFINE_bool(trace_alloc, false, "trace register allocator")
DEFINE_bool(trace_all_uses, false, "trace all use positions")
@@ -149,13 +192,49 @@ DEFINE_bool(use_osr, true, "use on-stack replacement")
DEFINE_bool(trace_osr, false, "trace on-stack replacement")
DEFINE_int(stress_runs, 0, "number of stress runs")
DEFINE_bool(optimize_closures, true, "optimize closures")
+DEFINE_bool(inline_construct, true, "inline constructor calls")
+DEFINE_bool(inline_arguments, true, "inline functions with arguments object")
+DEFINE_int(loop_weight, 1, "loop weight for representation inference")
+
+DEFINE_bool(optimize_for_in, true,
+ "optimize functions containing for-in loops")
+
+// Experimental profiler changes.
+DEFINE_bool(experimental_profiler, true, "enable all profiler experiments")
+DEFINE_bool(watch_ic_patching, false, "profiler considers IC stability")
+DEFINE_int(frame_count, 1, "number of stack frames inspected by the profiler")
+DEFINE_bool(self_optimization, false,
+ "primitive functions trigger their own optimization")
+DEFINE_bool(direct_self_opt, false,
+ "call recompile stub directly when self-optimizing")
+DEFINE_bool(retry_self_opt, false, "re-try self-optimization if it failed")
+DEFINE_bool(count_based_interrupts, false,
+ "trigger profiler ticks based on counting instead of timing")
+DEFINE_bool(interrupt_at_exit, false,
+ "insert an interrupt check at function exit")
+DEFINE_bool(weighted_back_edges, false,
+ "weight back edges by jump distance for interrupt triggering")
+DEFINE_int(interrupt_budget, 5900,
+ "execution budget before interrupt is triggered")
+DEFINE_int(type_info_threshold, 15,
+ "percentage of ICs that must have type info to allow optimization")
+DEFINE_int(self_opt_count, 130, "call count before self-optimization")
+
+DEFINE_implication(experimental_profiler, watch_ic_patching)
+DEFINE_implication(experimental_profiler, self_optimization)
+// Not implying direct_self_opt here because it seems to be a bad idea.
+DEFINE_implication(experimental_profiler, retry_self_opt)
+DEFINE_implication(experimental_profiler, count_based_interrupts)
+DEFINE_implication(experimental_profiler, interrupt_at_exit)
+DEFINE_implication(experimental_profiler, weighted_back_edges)
+
+DEFINE_bool(trace_opt_verbose, false, "extra verbose compilation tracing")
+DEFINE_implication(trace_opt_verbose, trace_opt)
// assembler-ia32.cc / assembler-arm.cc / assembler-x64.cc
DEFINE_bool(debug_code, false,
"generate extra code (assertions) for debugging")
DEFINE_bool(code_comments, false, "emit comments in code disassembly")
-DEFINE_bool(peephole_optimization, true,
- "perform peephole optimizations in assembly code")
DEFINE_bool(enable_sse2, true,
"enable use of SSE2 instructions if available")
DEFINE_bool(enable_sse3, true,
@@ -205,10 +284,8 @@ DEFINE_bool(lazy, true, "use lazy compilation")
DEFINE_bool(trace_opt, false, "trace lazy optimization")
DEFINE_bool(trace_opt_stats, false, "trace lazy optimization statistics")
DEFINE_bool(opt, true, "use adaptive optimizations")
-DEFINE_bool(opt_eagerly, false, "be more eager when adaptively optimizing")
DEFINE_bool(always_opt, false, "always try to optimize functions")
DEFINE_bool(prepare_always_opt, false, "prepare for turning on always opt")
-DEFINE_bool(deopt, true, "support deoptimization")
DEFINE_bool(trace_deopt, false, "trace deoptimization")
// compiler.cc
@@ -229,12 +306,14 @@ DEFINE_bool(trace_debug_json, false, "trace debugging JSON request/response")
DEFINE_bool(debugger_auto_break, true,
"automatically set the debug break flag when debugger commands are "
"in the queue")
-DEFINE_bool(breakpoint_relocation, true, "relocate breakpoints to the next executable line")
DEFINE_bool(enable_liveedit, true, "enable liveedit experimental feature")
+DEFINE_bool(break_on_abort, true, "always cause a debug break before aborting")
// execution.cc
-DEFINE_int(stack_size, kPointerSize * 128,
- "default size of stack region v8 is allowed to use (in KkBytes)")
+// Slightly less than 1MB on 64-bit, since Windows' default stack size for
+// the main execution thread is 1MB for both 32 and 64-bit.
+DEFINE_int(stack_size, kPointerSize * 123,
+ "default size of stack region v8 is allowed to use (in kBytes)")
// frames.cc
DEFINE_int(max_stack_trace_source_length, 300,
@@ -273,6 +352,9 @@ DEFINE_bool(trace_incremental_marking, false,
// v8.cc
DEFINE_bool(use_idle_notification, true,
"Use idle notification to reduce memory footprint.")
+
+DEFINE_bool(send_idle_notification, false,
+ "Send idle notifcation between stress runs.")
// ic.cc
DEFINE_bool(use_ic, true, "use inline caching")
@@ -290,11 +372,10 @@ DEFINE_bool(native_code_counters, false,
DEFINE_bool(always_compact, false, "Perform compaction on every full GC")
DEFINE_bool(lazy_sweeping, true,
"Use lazy sweeping for old pointer and data spaces")
-DEFINE_bool(cleanup_caches_in_maps_at_gc, true,
- "Flush code caches in maps during mark compact cycle.")
DEFINE_bool(never_compact, false,
"Never perform compaction on full GC - testing only")
-DEFINE_bool(compact_code_space, false, "Compact code space")
+DEFINE_bool(compact_code_space, true,
+ "Compact code space on full non-incremental collections")
DEFINE_bool(cleanup_code_caches_at_gc, true,
"Flush inline caches prior to mark compact collection and "
"flush code caches in maps during mark compact cycle.")
@@ -302,24 +383,11 @@ DEFINE_int(random_seed, 0,
"Default seed for initializing random generator "
"(0, the default, means to use system random).")
-DEFINE_bool(canonicalize_object_literal_maps, true,
- "Canonicalize maps for object literals.")
-
-DEFINE_int(max_map_space_pages, MapSpace::kMaxMapPageIndex - 1,
- "Maximum number of pages in map space which still allows to encode "
- "forwarding pointers. That's actually a constant, but it's useful "
- "to control it with a flag for better testing.")
-
-// mksnapshot.cc
-DEFINE_bool(h, false, "print this message")
-DEFINE_bool(new_snapshot, true, "use new snapshot implementation")
-
// objects.cc
DEFINE_bool(use_verbose_printer, true, "allows verbose printing")
// parser.cc
DEFINE_bool(allow_natives_syntax, false, "allow natives syntax")
-DEFINE_bool(strict_mode, true, "allow strict mode directives")
// simulator-arm.cc and simulator-mips.cc
DEFINE_bool(trace_sim, false, "Trace simulator execution")
@@ -334,6 +402,14 @@ DEFINE_bool(trace_exception, false,
"print stack trace when throwing exceptions")
DEFINE_bool(preallocate_message_memory, false,
"preallocate some memory to build stack traces.")
+DEFINE_bool(randomize_hashes,
+ true,
+ "randomize hashes to avoid predictable hash collisions "
+ "(with snapshots this option cannot override the baked-in seed)")
+DEFINE_int(hash_seed,
+ 0,
+ "Fixed seed to use to hash property keys (0 means random)"
+ "(with snapshots this option cannot override the baked-in seed)")
// v8.cc
DEFINE_bool(preemption, false,
@@ -372,7 +448,7 @@ DEFINE_int(debugger_port, 5858, "Port to use for remote debugging")
#endif // ENABLE_DEBUGGER_SUPPORT
DEFINE_string(map_counters, "", "Map counters to a file")
-DEFINE_args(js_arguments, JSArguments(),
+DEFINE_args(js_arguments, JSARGUMENTS_INIT,
"Pass all remaining arguments to the script. Alias for \"--\".")
#if defined(WEBOS__)
@@ -427,9 +503,6 @@ DEFINE_bool(print_builtin_source, false,
"pretty print source code for builtins")
DEFINE_bool(print_ast, false, "print source AST")
DEFINE_bool(print_builtin_ast, false, "print source AST for builtins")
-DEFINE_bool(print_json_ast, false, "print source AST as JSON")
-DEFINE_bool(print_builtin_json_ast, false,
- "print source AST for builtins as JSON")
DEFINE_string(stop_at, "", "function name where to insert a breakpoint")
// compiler.cc
@@ -451,6 +524,11 @@ DEFINE_bool(print_global_handles, false, "report global handles after GC")
// ic.cc
DEFINE_bool(trace_ic, false, "trace inline cache state transitions")
+// interface.cc
+DEFINE_bool(print_interfaces, false, "print interfaces")
+DEFINE_bool(print_interface_details, false, "print interface inference details")
+DEFINE_int(print_interface_depth, 5, "depth for printing interfaces")
+
// objects.cc
DEFINE_bool(trace_normalization,
false,
@@ -459,10 +537,6 @@ DEFINE_bool(trace_normalization,
// runtime.cc
DEFINE_bool(trace_lazy, false, "trace lazy compilation")
-// serialize.cc
-DEFINE_bool(debug_serialization, false,
- "write debug information into the snapshot.")
-
// spaces.cc
DEFINE_bool(collect_heap_spill_statistics, false,
"report heap spill statistics along with heap_stats "
@@ -532,6 +606,13 @@ DEFINE_bool(trace_elements_transitions, false, "trace elements transitions")
// code-stubs.cc
DEFINE_bool(print_code_stubs, false, "print code stubs")
+DEFINE_bool(test_secondary_stub_cache,
+ false,
+ "test secondary stub cache by disabling the primary one")
+
+DEFINE_bool(test_primary_stub_cache,
+ false,
+ "test primary stub cache by disabling the secondary one")
// codegen-ia32.cc / codegen-arm.cc
DEFINE_bool(print_code, false, "print generated code")
@@ -541,6 +622,20 @@ DEFINE_bool(print_unopt_code, false, "print unoptimized code before "
DEFINE_bool(print_code_verbose, false, "print more information for code")
DEFINE_bool(print_builtin_code, false, "print generated code for builtins")
+#ifdef ENABLE_DISASSEMBLER
+DEFINE_bool(print_all_code, false, "enable all flags related to printing code")
+DEFINE_implication(print_all_code, print_code)
+DEFINE_implication(print_all_code, print_opt_code)
+DEFINE_implication(print_all_code, print_unopt_code)
+DEFINE_implication(print_all_code, print_code_verbose)
+DEFINE_implication(print_all_code, print_builtin_code)
+DEFINE_implication(print_all_code, print_code_stubs)
+DEFINE_implication(print_all_code, code_comments)
+#ifdef DEBUG
+DEFINE_implication(print_all_code, trace_codegen)
+#endif
+#endif
+
// Cleanup...
#undef FLAG_FULL
#undef FLAG_READONLY
@@ -549,8 +644,10 @@ DEFINE_bool(print_builtin_code, false, "print generated code for builtins")
#undef DEFINE_bool
#undef DEFINE_int
#undef DEFINE_string
+#undef DEFINE_implication
#undef FLAG_MODE_DECLARE
#undef FLAG_MODE_DEFINE
#undef FLAG_MODE_DEFINE_DEFAULTS
#undef FLAG_MODE_META
+#undef FLAG_MODE_DEFINE_IMPLICATIONS
diff --git a/src/3rdparty/v8/src/flags.cc b/src/3rdparty/v8/src/flags.cc
index ab5b57c..5720cbd 100644
--- a/src/3rdparty/v8/src/flags.cc
+++ b/src/3rdparty/v8/src/flags.cc
@@ -411,7 +411,7 @@ int FlagList::SetFlagsFromCommandLine(int* argc,
for (int k = i; k < *argc; k++) {
js_argv[k - start_pos] = StrDup(argv[k]);
}
- *flag->args_variable() = JSArguments(js_argc, js_argv);
+ *flag->args_variable() = JSArguments::Create(js_argc, js_argv);
i = *argc; // Consume all arguments
break;
}
@@ -534,18 +534,10 @@ void FlagList::PrintHelp() {
}
}
-JSArguments::JSArguments()
- : argc_(0), argv_(NULL) {}
-JSArguments::JSArguments(int argc, const char** argv)
- : argc_(argc), argv_(argv) {}
-int JSArguments::argc() const { return argc_; }
-const char** JSArguments::argv() { return argv_; }
-const char*& JSArguments::operator[](int idx) { return argv_[idx]; }
-JSArguments& JSArguments::operator=(JSArguments args) {
- argc_ = args.argc_;
- argv_ = args.argv_;
- return *this;
-}
+void FlagList::EnforceFlagImplications() {
+#define FLAG_MODE_DEFINE_IMPLICATIONS
+#include "flag-definitions.h"
+}
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/flags.h b/src/3rdparty/v8/src/flags.h
index f9cbde0..f0b239b 100644
--- a/src/3rdparty/v8/src/flags.h
+++ b/src/3rdparty/v8/src/flags.h
@@ -72,6 +72,9 @@ class FlagList {
// Print help to stdout with flags, types, and default values.
static void PrintHelp();
+
+ // Set flags as consequence of being implied by another flag.
+ static void EnforceFlagImplications();
};
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/frames-inl.h b/src/3rdparty/v8/src/frames-inl.h
index 94c745c..27a526c 100644
--- a/src/3rdparty/v8/src/frames-inl.h
+++ b/src/3rdparty/v8/src/frames-inl.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -68,7 +68,7 @@ inline bool StackHandler::includes(Address address) const {
inline void StackHandler::Iterate(ObjectVisitor* v, Code* holder) const {
v->VisitPointer(context_address());
- StackFrame::IteratePc(v, pc_address(), holder);
+ v->VisitPointer(code_address());
}
@@ -77,24 +77,24 @@ inline StackHandler* StackHandler::FromAddress(Address address) {
}
-inline bool StackHandler::is_entry() const {
- return state() == ENTRY;
+inline bool StackHandler::is_js_entry() const {
+ return kind() == JS_ENTRY;
}
-inline bool StackHandler::is_try_catch() const {
- return state() == TRY_CATCH;
+inline bool StackHandler::is_catch() const {
+ return kind() == CATCH;
}
-inline bool StackHandler::is_try_finally() const {
- return state() == TRY_FINALLY;
+inline bool StackHandler::is_finally() const {
+ return kind() == FINALLY;
}
-inline StackHandler::State StackHandler::state() const {
+inline StackHandler::Kind StackHandler::kind() const {
const int offset = StackHandlerConstants::kStateOffset;
- return static_cast<State>(Memory::int_at(address() + offset));
+ return KindField::decode(Memory::unsigned_at(address() + offset));
}
@@ -104,9 +104,9 @@ inline Object** StackHandler::context_address() const {
}
-inline Address* StackHandler::pc_address() const {
- const int offset = StackHandlerConstants::kPCOffset;
- return reinterpret_cast<Address*>(address() + offset);
+inline Object** StackHandler::code_address() const {
+ const int offset = StackHandlerConstants::kCodeOffset;
+ return reinterpret_cast<Object**>(address() + offset);
}
@@ -191,7 +191,7 @@ inline bool StandardFrame::IsArgumentsAdaptorFrame(Address fp) {
inline bool StandardFrame::IsConstructFrame(Address fp) {
Object* marker =
Memory::Object_at(fp + StandardFrameConstants::kMarkerOffset);
- return marker == Smi::FromInt(CONSTRUCT);
+ return marker == Smi::FromInt(StackFrame::CONSTRUCT);
}
diff --git a/src/3rdparty/v8/src/frames.cc b/src/3rdparty/v8/src/frames.cc
index 7c4c573..5911284 100644
--- a/src/3rdparty/v8/src/frames.cc
+++ b/src/3rdparty/v8/src/frames.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -31,6 +31,7 @@
#include "deoptimizer.h"
#include "frames-inl.h"
#include "full-codegen.h"
+#include "lazy-instance.h"
#include "mark-compact.h"
#include "safepoint-table.h"
#include "scopeinfo.h"
@@ -41,6 +42,22 @@
namespace v8 {
namespace internal {
+
+static ReturnAddressLocationResolver return_address_location_resolver = NULL;
+
+
+// Resolves pc_address through the resolution address function if one is set.
+static inline Address* ResolveReturnAddressLocation(Address* pc_address) {
+ if (return_address_location_resolver == NULL) {
+ return pc_address;
+ } else {
+ return reinterpret_cast<Address*>(
+ return_address_location_resolver(
+ reinterpret_cast<uintptr_t>(pc_address)));
+ }
+}
+
+
// Iterator that supports traversing the stack handlers of a
// particular frame. Needs to know the top of the handler chain.
class StackHandlerIterator BASE_EMBEDDED {
@@ -155,8 +172,8 @@ void StackFrameIterator::Reset() {
ASSERT(fp_ != NULL);
state.fp = fp_;
state.sp = sp_;
- state.pc_address =
- reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp_));
+ state.pc_address = ResolveReturnAddressLocation(
+ reinterpret_cast<Address*>(StandardFrame::ComputePCAddress(fp_)));
type = StackFrame::ComputeType(isolate(), &state);
}
if (SingletonFor(type) == NULL) return;
@@ -414,6 +431,13 @@ void StackFrame::IteratePc(ObjectVisitor* v,
}
+void StackFrame::SetReturnAddressLocationResolver(
+ ReturnAddressLocationResolver resolver) {
+ ASSERT(return_address_location_resolver == NULL);
+ return_address_location_resolver = resolver;
+}
+
+
StackFrame::Type StackFrame::ComputeType(Isolate* isolate, State* state) {
ASSERT(state->fp != NULL);
if (StandardFrame::IsArgumentsAdaptorFrame(state->fp)) {
@@ -485,11 +509,11 @@ Code* ExitFrame::unchecked_code() const {
void ExitFrame::ComputeCallerState(State* state) const {
- // Setup the caller state.
+ // Set up the caller state.
state->sp = caller_sp();
state->fp = Memory::Address_at(fp() + ExitFrameConstants::kCallerFPOffset);
- state->pc_address
- = reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset);
+ state->pc_address = ResolveReturnAddressLocation(
+ reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset));
}
@@ -523,7 +547,8 @@ StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
void ExitFrame::FillState(Address fp, Address sp, State* state) {
state->sp = sp;
state->fp = fp;
- state->pc_address = reinterpret_cast<Address*>(sp - 1 * kPointerSize);
+ state->pc_address = ResolveReturnAddressLocation(
+ reinterpret_cast<Address*>(sp - 1 * kPointerSize));
}
@@ -558,7 +583,8 @@ int StandardFrame::ComputeExpressionsCount() const {
void StandardFrame::ComputeCallerState(State* state) const {
state->sp = caller_sp();
state->fp = caller_fp();
- state->pc_address = reinterpret_cast<Address*>(ComputePCAddress(fp()));
+ state->pc_address = ResolveReturnAddressLocation(
+ reinterpret_cast<Address*>(ComputePCAddress(fp())));
}
@@ -723,12 +749,17 @@ void JavaScriptFrame::PrintTop(FILE* file,
JavaScriptFrame* frame = it.frame();
if (frame->IsConstructor()) PrintF(file, "new ");
// function name
- Object* fun = frame->function();
- if (fun->IsJSFunction()) {
- SharedFunctionInfo* shared = JSFunction::cast(fun)->shared();
- shared->DebugName()->ShortPrint(file);
+ Object* maybe_fun = frame->function();
+ if (maybe_fun->IsJSFunction()) {
+ JSFunction* fun = JSFunction::cast(maybe_fun);
+ fun->PrintName();
+ Code* js_code = frame->unchecked_code();
+ Address pc = frame->pc();
+ int code_offset =
+ static_cast<int>(pc - js_code->instruction_start());
+ PrintF("+%d", code_offset);
+ SharedFunctionInfo* shared = fun->shared();
if (print_line_number) {
- Address pc = frame->pc();
Code* code = Code::cast(
v8::internal::Isolate::Current()->heap()->FindCodeObject(pc));
int source_pos = code->SourcePosition(pc);
@@ -751,7 +782,7 @@ void JavaScriptFrame::PrintTop(FILE* file,
}
}
} else {
- fun->ShortPrint(file);
+ PrintF("<unknown>");
}
if (print_args) {
@@ -808,18 +839,16 @@ void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
data->TranslationIndex(deopt_index)->value());
Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
ASSERT(opcode == Translation::BEGIN);
- int frame_count = it.Next();
+ it.Next(); // Drop frame count.
+ int jsframe_count = it.Next();
// We create the summary in reverse order because the frames
// in the deoptimization translation are ordered bottom-to-top.
- int i = frame_count;
+ bool is_constructor = IsConstructor();
+ int i = jsframe_count;
while (i > 0) {
opcode = static_cast<Translation::Opcode>(it.Next());
- if (opcode == Translation::FRAME) {
- // We don't inline constructor calls, so only the first, outermost
- // frame can be a constructor frame in case of inlining.
- bool is_constructor = (i == frame_count) && IsConstructor();
-
+ if (opcode == Translation::JS_FRAME) {
i--;
int ast_id = it.Next();
int function_id = it.Next();
@@ -869,11 +898,18 @@ void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
FrameSummary summary(receiver, function, code, pc_offset, is_constructor);
frames->Add(summary);
+ is_constructor = false;
+ } else if (opcode == Translation::CONSTRUCT_STUB_FRAME) {
+ // The next encountered JS_FRAME will be marked as a constructor call.
+ it.Skip(Translation::NumberOfOperandsFor(opcode));
+ ASSERT(!is_constructor);
+ is_constructor = true;
} else {
// Skip over operands to advance to the next opcode.
it.Skip(Translation::NumberOfOperandsFor(opcode));
}
}
+ ASSERT(!is_constructor);
}
@@ -913,8 +949,9 @@ int OptimizedFrame::GetInlineCount() {
Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
ASSERT(opcode == Translation::BEGIN);
USE(opcode);
- int frame_count = it.Next();
- return frame_count;
+ it.Next(); // Drop frame count.
+ int jsframe_count = it.Next();
+ return jsframe_count;
}
@@ -929,14 +966,15 @@ void OptimizedFrame::GetFunctions(List<JSFunction*>* functions) {
data->TranslationIndex(deopt_index)->value());
Translation::Opcode opcode = static_cast<Translation::Opcode>(it.Next());
ASSERT(opcode == Translation::BEGIN);
- int frame_count = it.Next();
+ it.Next(); // Drop frame count.
+ int jsframe_count = it.Next();
// We insert the frames in reverse order because the frames
// in the deoptimization translation are ordered bottom-to-top.
- while (frame_count > 0) {
+ while (jsframe_count > 0) {
opcode = static_cast<Translation::Opcode>(it.Next());
- if (opcode == Translation::FRAME) {
- frame_count--;
+ if (opcode == Translation::JS_FRAME) {
+ jsframe_count--;
it.Next(); // Skip ast id.
int function_id = it.Next();
it.Next(); // Skip height.
@@ -1002,11 +1040,15 @@ void JavaScriptFrame::Print(StringStream* accumulator,
if (IsConstructor()) accumulator->Add("new ");
accumulator->PrintFunction(function, receiver, &code);
- Handle<SerializedScopeInfo> scope_info(SerializedScopeInfo::Empty());
+ // Get scope information for nicer output, if possible. If code is NULL, or
+ // doesn't contain scope info, scope_info will return 0 for the number of
+ // parameters, stack local variables, context local variables, stack slots,
+ // or context slots.
+ Handle<ScopeInfo> scope_info(ScopeInfo::Empty());
if (function->IsJSFunction()) {
Handle<SharedFunctionInfo> shared(JSFunction::cast(function)->shared());
- scope_info = Handle<SerializedScopeInfo>(shared->scope_info());
+ scope_info = Handle<ScopeInfo>(shared->scope_info());
Object* script_obj = shared->script();
if (script_obj->IsScript()) {
Handle<Script> script(Script::cast(script_obj));
@@ -1031,11 +1073,6 @@ void JavaScriptFrame::Print(StringStream* accumulator,
accumulator->Add("(this=%o", receiver);
- // Get scope information for nicer output, if possible. If code is
- // NULL, or doesn't contain scope info, info will return 0 for the
- // number of parameters, stack slots, or context slots.
- ScopeInfo<PreallocatedStorage> info(*scope_info);
-
// Print the parameters.
int parameters_count = ComputeParametersCount();
for (int i = 0; i < parameters_count; i++) {
@@ -1043,8 +1080,8 @@ void JavaScriptFrame::Print(StringStream* accumulator,
// If we have a name for the parameter we print it. Nameless
// parameters are either because we have more actual parameters
// than formal parameters or because we have no scope information.
- if (i < info.number_of_parameters()) {
- accumulator->PrintName(*info.parameter_name(i));
+ if (i < scope_info->ParameterCount()) {
+ accumulator->PrintName(scope_info->ParameterName(i));
accumulator->Add("=");
}
accumulator->Add("%o", GetParameter(i));
@@ -1062,8 +1099,8 @@ void JavaScriptFrame::Print(StringStream* accumulator,
accumulator->Add(" {\n");
// Compute the number of locals and expression stack elements.
- int stack_locals_count = info.number_of_stack_slots();
- int heap_locals_count = info.number_of_context_slots();
+ int stack_locals_count = scope_info->StackLocalCount();
+ int heap_locals_count = scope_info->ContextLocalCount();
int expressions_count = ComputeExpressionsCount();
// Print stack-allocated local variables.
@@ -1072,7 +1109,7 @@ void JavaScriptFrame::Print(StringStream* accumulator,
}
for (int i = 0; i < stack_locals_count; i++) {
accumulator->Add(" var ");
- accumulator->PrintName(*info.stack_slot_name(i));
+ accumulator->PrintName(scope_info->StackLocalName(i));
accumulator->Add(" = ");
if (i < expressions_count) {
accumulator->Add("%o", GetExpression(i));
@@ -1089,16 +1126,16 @@ void JavaScriptFrame::Print(StringStream* accumulator,
}
// Print heap-allocated local variables.
- if (heap_locals_count > Context::MIN_CONTEXT_SLOTS) {
+ if (heap_locals_count > 0) {
accumulator->Add(" // heap-allocated locals\n");
}
- for (int i = Context::MIN_CONTEXT_SLOTS; i < heap_locals_count; i++) {
+ for (int i = 0; i < heap_locals_count; i++) {
accumulator->Add(" var ");
- accumulator->PrintName(*info.context_slot_name(i));
+ accumulator->PrintName(scope_info->ContextLocalName(i));
accumulator->Add(" = ");
if (context != NULL) {
if (i < context->length()) {
- accumulator->Add("%o", context->get(i));
+ accumulator->Add("%o", context->get(Context::MIN_CONTEXT_SLOTS + i));
} else {
accumulator->Add(
"// warning: missing context slot - inconsistent frame?");
@@ -1167,7 +1204,7 @@ void EntryFrame::Iterate(ObjectVisitor* v) const {
StackHandlerIterator it(this, top_handler());
ASSERT(!it.done());
StackHandler* handler = it.handler();
- ASSERT(handler->is_entry());
+ ASSERT(handler->is_js_entry());
handler->Iterate(v, LookupCode());
#ifdef DEBUG
// Make sure that the entry frame does not contain more than one
@@ -1265,7 +1302,7 @@ Code* InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer(
Address inner_pointer) {
Heap* heap = isolate_->heap();
// Check if the inner pointer points into a large object chunk.
- LargePage* large_page = heap->lo_space()->FindPageContainingPc(inner_pointer);
+ LargePage* large_page = heap->lo_space()->FindPage(inner_pointer);
if (large_page != NULL) {
return GcSafeCastToCode(large_page->GetObject(), inner_pointer);
}
@@ -1299,7 +1336,8 @@ InnerPointerToCodeCache::InnerPointerToCodeCacheEntry*
isolate_->counters()->pc_to_code()->Increment();
ASSERT(IsPowerOf2(kInnerPointerToCodeCacheSize));
uint32_t hash = ComputeIntegerHash(
- static_cast<uint32_t>(reinterpret_cast<uintptr_t>(inner_pointer)));
+ static_cast<uint32_t>(reinterpret_cast<uintptr_t>(inner_pointer)),
+ v8::internal::kZeroHashSeed);
uint32_t index = hash & (kInnerPointerToCodeCacheSize - 1);
InnerPointerToCodeCacheEntry* entry = cache(index);
if (entry->inner_pointer == inner_pointer) {
@@ -1331,24 +1369,23 @@ int NumRegs(RegList reglist) {
struct JSCallerSavedCodeData {
- JSCallerSavedCodeData() {
- int i = 0;
- for (int r = 0; r < kNumRegs; r++)
- if ((kJSCallerSaved & (1 << r)) != 0)
- reg_code[i++] = r;
-
- ASSERT(i == kNumJSCallerSaved);
- }
int reg_code[kNumJSCallerSaved];
};
+JSCallerSavedCodeData caller_saved_code_data;
-static const JSCallerSavedCodeData kCallerSavedCodeData;
+void SetUpJSCallerSavedCodeData() {
+ int i = 0;
+ for (int r = 0; r < kNumRegs; r++)
+ if ((kJSCallerSaved & (1 << r)) != 0)
+ caller_saved_code_data.reg_code[i++] = r;
+ ASSERT(i == kNumJSCallerSaved);
+}
int JSCallerSavedCode(int n) {
ASSERT(0 <= n && n < kNumJSCallerSaved);
- return kCallerSavedCodeData.reg_code[n];
+ return caller_saved_code_data.reg_code[n];
}
diff --git a/src/3rdparty/v8/src/frames.h b/src/3rdparty/v8/src/frames.h
index 778b803..7178bd4 100644
--- a/src/3rdparty/v8/src/frames.h
+++ b/src/3rdparty/v8/src/frames.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -40,6 +40,8 @@ typedef uint32_t RegList;
// Get the number of registers in a given register list.
int NumRegs(RegList list);
+void SetUpJSCallerSavedCodeData();
+
// Return the code of the n-th saved register available to JavaScript.
int JSCallerSavedCode(int n);
@@ -84,12 +86,19 @@ class InnerPointerToCodeCache {
class StackHandler BASE_EMBEDDED {
public:
- enum State {
- ENTRY,
- TRY_CATCH,
- TRY_FINALLY
+ enum Kind {
+ JS_ENTRY,
+ CATCH,
+ FINALLY,
+ LAST_KIND = FINALLY
};
+ static const int kKindWidth = 2;
+ STATIC_ASSERT(LAST_KIND < (1 << kKindWidth));
+ static const int kIndexWidth = 32 - kKindWidth;
+ class KindField: public BitField<StackHandler::Kind, 0, kKindWidth> {};
+ class IndexField: public BitField<unsigned, kKindWidth, kIndexWidth> {};
+
// Get the address of this stack handler.
inline Address address() const;
@@ -106,16 +115,16 @@ class StackHandler BASE_EMBEDDED {
static inline StackHandler* FromAddress(Address address);
// Testers
- inline bool is_entry() const;
- inline bool is_try_catch() const;
- inline bool is_try_finally() const;
+ inline bool is_js_entry() const;
+ inline bool is_catch() const;
+ inline bool is_finally() const;
private:
// Accessors.
- inline State state() const;
+ inline Kind kind() const;
inline Object** context_address() const;
- inline Address* pc_address() const;
+ inline Object** code_address() const;
DISALLOW_IMPLICIT_CONSTRUCTORS(StackHandler);
};
@@ -234,6 +243,11 @@ class StackFrame BASE_EMBEDDED {
virtual void Iterate(ObjectVisitor* v) const = 0;
static void IteratePc(ObjectVisitor* v, Address* pc_address, Code* holder);
+ // Sets a callback function for return-address rewriting profilers
+ // to resolve the location of a return address to the location of the
+ // profiler's stashed return address.
+ static void SetReturnAddressLocationResolver(
+ ReturnAddressLocationResolver resolver);
// Printing support.
enum PrintMode { OVERVIEW, DETAILS };
diff --git a/src/3rdparty/v8/src/full-codegen.cc b/src/3rdparty/v8/src/full-codegen.cc
index effa4b9..449c5d2 100644
--- a/src/3rdparty/v8/src/full-codegen.cc
+++ b/src/3rdparty/v8/src/full-codegen.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -51,7 +51,37 @@ void BreakableStatementChecker::Check(Expression* expr) {
}
-void BreakableStatementChecker::VisitDeclaration(Declaration* decl) {
+void BreakableStatementChecker::VisitVariableDeclaration(
+ VariableDeclaration* decl) {
+}
+
+void BreakableStatementChecker::VisitFunctionDeclaration(
+ FunctionDeclaration* decl) {
+}
+
+void BreakableStatementChecker::VisitModuleDeclaration(
+ ModuleDeclaration* decl) {
+}
+
+void BreakableStatementChecker::VisitImportDeclaration(
+ ImportDeclaration* decl) {
+}
+
+void BreakableStatementChecker::VisitExportDeclaration(
+ ExportDeclaration* decl) {
+}
+
+
+void BreakableStatementChecker::VisitModuleLiteral(ModuleLiteral* module) {
+}
+
+void BreakableStatementChecker::VisitModuleVariable(ModuleVariable* module) {
+}
+
+void BreakableStatementChecker::VisitModulePath(ModulePath* module) {
+}
+
+void BreakableStatementChecker::VisitModuleUrl(ModuleUrl* module) {
}
@@ -273,8 +303,8 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
masm.positions_recorder()->StartGDBJITLineInfoRecording();
#endif
- FullCodeGenerator cgen(&masm);
- cgen.Generate(info);
+ FullCodeGenerator cgen(&masm, info);
+ cgen.Generate();
if (cgen.HasStackOverflow()) {
ASSERT(!isolate->has_pending_exception());
return false;
@@ -283,15 +313,20 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
Code::Flags flags = Code::ComputeFlags(Code::FUNCTION);
Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
- code->set_optimizable(info->IsOptimizable());
+ code->set_optimizable(info->IsOptimizable() &&
+ !info->function()->flags()->Contains(kDontOptimize));
cgen.PopulateDeoptimizationData(code);
+ cgen.PopulateTypeFeedbackInfo(code);
+ cgen.PopulateTypeFeedbackCells(code);
code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
+ code->set_handler_table(*cgen.handler_table());
#ifdef ENABLE_DEBUGGER_SUPPORT
code->set_has_debug_break_slots(
info->isolate()->debugger()->IsDebuggerActive());
code->set_compiled_optimizable(info->IsOptimizable());
#endif // ENABLE_DEBUGGER_SUPPORT
code->set_allow_osr_at_loop_nesting_level(0);
+ code->set_profiler_ticks(0);
code->set_stack_check_table_offset(table_offset);
CodeGenerator::PrintCode(code, info);
info->SetCode(code); // May be an empty handle.
@@ -328,8 +363,7 @@ void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
ASSERT(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty());
if (!info_->HasDeoptimizationSupport()) return;
int length = bailout_entries_.length();
- Handle<DeoptimizationOutputData> data =
- isolate()->factory()->
+ Handle<DeoptimizationOutputData> data = isolate()->factory()->
NewDeoptimizationOutputData(length, TENURED);
for (int i = 0; i < length; i++) {
data->SetAstId(i, Smi::FromInt(bailout_entries_[i].id));
@@ -339,6 +373,30 @@ void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
}
+void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
+ Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
+ info->set_ic_total_count(ic_total_count_);
+ ASSERT(!isolate()->heap()->InNewSpace(*info));
+ code->set_type_feedback_info(*info);
+}
+
+
+void FullCodeGenerator::PopulateTypeFeedbackCells(Handle<Code> code) {
+ if (type_feedback_cells_.is_empty()) return;
+ int length = type_feedback_cells_.length();
+ int array_size = TypeFeedbackCells::LengthOfFixedArray(length);
+ Handle<TypeFeedbackCells> cache = Handle<TypeFeedbackCells>::cast(
+ isolate()->factory()->NewFixedArray(array_size, TENURED));
+ for (int i = 0; i < length; i++) {
+ cache->SetAstId(i, Smi::FromInt(type_feedback_cells_[i].ast_id));
+ cache->SetCell(i, *type_feedback_cells_[i].cell);
+ }
+ TypeFeedbackInfo::cast(code->type_feedback_info())->set_type_feedback_cells(
+ *cache);
+}
+
+
+
void FullCodeGenerator::PrepareForBailout(Expression* node, State state) {
PrepareForBailoutForId(node->id(), state);
}
@@ -361,20 +419,23 @@ void FullCodeGenerator::RecordJSReturnSite(Call* call) {
}
-void FullCodeGenerator::PrepareForBailoutForId(int id, State state) {
+void FullCodeGenerator::PrepareForBailoutForId(unsigned id, State state) {
// There's no need to prepare this code for bailouts from already optimized
// code or code that can't be optimized.
- if (!FLAG_deopt || !info_->HasDeoptimizationSupport()) return;
+ if (!info_->HasDeoptimizationSupport()) return;
unsigned pc_and_state =
StateField::encode(state) | PcField::encode(masm_->pc_offset());
+ ASSERT(Smi::IsValid(pc_and_state));
BailoutEntry entry = { id, pc_and_state };
#ifdef DEBUG
- // Assert that we don't have multiple bailout entries for the same node.
- for (int i = 0; i < bailout_entries_.length(); i++) {
- if (bailout_entries_.at(i).id == entry.id) {
- AstPrinter printer;
- PrintF("%s", printer.PrintProgram(info_->function()));
- UNREACHABLE();
+ if (FLAG_enable_slow_asserts) {
+ // Assert that we don't have multiple bailout entries for the same node.
+ for (int i = 0; i < bailout_entries_.length(); i++) {
+ if (bailout_entries_.at(i).id == entry.id) {
+ AstPrinter printer;
+ PrintF("%s", printer.PrintProgram(info_->function()));
+ UNREACHABLE();
+ }
}
}
#endif // DEBUG
@@ -382,10 +443,18 @@ void FullCodeGenerator::PrepareForBailoutForId(int id, State state) {
}
-void FullCodeGenerator::RecordStackCheck(int ast_id) {
+void FullCodeGenerator::RecordTypeFeedbackCell(
+ unsigned id, Handle<JSGlobalPropertyCell> cell) {
+ TypeFeedbackCellEntry entry = { id, cell };
+ type_feedback_cells_.Add(entry);
+}
+
+
+void FullCodeGenerator::RecordStackCheck(unsigned ast_id) {
// The pc offset does not need to be encoded and packed together with a
// state.
- BailoutEntry entry = { ast_id, masm_->pc_offset() };
+ ASSERT(masm_->pc_offset() > 0);
+ BailoutEntry entry = { ast_id, static_cast<unsigned>(masm_->pc_offset()) };
stack_checks_.Add(entry);
}
@@ -499,24 +568,24 @@ void FullCodeGenerator::DoTest(const TestContext* context) {
void FullCodeGenerator::VisitDeclarations(
ZoneList<Declaration*>* declarations) {
- int length = declarations->length();
- int global_count = 0;
- for (int i = 0; i < length; i++) {
- Declaration* decl = declarations->at(i);
- EmitDeclaration(decl->proxy(), decl->mode(), decl->fun(), &global_count);
- }
+ int save_global_count = global_count_;
+ global_count_ = 0;
+
+ AstVisitor::VisitDeclarations(declarations);
// Batch declare global functions and variables.
- if (global_count > 0) {
+ if (global_count_ > 0) {
Handle<FixedArray> array =
- isolate()->factory()->NewFixedArray(3 * global_count, TENURED);
+ isolate()->factory()->NewFixedArray(2 * global_count_, TENURED);
+ int length = declarations->length();
for (int j = 0, i = 0; i < length; i++) {
Declaration* decl = declarations->at(i);
Variable* var = decl->proxy()->var();
if (var->IsUnallocated()) {
array->set(j++, *(var->name()));
- if (decl->fun() == NULL) {
+ FunctionDeclaration* fun_decl = decl->AsFunctionDeclaration();
+ if (fun_decl == NULL) {
if (var->binding_needs_init()) {
// In case this binding needs initialization use the hole.
array->set_the_hole(j++);
@@ -525,7 +594,7 @@ void FullCodeGenerator::VisitDeclarations(
}
} else {
Handle<SharedFunctionInfo> function =
- Compiler::BuildFunctionInfo(decl->fun(), script());
+ Compiler::BuildFunctionInfo(fun_decl->fun(), script());
// Check for stack-overflow exception.
if (function.is_null()) {
SetStackOverflow();
@@ -533,21 +602,67 @@ void FullCodeGenerator::VisitDeclarations(
}
array->set(j++, *function);
}
- array->set(j++, Smi::FromInt(var->is_qml_global()));
}
}
// Invoke the platform-dependent code generator to do the actual
// declaration the global functions and variables.
DeclareGlobals(array);
}
+
+ global_count_ = save_global_count;
+}
+
+
+void FullCodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) {
+ EmitDeclaration(decl->proxy(), decl->mode(), NULL);
+}
+
+
+void FullCodeGenerator::VisitFunctionDeclaration(FunctionDeclaration* decl) {
+ EmitDeclaration(decl->proxy(), decl->mode(), decl->fun());
+}
+
+
+void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* decl) {
+ EmitDeclaration(decl->proxy(), decl->mode(), NULL);
+}
+
+
+void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* decl) {
+ EmitDeclaration(decl->proxy(), decl->mode(), NULL);
+}
+
+
+void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* decl) {
+ // TODO(rossberg)
+}
+
+
+void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
+ // TODO(rossberg)
+}
+
+
+void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
+ // TODO(rossberg)
+}
+
+
+void FullCodeGenerator::VisitModulePath(ModulePath* module) {
+ // TODO(rossberg)
+}
+
+
+void FullCodeGenerator::VisitModuleUrl(ModuleUrl* decl) {
+ // TODO(rossberg)
}
int FullCodeGenerator::DeclareGlobalsFlags() {
- ASSERT(DeclareGlobalsStrictModeFlag::is_valid(strict_mode_flag()));
+ ASSERT(DeclareGlobalsLanguageMode::is_valid(language_mode()));
return DeclareGlobalsEvalFlag::encode(is_eval()) |
- DeclareGlobalsStrictModeFlag::encode(strict_mode_flag()) |
- DeclareGlobalsNativeFlag::encode(is_native());
+ DeclareGlobalsNativeFlag::encode(is_native()) |
+ DeclareGlobalsLanguageMode::encode(language_mode());
}
@@ -799,9 +914,8 @@ void FullCodeGenerator::VisitBlock(Block* stmt) {
if (stmt->block_scope() != NULL) {
{ Comment cmnt(masm_, "[ Extend block context");
scope_ = stmt->block_scope();
- Handle<SerializedScopeInfo> scope_info = scope_->GetSerializedScopeInfo();
- int heap_slots =
- scope_info->NumberOfContextSlots() - Context::MIN_CONTEXT_SLOTS;
+ Handle<ScopeInfo> scope_info = scope_->GetScopeInfo();
+ int heap_slots = scope_info->ContextLength() - Context::MIN_CONTEXT_SLOTS;
__ Push(scope_info);
PushFunctionArgumentForContextAllocation();
if (heap_slots <= FastNewBlockContextStub::kMaximumSlots) {
@@ -995,7 +1109,7 @@ void FullCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
// Check stack before looping.
PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
__ bind(&stack_check);
- EmitStackCheck(stmt);
+ EmitStackCheck(stmt, &body);
__ jmp(&body);
PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
@@ -1024,7 +1138,7 @@ void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
SetStatementPosition(stmt);
// Check stack before looping.
- EmitStackCheck(stmt);
+ EmitStackCheck(stmt, &body);
__ bind(&test);
VisitForControl(stmt->cond(),
@@ -1043,6 +1157,10 @@ void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
Label test, body;
Iteration loop_statement(this, stmt);
+
+ // Set statement position for a break slot before entering the for-body.
+ SetStatementPosition(stmt);
+
if (stmt->init() != NULL) {
Visit(stmt->init());
}
@@ -1057,7 +1175,6 @@ void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
__ bind(loop_statement.continue_label());
- SetStatementPosition(stmt);
if (stmt->next() != NULL) {
Visit(stmt->next());
}
@@ -1067,7 +1184,7 @@ void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
SetStatementPosition(stmt);
// Check stack before looping.
- EmitStackCheck(stmt);
+ EmitStackCheck(stmt, &body);
__ bind(&test);
if (stmt->cond() != NULL) {
@@ -1088,20 +1205,17 @@ void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
Comment cmnt(masm_, "[ TryCatchStatement");
SetStatementPosition(stmt);
- // The try block adds a handler to the exception handler chain
- // before entering, and removes it again when exiting normally.
- // If an exception is thrown during execution of the try block,
- // control is passed to the handler, which also consumes the handler.
- // At this point, the exception is in a register, and store it in
- // the temporary local variable (prints as ".catch-var") before
- // executing the catch block. The catch block has been rewritten
- // to introduce a new scope to bind the catch variable and to remove
- // that scope again afterwards.
-
- Label try_handler_setup, done;
- __ Call(&try_handler_setup);
- // Try handler code, exception in result register.
-
+ // The try block adds a handler to the exception handler chain before
+ // entering, and removes it again when exiting normally. If an exception
+ // is thrown during execution of the try block, the handler is consumed
+ // and control is passed to the catch block with the exception in the
+ // result register.
+
+ Label try_entry, handler_entry, exit;
+ __ jmp(&try_entry);
+ __ bind(&handler_entry);
+ handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
+ // Exception handler code, the exception is in the result register.
// Extend the context before executing the catch block.
{ Comment cmnt(masm_, "[ Extend catch context");
__ Push(stmt->variable()->name());
@@ -1115,24 +1229,23 @@ void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
Scope* saved_scope = scope();
scope_ = stmt->scope();
ASSERT(scope_->declarations()->is_empty());
- { WithOrCatch body(this);
+ { WithOrCatch catch_body(this);
Visit(stmt->catch_block());
}
// Restore the context.
LoadContextField(context_register(), Context::PREVIOUS_INDEX);
StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
scope_ = saved_scope;
- __ jmp(&done);
+ __ jmp(&exit);
// Try block code. Sets up the exception handler chain.
- __ bind(&try_handler_setup);
- {
- TryCatch try_block(this);
- __ PushTryHandler(IN_JAVASCRIPT, TRY_CATCH_HANDLER);
+ __ bind(&try_entry);
+ __ PushTryHandler(StackHandler::CATCH, stmt->index());
+ { TryCatch try_body(this);
Visit(stmt->try_block());
- __ PopTryHandler();
}
- __ bind(&done);
+ __ PopTryHandler();
+ __ bind(&exit);
}
@@ -1144,12 +1257,12 @@ void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
//
// The try-finally construct can enter the finally block in three ways:
// 1. By exiting the try-block normally. This removes the try-handler and
- // calls the finally block code before continuing.
+ // calls the finally block code before continuing.
// 2. By exiting the try-block with a function-local control flow transfer
// (break/continue/return). The site of the, e.g., break removes the
// try handler and calls the finally block code before continuing
// its outward control transfer.
- // 3. by exiting the try-block with a thrown exception.
+ // 3. By exiting the try-block with a thrown exception.
// This can happen in nested function calls. It traverses the try-handler
// chain and consumes the try-handler entry before jumping to the
// handler code. The handler code then calls the finally-block before
@@ -1160,44 +1273,39 @@ void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
// exception) in the result register (rax/eax/r0), both of which must
// be preserved. The return address isn't GC-safe, so it should be
// cooked before GC.
- Label finally_entry;
- Label try_handler_setup;
-
- // Setup the try-handler chain. Use a call to
- // Jump to try-handler setup and try-block code. Use call to put try-handler
- // address on stack.
- __ Call(&try_handler_setup);
- // Try handler code. Return address of call is pushed on handler stack.
- {
- // This code is only executed during stack-handler traversal when an
- // exception is thrown. The exception is in the result register, which
- // is retained by the finally block.
- // Call the finally block and then rethrow the exception if it returns.
- __ Call(&finally_entry);
- __ push(result_register());
- __ CallRuntime(Runtime::kReThrow, 1);
- }
+ Label try_entry, handler_entry, finally_entry;
+
+ // Jump to try-handler setup and try-block code.
+ __ jmp(&try_entry);
+ __ bind(&handler_entry);
+ handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
+ // Exception handler code. This code is only executed when an exception
+ // is thrown. The exception is in the result register, and must be
+ // preserved by the finally block. Call the finally block and then
+ // rethrow the exception if it returns.
+ __ Call(&finally_entry);
+ __ push(result_register());
+ __ CallRuntime(Runtime::kReThrow, 1);
+ // Finally block implementation.
__ bind(&finally_entry);
- {
- // Finally block implementation.
- Finally finally_block(this);
- EnterFinallyBlock();
+ EnterFinallyBlock();
+ { Finally finally_body(this);
Visit(stmt->finally_block());
- ExitFinallyBlock(); // Return to the calling code.
}
+ ExitFinallyBlock(); // Return to the calling code.
- __ bind(&try_handler_setup);
- {
- // Setup try handler (stack pointer registers).
- TryFinally try_block(this, &finally_entry);
- __ PushTryHandler(IN_JAVASCRIPT, TRY_FINALLY_HANDLER);
+ // Set up try handler.
+ __ bind(&try_entry);
+ __ PushTryHandler(StackHandler::FINALLY, stmt->index());
+ { TryFinally try_body(this, &finally_entry);
Visit(stmt->try_block());
- __ PopTryHandler();
}
+ __ PopTryHandler();
// Execute the finally block on the way out. Clobber the unpredictable
- // value in the accumulator with one that's safe for GC. The finally
- // block will unconditionally preserve the accumulator on the stack.
+ // value in the result register with one that's safe for GC because the
+ // finally block will unconditionally preserve the result register on the
+ // stack.
ClearAccumulator();
__ Call(&finally_entry);
}
@@ -1293,7 +1401,7 @@ FullCodeGenerator::NestedStatement* FullCodeGenerator::TryCatch::Exit(
bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
- Expression *sub_expr;
+ Expression* sub_expr;
Handle<String> check;
if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
EmitLiteralCompareTypeof(expr, sub_expr, check);
diff --git a/src/3rdparty/v8/src/full-codegen.h b/src/3rdparty/v8/src/full-codegen.h
index 4818040..a308d83 100644
--- a/src/3rdparty/v8/src/full-codegen.h
+++ b/src/3rdparty/v8/src/full-codegen.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -77,24 +77,27 @@ class FullCodeGenerator: public AstVisitor {
TOS_REG
};
- explicit FullCodeGenerator(MacroAssembler* masm)
+ FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info)
: masm_(masm),
- info_(NULL),
- scope_(NULL),
+ info_(info),
+ scope_(info->scope()),
nesting_stack_(NULL),
loop_depth_(0),
+ global_count_(0),
context_(NULL),
- bailout_entries_(0),
- stack_checks_(2) { // There's always at least one.
- }
+ bailout_entries_(info->HasDeoptimizationSupport()
+ ? info->function()->ast_node_count() : 0),
+ stack_checks_(2), // There's always at least one.
+ type_feedback_cells_(info->HasDeoptimizationSupport()
+ ? info->function()->ast_node_count() : 0),
+ ic_total_count_(0) { }
static bool MakeCode(CompilationInfo* info);
- void Generate(CompilationInfo* info);
- void PopulateDeoptimizationData(Handle<Code> code);
-
- class StateField : public BitField<State, 0, 8> { };
- class PcField : public BitField<unsigned, 8, 32-8> { };
+ // Encode state and pc-offset as a BitField<type, start, size>.
+ // Only use 30 bits because we encode the result as a smi.
+ class StateField : public BitField<State, 0, 1> { };
+ class PcField : public BitField<unsigned, 1, 30-1> { };
static const char* State2String(State state) {
switch (state) {
@@ -140,11 +143,13 @@ class FullCodeGenerator: public AstVisitor {
return previous_;
}
- protected:
+ protected:
MacroAssembler* masm() { return codegen_->masm(); }
FullCodeGenerator* codegen_;
NestedStatement* previous_;
+
+ private:
DISALLOW_COPY_AND_ASSIGN(NestedStatement);
};
@@ -388,7 +393,11 @@ class FullCodeGenerator: public AstVisitor {
// Bailout support.
void PrepareForBailout(Expression* node, State state);
- void PrepareForBailoutForId(int id, State state);
+ void PrepareForBailoutForId(unsigned id, State state);
+
+ // Cache cell support. This associates AST ids with global property cells
+ // that will be cleared during GC and collected by the type-feedback oracle.
+ void RecordTypeFeedbackCell(unsigned id, Handle<JSGlobalPropertyCell> cell);
// Record a call's return site offset, used to rebuild the frame if the
// called function was inlined at the site.
@@ -406,20 +415,26 @@ class FullCodeGenerator: public AstVisitor {
// Platform-specific code for a variable, constant, or function
// declaration. Functions have an initial value.
+ // Increments global_count_ for unallocated variables.
void EmitDeclaration(VariableProxy* proxy,
VariableMode mode,
- FunctionLiteral* function,
- int* global_count);
+ FunctionLiteral* function);
// Platform-specific code for checking the stack limit at the back edge of
// a loop.
- void EmitStackCheck(IterationStatement* stmt);
+ // This is meant to be called at loop back edges, |back_edge_target| is
+ // the jump target of the back edge and is used to approximate the amount
+ // of code inside the loop.
+ void EmitStackCheck(IterationStatement* stmt, Label* back_edge_target);
// Record the OSR AST id corresponding to a stack check in the code.
- void RecordStackCheck(int osr_ast_id);
+ void RecordStackCheck(unsigned osr_ast_id);
// Emit a table of stack check ids and pcs into the code stream. Return
// the offset of the start of the table.
unsigned EmitStackCheckTable();
+ void EmitProfilingCounterDecrement(int delta);
+ void EmitProfilingCounterReset();
+
// Platform-specific return sequence
void EmitReturnSequence();
@@ -450,6 +465,8 @@ class FullCodeGenerator: public AstVisitor {
Label* done);
void EmitVariableLoad(VariableProxy* proxy);
+ void EmitAccessor(Expression* expression);
+
// Expects the arguments and the function already pushed.
void EmitResolvePossiblyDirectEval(int arg_count);
@@ -483,7 +500,7 @@ class FullCodeGenerator: public AstVisitor {
// Assign to the given expression as if via '='. The right-hand-side value
// is expected in the accumulator.
- void EmitAssignment(Expression* expr, int bailout_ast_id);
+ void EmitAssignment(Expression* expr);
// Complete a variable assignment. The right-hand-side value is expected
// in the accumulator.
@@ -499,6 +516,10 @@ class FullCodeGenerator: public AstVisitor {
// accumulator.
void EmitKeyedPropertyAssignment(Assignment* expr);
+ void CallIC(Handle<Code> code,
+ RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
+ unsigned ast_id = kNoASTId);
+
void SetFunctionPosition(FunctionLiteral* fun);
void SetReturnPosition(FunctionLiteral* fun);
void SetStatementPosition(Statement* stmt);
@@ -527,13 +548,12 @@ class FullCodeGenerator: public AstVisitor {
Handle<Script> script() { return info_->script(); }
bool is_eval() { return info_->is_eval(); }
bool is_native() { return info_->is_native(); }
- bool is_strict_mode() {
- return strict_mode_flag() == kStrictMode;
+ bool is_classic_mode() {
+ return language_mode() == CLASSIC_MODE;
}
- StrictModeFlag strict_mode_flag() {
- return function()->strict_mode_flag();
+ LanguageMode language_mode() {
+ return function()->language_mode();
}
- bool is_qml_mode() { return function()->qml_mode(); }
FunctionLiteral* function() { return info_->function(); }
Scope* scope() { return scope_; }
@@ -565,11 +585,23 @@ class FullCodeGenerator: public AstVisitor {
void VisitForTypeofValue(Expression* expr);
+ void Generate();
+ void PopulateDeoptimizationData(Handle<Code> code);
+ void PopulateTypeFeedbackInfo(Handle<Code> code);
+ void PopulateTypeFeedbackCells(Handle<Code> code);
+
+ Handle<FixedArray> handler_table() { return handler_table_; }
+
struct BailoutEntry {
unsigned id;
unsigned pc_and_state;
};
+ struct TypeFeedbackCellEntry {
+ unsigned ast_id;
+ Handle<JSGlobalPropertyCell> cell;
+ };
+
class ExpressionContext BASE_EMBEDDED {
public:
@@ -617,8 +649,8 @@ class FullCodeGenerator: public AstVisitor {
Label** if_false,
Label** fall_through) const = 0;
- // Returns true if we are evaluating only for side effects (ie if the result
- // will be discarded).
+ // Returns true if we are evaluating only for side effects (i.e. if the
+ // result will be discarded).
virtual bool IsEffect() const { return false; }
// Returns true if we are evaluating for the value (in accu/on stack).
@@ -753,9 +785,14 @@ class FullCodeGenerator: public AstVisitor {
Label return_label_;
NestedStatement* nesting_stack_;
int loop_depth_;
+ int global_count_;
const ExpressionContext* context_;
ZoneList<BailoutEntry> bailout_entries_;
ZoneList<BailoutEntry> stack_checks_;
+ ZoneList<TypeFeedbackCellEntry> type_feedback_cells_;
+ int ic_total_count_;
+ Handle<FixedArray> handler_table_;
+ Handle<JSGlobalPropertyCell> profiling_counter_;
friend class NestedStatement;
@@ -763,6 +800,28 @@ class FullCodeGenerator: public AstVisitor {
};
+// A map from property names to getter/setter pairs allocated in the zone.
+class AccessorTable: public TemplateHashMap<Literal,
+ ObjectLiteral::Accessors,
+ ZoneListAllocationPolicy> {
+ public:
+ explicit AccessorTable(Zone* zone) :
+ TemplateHashMap<Literal,
+ ObjectLiteral::Accessors,
+ ZoneListAllocationPolicy>(Literal::Match),
+ zone_(zone) { }
+
+ Iterator lookup(Literal* literal) {
+ Iterator it = find(literal, true);
+ if (it->second == NULL) it->second = new(zone_) ObjectLiteral::Accessors();
+ return it;
+ }
+
+ private:
+ Zone* zone_;
+};
+
+
} } // namespace v8::internal
#endif // V8_FULL_CODEGEN_H_
diff --git a/src/3rdparty/v8/src/gdb-jit.cc b/src/3rdparty/v8/src/gdb-jit.cc
index 68cb053..d3cd447 100644
--- a/src/3rdparty/v8/src/gdb-jit.cc
+++ b/src/3rdparty/v8/src/gdb-jit.cc
@@ -33,6 +33,7 @@
#include "compiler.h"
#include "global-handles.h"
#include "messages.h"
+#include "platform.h"
#include "natives.h"
#include "scopeinfo.h"
@@ -1115,13 +1116,13 @@ class DebugInfoSection : public DebugSection {
int context_slots = scope_info.number_of_context_slots();
// The real slot ID is internal_slots + context_slot_id.
int internal_slots = Context::MIN_CONTEXT_SLOTS;
- int locals = scope_info.NumberOfLocals();
+ int locals = scope_info.LocalCount();
int current_abbreviation = 4;
for (int param = 0; param < params; ++param) {
w->WriteULEB128(current_abbreviation++);
w->WriteString(
- *scope_info.parameter_name(param)->ToCString(DISALLOW_NULLS));
+ *scope_info.ParameterName(param)->ToCString(DISALLOW_NULLS));
w->Write<uint32_t>(ty_offset);
Writer::Slot<uint32_t> block_size = w->CreateSlotHere<uint32_t>();
uintptr_t block_start = w->position();
@@ -1312,7 +1313,7 @@ class DebugAbbrevSection : public DebugSection {
int context_slots = scope_info.number_of_context_slots();
// The real slot ID is internal_slots + context_slot_id.
int internal_slots = Context::MIN_CONTEXT_SLOTS;
- int locals = scope_info.NumberOfLocals();
+ int locals = scope_info.LocalCount();
int total_children =
params + slots + context_slots + internal_slots + locals + 2;
@@ -1556,23 +1557,23 @@ class DebugLineSection : public DebugSection {
class UnwindInfoSection : public DebugSection {
public:
- explicit UnwindInfoSection(CodeDescription *desc);
- virtual bool WriteBody(Writer *w);
+ explicit UnwindInfoSection(CodeDescription* desc);
+ virtual bool WriteBody(Writer* w);
- int WriteCIE(Writer *w);
- void WriteFDE(Writer *w, int);
+ int WriteCIE(Writer* w);
+ void WriteFDE(Writer* w, int);
- void WriteFDEStateOnEntry(Writer *w);
- void WriteFDEStateAfterRBPPush(Writer *w);
- void WriteFDEStateAfterRBPSet(Writer *w);
- void WriteFDEStateAfterRBPPop(Writer *w);
+ void WriteFDEStateOnEntry(Writer* w);
+ void WriteFDEStateAfterRBPPush(Writer* w);
+ void WriteFDEStateAfterRBPSet(Writer* w);
+ void WriteFDEStateAfterRBPPop(Writer* w);
- void WriteLength(Writer *w,
+ void WriteLength(Writer* w,
Writer::Slot<uint32_t>* length_slot,
int initial_position);
private:
- CodeDescription *desc_;
+ CodeDescription* desc_;
// DWARF3 Specification, Table 7.23
enum CFIInstructions {
@@ -1623,7 +1624,7 @@ class UnwindInfoSection : public DebugSection {
};
-void UnwindInfoSection::WriteLength(Writer *w,
+void UnwindInfoSection::WriteLength(Writer* w,
Writer::Slot<uint32_t>* length_slot,
int initial_position) {
uint32_t align = (w->position() - initial_position) % kPointerSize;
@@ -1639,7 +1640,7 @@ void UnwindInfoSection::WriteLength(Writer *w,
}
-UnwindInfoSection::UnwindInfoSection(CodeDescription *desc)
+UnwindInfoSection::UnwindInfoSection(CodeDescription* desc)
#ifdef __ELF
: ELFSection(".eh_frame", TYPE_X86_64_UNWIND, 1),
#else
@@ -1648,7 +1649,7 @@ UnwindInfoSection::UnwindInfoSection(CodeDescription *desc)
#endif
desc_(desc) { }
-int UnwindInfoSection::WriteCIE(Writer *w) {
+int UnwindInfoSection::WriteCIE(Writer* w) {
Writer::Slot<uint32_t> cie_length_slot = w->CreateSlotHere<uint32_t>();
uint32_t cie_position = w->position();
@@ -1668,7 +1669,7 @@ int UnwindInfoSection::WriteCIE(Writer *w) {
}
-void UnwindInfoSection::WriteFDE(Writer *w, int cie_position) {
+void UnwindInfoSection::WriteFDE(Writer* w, int cie_position) {
// The only FDE for this function. The CFA is the current RBP.
Writer::Slot<uint32_t> fde_length_slot = w->CreateSlotHere<uint32_t>();
int fde_position = w->position();
@@ -1686,7 +1687,7 @@ void UnwindInfoSection::WriteFDE(Writer *w, int cie_position) {
}
-void UnwindInfoSection::WriteFDEStateOnEntry(Writer *w) {
+void UnwindInfoSection::WriteFDEStateOnEntry(Writer* w) {
// The first state, just after the control has been transferred to the the
// function.
@@ -1713,7 +1714,7 @@ void UnwindInfoSection::WriteFDEStateOnEntry(Writer *w) {
}
-void UnwindInfoSection::WriteFDEStateAfterRBPPush(Writer *w) {
+void UnwindInfoSection::WriteFDEStateAfterRBPPush(Writer* w) {
// The second state, just after RBP has been pushed.
// RBP / CFA for this function is now the current RSP, so just set the
@@ -1734,7 +1735,7 @@ void UnwindInfoSection::WriteFDEStateAfterRBPPush(Writer *w) {
}
-void UnwindInfoSection::WriteFDEStateAfterRBPSet(Writer *w) {
+void UnwindInfoSection::WriteFDEStateAfterRBPSet(Writer* w) {
// The third state, after the RBP has been set.
// The CFA can now directly be set to RBP.
@@ -1749,7 +1750,7 @@ void UnwindInfoSection::WriteFDEStateAfterRBPSet(Writer *w) {
}
-void UnwindInfoSection::WriteFDEStateAfterRBPPop(Writer *w) {
+void UnwindInfoSection::WriteFDEStateAfterRBPPop(Writer* w) {
// The fourth (final) state. The RBP has been popped (just before issuing a
// return).
@@ -1769,7 +1770,7 @@ void UnwindInfoSection::WriteFDEStateAfterRBPPop(Writer *w) {
}
-bool UnwindInfoSection::WriteBody(Writer *w) {
+bool UnwindInfoSection::WriteBody(Writer* w) {
uint32_t cie_position = WriteCIE(w);
WriteFDE(w, cie_position);
return true;
@@ -1810,8 +1811,8 @@ extern "C" {
struct JITDescriptor {
uint32_t version_;
uint32_t action_flag_;
- JITCodeEntry *relevant_entry_;
- JITCodeEntry *first_entry_;
+ JITCodeEntry* relevant_entry_;
+ JITCodeEntry* first_entry_;
};
// GDB will place breakpoint into this function.
@@ -1998,7 +1999,7 @@ void GDBJITInterface::AddCode(Handle<String> name,
}
}
-static void AddUnwindInfo(CodeDescription *desc) {
+static void AddUnwindInfo(CodeDescription* desc) {
#ifdef V8_TARGET_ARCH_X64
if (desc->tag() == GDBJITInterface::FUNCTION) {
// To avoid propagating unwinding information through
@@ -2035,7 +2036,7 @@ static void AddUnwindInfo(CodeDescription *desc) {
}
-Mutex* GDBJITInterface::mutex_ = OS::CreateMutex();
+static LazyMutex mutex = LAZY_MUTEX_INITIALIZER;
void GDBJITInterface::AddCode(const char* name,
@@ -2045,7 +2046,7 @@ void GDBJITInterface::AddCode(const char* name,
CompilationInfo* info) {
if (!FLAG_gdbjit) return;
- ScopedLock lock(mutex_);
+ ScopedLock lock(mutex.Pointer());
AssertNoAllocation no_gc;
HashMap::Entry* e = GetEntries()->Lookup(code, HashForCodeObject(code), true);
@@ -2126,7 +2127,7 @@ void GDBJITInterface::AddCode(GDBJITInterface::CodeTag tag, Code* code) {
void GDBJITInterface::RemoveCode(Code* code) {
if (!FLAG_gdbjit) return;
- ScopedLock lock(mutex_);
+ ScopedLock lock(mutex.Pointer());
HashMap::Entry* e = GetEntries()->Lookup(code,
HashForCodeObject(code),
false);
@@ -2146,7 +2147,7 @@ void GDBJITInterface::RemoveCode(Code* code) {
void GDBJITInterface::RegisterDetailedLineInfo(Code* code,
GDBJITLineInfo* line_info) {
- ScopedLock lock(mutex_);
+ ScopedLock lock(mutex.Pointer());
ASSERT(!IsLineInfoTagged(line_info));
HashMap::Entry* e = GetEntries()->Lookup(code, HashForCodeObject(code), true);
ASSERT(e->value == NULL);
diff --git a/src/3rdparty/v8/src/gdb-jit.h b/src/3rdparty/v8/src/gdb-jit.h
index 2cf15bc..0eca938 100644
--- a/src/3rdparty/v8/src/gdb-jit.h
+++ b/src/3rdparty/v8/src/gdb-jit.h
@@ -132,9 +132,6 @@ class GDBJITInterface: public AllStatic {
static void RemoveCode(Code* code);
static void RegisterDetailedLineInfo(Code* code, GDBJITLineInfo* line_info);
-
- private:
- static Mutex* mutex_;
};
#define GDBJIT(action) GDBJITInterface::action
diff --git a/src/3rdparty/v8/src/global-handles.cc b/src/3rdparty/v8/src/global-handles.cc
index 87066fa..9c0ad45 100644
--- a/src/3rdparty/v8/src/global-handles.cc
+++ b/src/3rdparty/v8/src/global-handles.cc
@@ -232,7 +232,7 @@ class GlobalHandles::Node {
VMState state(isolate, EXTERNAL);
func(object, par);
}
- // Absense of explicit cleanup or revival of weak handle
+ // Absence of explicit cleanup or revival of weak handle
// in most of the cases would lead to memory leak.
ASSERT(state_ != NEAR_DEATH);
return true;
@@ -384,6 +384,7 @@ GlobalHandles::GlobalHandles(Isolate* isolate)
: isolate_(isolate),
number_of_weak_handles_(0),
number_of_global_object_weak_handles_(0),
+ number_of_global_handles_(0),
first_block_(NULL),
first_used_block_(NULL),
first_free_(NULL),
@@ -403,6 +404,7 @@ GlobalHandles::~GlobalHandles() {
Handle<Object> GlobalHandles::Create(Object* value) {
isolate_->counters()->global_handles()->Increment();
+ number_of_global_handles_++;
if (first_free_ == NULL) {
first_block_ = new NodeBlock(first_block_);
first_block_->PutNodesOnFreeList(&first_free_);
@@ -423,6 +425,7 @@ Handle<Object> GlobalHandles::Create(Object* value) {
void GlobalHandles::Destroy(Object** location) {
isolate_->counters()->global_handles()->Decrement();
+ number_of_global_handles_--;
if (location == NULL) return;
Node::FromLocation(location)->Release(this);
}
diff --git a/src/3rdparty/v8/src/global-handles.h b/src/3rdparty/v8/src/global-handles.h
index 153d4da..ddf5fe2 100644
--- a/src/3rdparty/v8/src/global-handles.h
+++ b/src/3rdparty/v8/src/global-handles.h
@@ -143,6 +143,11 @@ class GlobalHandles {
return number_of_global_object_weak_handles_;
}
+ // Returns the current number of handles to global objects.
+ int NumberOfGlobalHandles() {
+ return number_of_global_handles_;
+ }
+
// Clear the weakness of a global handle.
void ClearWeakness(Object** location);
@@ -248,6 +253,9 @@ class GlobalHandles {
// number_of_weak_handles_.
int number_of_global_object_weak_handles_;
+ // Field always containing the number of handles to global objects.
+ int number_of_global_handles_;
+
// List of all allocated node blocks.
NodeBlock* first_block_;
diff --git a/src/3rdparty/v8/src/globals.h b/src/3rdparty/v8/src/globals.h
index a532039..25d4ffe 100644
--- a/src/3rdparty/v8/src/globals.h
+++ b/src/3rdparty/v8/src/globals.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -267,8 +267,9 @@ const int kBinary32ExponentShift = 23;
// other bits set.
const uint64_t kQuietNaNMask = static_cast<uint64_t>(0xfff) << 51;
-// ASCII/UC16 constants
+// ASCII/UTF-16 constants
// Code-point values in Unicode 4.0 are 21 bits wide.
+// Code units in UTF-16 are 16 bits wide.
typedef uint16_t uc16;
typedef int32_t uc32;
const int kASCIISize = kCharSize;
@@ -299,7 +300,7 @@ const uint32_t kMaxAsciiCharCodeU = 0x7fu;
// The USE(x) template is used to silence C++ compiler warnings
// issued for (yet) unused variables (typically parameters).
template <typename T>
-static inline void USE(T) { }
+inline void USE(T) { }
// FUNCTION_ADDR(f) gets the address of a C function f.
@@ -366,7 +367,30 @@ template <typename T, class P = FreeStoreAllocationPolicy> class List;
// -----------------------------------------------------------------------------
// Declarations for use in both the preparser and the rest of V8.
+// The different language modes that V8 implements. ES5 defines two language
+// modes: an unrestricted mode respectively a strict mode which are indicated by
+// CLASSIC_MODE respectively STRICT_MODE in the enum. The harmony spec drafts
+// for the next ES standard specify a new third mode which is called 'extended
+// mode'. The extended mode is only available if the harmony flag is set. It is
+// based on the 'strict mode' and adds new functionality to it. This means that
+// most of the semantics of these two modes coincide.
+//
+// In the current draft the term 'base code' is used to refer to code that is
+// neither in strict nor extended mode. However, the more distinguishing term
+// 'classic mode' is used in V8 instead to avoid mix-ups.
+
+enum LanguageMode {
+ CLASSIC_MODE,
+ STRICT_MODE,
+ EXTENDED_MODE
+};
+
+
// The Strict Mode (ECMA-262 5th edition, 4.2.2).
+//
+// This flag is used in the backend to represent the language mode. So far
+// there is no semantic difference between the strict and the extended mode in
+// the backend, so both modes are represented by the kStrictMode value.
enum StrictModeFlag {
kNonStrictMode,
kStrictMode
diff --git a/src/3rdparty/v8/src/handles.cc b/src/3rdparty/v8/src/handles.cc
index c2a173d..416ecbd 100644
--- a/src/3rdparty/v8/src/handles.cc
+++ b/src/3rdparty/v8/src/handles.cc
@@ -208,42 +208,6 @@ void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared,
}
-void NormalizeProperties(Handle<JSObject> object,
- PropertyNormalizationMode mode,
- int expected_additional_properties) {
- CALL_HEAP_FUNCTION_VOID(object->GetIsolate(),
- object->NormalizeProperties(
- mode,
- expected_additional_properties));
-}
-
-
-Handle<NumberDictionary> NormalizeElements(Handle<JSObject> object) {
- CALL_HEAP_FUNCTION(object->GetIsolate(),
- object->NormalizeElements(),
- NumberDictionary);
-}
-
-
-void TransformToFastProperties(Handle<JSObject> object,
- int unused_property_fields) {
- CALL_HEAP_FUNCTION_VOID(
- object->GetIsolate(),
- object->TransformToFastProperties(unused_property_fields));
-}
-
-
-Handle<NumberDictionary> NumberDictionarySet(
- Handle<NumberDictionary> dictionary,
- uint32_t index,
- Handle<Object> value,
- PropertyDetails details) {
- CALL_HEAP_FUNCTION(dictionary->GetIsolate(),
- dictionary->Set(index, *value, details),
- NumberDictionary);
-}
-
-
void FlattenString(Handle<String> string) {
CALL_HEAP_FUNCTION_VOID(string->GetIsolate(), string->TryFlatten());
}
@@ -265,19 +229,6 @@ Handle<Object> SetPrototype(Handle<JSFunction> function,
}
-Handle<Object> SetProperty(Handle<JSReceiver> object,
- Handle<String> key,
- Handle<Object> value,
- PropertyAttributes attributes,
- StrictModeFlag strict_mode,
- bool skip_fallback_interceptor) {
- CALL_HEAP_FUNCTION(object->GetIsolate(),
- object->SetProperty(*key, *value, attributes, strict_mode,
- skip_fallback_interceptor),
- Object);
-}
-
-
Handle<Object> SetProperty(Handle<Object> object,
Handle<Object> key,
Handle<Object> value,
@@ -305,16 +256,6 @@ Handle<Object> ForceSetProperty(Handle<JSObject> object,
}
-Handle<Object> SetNormalizedProperty(Handle<JSObject> object,
- Handle<String> key,
- Handle<Object> value,
- PropertyDetails details) {
- CALL_HEAP_FUNCTION(object->GetIsolate(),
- object->SetNormalizedProperty(*key, *value, details),
- Object);
-}
-
-
Handle<Object> ForceDeleteProperty(Handle<JSObject> object,
Handle<Object> key) {
Isolate* isolate = object->GetIsolate();
@@ -324,30 +265,6 @@ Handle<Object> ForceDeleteProperty(Handle<JSObject> object,
}
-Handle<Object> SetLocalPropertyIgnoreAttributes(
- Handle<JSObject> object,
- Handle<String> key,
- Handle<Object> value,
- PropertyAttributes attributes) {
- CALL_HEAP_FUNCTION(
- object->GetIsolate(),
- object->SetLocalPropertyIgnoreAttributes(*key, *value, attributes),
- Object);
-}
-
-
-void SetLocalPropertyNoThrow(Handle<JSObject> object,
- Handle<String> key,
- Handle<Object> value,
- PropertyAttributes attributes) {
- Isolate* isolate = object->GetIsolate();
- ASSERT(!isolate->has_pending_exception());
- CHECK(!SetLocalPropertyIgnoreAttributes(
- object, key, value, attributes).is_null());
- CHECK(!isolate->has_pending_exception());
-}
-
-
Handle<Object> SetPropertyWithInterceptor(Handle<JSObject> object,
Handle<String> key,
Handle<Object> value,
@@ -391,12 +308,6 @@ Handle<Object> GetPropertyWithInterceptor(Handle<JSObject> receiver,
}
-Handle<Object> GetPrototype(Handle<Object> obj) {
- Handle<Object> result(obj->GetPrototype());
- return result;
-}
-
-
Handle<Object> SetPrototype(Handle<JSObject> obj, Handle<Object> value) {
const bool skip_hidden_prototypes = false;
CALL_HEAP_FUNCTION(obj->GetIsolate(),
@@ -404,44 +315,6 @@ Handle<Object> SetPrototype(Handle<JSObject> obj, Handle<Object> value) {
}
-Handle<Object> PreventExtensions(Handle<JSObject> object) {
- CALL_HEAP_FUNCTION(object->GetIsolate(), object->PreventExtensions(), Object);
-}
-
-
-Handle<Object> SetHiddenProperty(Handle<JSObject> obj,
- Handle<String> key,
- Handle<Object> value) {
- CALL_HEAP_FUNCTION(obj->GetIsolate(),
- obj->SetHiddenProperty(*key, *value),
- Object);
-}
-
-
-int GetIdentityHash(Handle<JSReceiver> obj) {
- CALL_AND_RETRY(obj->GetIsolate(),
- obj->GetIdentityHash(ALLOW_CREATION),
- return Smi::cast(__object__)->value(),
- return 0);
-}
-
-
-Handle<Object> DeleteElement(Handle<JSObject> obj,
- uint32_t index) {
- CALL_HEAP_FUNCTION(obj->GetIsolate(),
- obj->DeleteElement(index, JSObject::NORMAL_DELETION),
- Object);
-}
-
-
-Handle<Object> DeleteProperty(Handle<JSObject> obj,
- Handle<String> prop) {
- CALL_HEAP_FUNCTION(obj->GetIsolate(),
- obj->DeleteProperty(*prop, JSObject::NORMAL_DELETION),
- Object);
-}
-
-
Handle<Object> LookupSingleCharacterStringFromCode(uint32_t index) {
Isolate* isolate = Isolate::Current();
CALL_HEAP_FUNCTION(
@@ -459,43 +332,6 @@ Handle<String> SubString(Handle<String> str,
}
-Handle<Object> SetElement(Handle<JSObject> object,
- uint32_t index,
- Handle<Object> value,
- StrictModeFlag strict_mode) {
- if (object->HasExternalArrayElements()) {
- if (!value->IsSmi() && !value->IsHeapNumber() && !value->IsUndefined()) {
- bool has_exception;
- Handle<Object> number = Execution::ToNumber(value, &has_exception);
- if (has_exception) return Handle<Object>();
- value = number;
- }
- }
- CALL_HEAP_FUNCTION(object->GetIsolate(),
- object->SetElement(index, *value, strict_mode, true),
- Object);
-}
-
-
-Handle<Object> SetOwnElement(Handle<JSObject> object,
- uint32_t index,
- Handle<Object> value,
- StrictModeFlag strict_mode) {
- ASSERT(!object->HasExternalArrayElements());
- CALL_HEAP_FUNCTION(object->GetIsolate(),
- object->SetElement(index, *value, strict_mode, false),
- Object);
-}
-
-
-Handle<Object> TransitionElementsKind(Handle<JSObject> object,
- ElementsKind to_kind) {
- CALL_HEAP_FUNCTION(object->GetIsolate(),
- object->TransitionElementsKind(to_kind),
- Object);
-}
-
-
Handle<JSObject> Copy(Handle<JSObject> obj) {
Isolate* isolate = obj->GetIsolate();
CALL_HEAP_FUNCTION(isolate,
@@ -664,6 +500,19 @@ int GetScriptLineNumber(Handle<Script> script, int code_pos) {
return right + script->line_offset()->value();
}
+// Convert code position into column number.
+int GetScriptColumnNumber(Handle<Script> script, int code_pos) {
+ int line_number = GetScriptLineNumber(script, code_pos);
+ if (line_number == -1) return -1;
+
+ AssertNoAllocation no_allocation;
+ FixedArray* line_ends_array = FixedArray::cast(script->line_ends());
+ line_number = line_number - script->line_offset()->value();
+ if (line_number == 0) return code_pos + script->column_offset()->value();
+ int prev_line_end_pos =
+ Smi::cast(line_ends_array->get(line_number - 1))->value();
+ return code_pos - (prev_line_end_pos + 1);
+}
int GetScriptLineNumberSafe(Handle<Script> script, int code_pos) {
AssertNoAllocation no_allocation;
@@ -862,31 +711,62 @@ Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
isolate);
}
isolate->counters()->enum_cache_misses()->Increment();
- int num_enum = object->NumberOfEnumProperties();
+ Handle<Map> map(object->map());
+ int num_enum = object->NumberOfLocalProperties(DONT_ENUM);
+
Handle<FixedArray> storage = isolate->factory()->NewFixedArray(num_enum);
Handle<FixedArray> sort_array = isolate->factory()->NewFixedArray(num_enum);
+
+ Handle<FixedArray> indices;
+ Handle<FixedArray> sort_array2;
+
+ if (cache_result) {
+ indices = isolate->factory()->NewFixedArray(num_enum);
+ sort_array2 = isolate->factory()->NewFixedArray(num_enum);
+ }
+
Handle<DescriptorArray> descs =
Handle<DescriptorArray>(object->map()->instance_descriptors(), isolate);
+
for (int i = 0; i < descs->number_of_descriptors(); i++) {
if (descs->IsProperty(i) && !descs->IsDontEnum(i)) {
- (*storage)->set(index, descs->GetKey(i));
+ storage->set(index, descs->GetKey(i));
PropertyDetails details(descs->GetDetails(i));
- (*sort_array)->set(index, Smi::FromInt(details.index()));
+ sort_array->set(index, Smi::FromInt(details.index()));
+ if (!indices.is_null()) {
+ if (details.type() != FIELD) {
+ indices = Handle<FixedArray>();
+ sort_array2 = Handle<FixedArray>();
+ } else {
+ int field_index = Descriptor::IndexFromValue(descs->GetValue(i));
+ if (field_index >= map->inobject_properties()) {
+ field_index = -(field_index - map->inobject_properties() + 1);
+ }
+ indices->set(index, Smi::FromInt(field_index));
+ sort_array2->set(index, Smi::FromInt(details.index()));
+ }
+ }
index++;
}
}
- (*storage)->SortPairs(*sort_array, sort_array->length());
+ storage->SortPairs(*sort_array, sort_array->length());
+ if (!indices.is_null()) {
+ indices->SortPairs(*sort_array2, sort_array2->length());
+ }
if (cache_result) {
Handle<FixedArray> bridge_storage =
isolate->factory()->NewFixedArray(
DescriptorArray::kEnumCacheBridgeLength);
DescriptorArray* desc = object->map()->instance_descriptors();
- desc->SetEnumCache(*bridge_storage, *storage);
+ desc->SetEnumCache(*bridge_storage,
+ *storage,
+ indices.is_null() ? Object::cast(Smi::FromInt(0))
+ : Object::cast(*indices));
}
ASSERT(storage->length() == index);
return storage;
} else {
- int num_enum = object->NumberOfEnumProperties();
+ int num_enum = object->NumberOfLocalProperties(DONT_ENUM);
Handle<FixedArray> storage = isolate->factory()->NewFixedArray(num_enum);
Handle<FixedArray> sort_array = isolate->factory()->NewFixedArray(num_enum);
object->property_dictionary()->CopyEnumKeysTo(*storage, *sort_array);
@@ -920,4 +800,162 @@ Handle<ObjectHashTable> PutIntoObjectHashTable(Handle<ObjectHashTable> table,
}
+// This method determines the type of string involved and then gets the UTF8
+// length of the string. It doesn't flatten the string and has log(n) recursion
+// for a string of length n. If the failure flag gets set, then we have to
+// flatten the string and retry. Failures are caused by surrogate pairs in deep
+// cons strings.
+
+// Single surrogate characters that are encountered in the UTF-16 character
+// sequence of the input string get counted as 3 UTF-8 bytes, because that
+// is the way that WriteUtf8 will encode them. Surrogate pairs are counted and
+// encoded as one 4-byte UTF-8 sequence.
+
+// This function conceptually uses recursion on the two halves of cons strings.
+// However, in order to avoid the recursion going too deep it recurses on the
+// second string of the cons, but iterates on the first substring (by manually
+// eliminating it as a tail recursion). This means it counts the UTF-8 length
+// from the end to the start, which makes no difference to the total.
+
+// Surrogate pairs are recognized even if they are split across two sides of a
+// cons, which complicates the implementation somewhat. Therefore, too deep
+// recursion cannot always be avoided. This case is detected, and the failure
+// flag is set, a signal to the caller that the string should be flattened and
+// the operation retried.
+int Utf8LengthHelper(String* input,
+ int from,
+ int to,
+ bool followed_by_surrogate,
+ int max_recursion,
+ bool* failure,
+ bool* starts_with_surrogate) {
+ if (from == to) return 0;
+ int total = 0;
+ bool dummy;
+ while (true) {
+ if (input->IsAsciiRepresentation()) {
+ *starts_with_surrogate = false;
+ return total + to - from;
+ }
+ switch (StringShape(input).representation_tag()) {
+ case kConsStringTag: {
+ ConsString* str = ConsString::cast(input);
+ String* first = str->first();
+ String* second = str->second();
+ int first_length = first->length();
+ if (first_length - from > to - first_length) {
+ if (first_length < to) {
+ // Right hand side is shorter. No need to check the recursion depth
+ // since this can only happen log(n) times.
+ bool right_starts_with_surrogate = false;
+ total += Utf8LengthHelper(second,
+ 0,
+ to - first_length,
+ followed_by_surrogate,
+ max_recursion - 1,
+ failure,
+ &right_starts_with_surrogate);
+ if (*failure) return 0;
+ followed_by_surrogate = right_starts_with_surrogate;
+ input = first;
+ to = first_length;
+ } else {
+ // We only need the left hand side.
+ input = first;
+ }
+ } else {
+ if (first_length > from) {
+ // Left hand side is shorter.
+ if (first->IsAsciiRepresentation()) {
+ total += first_length - from;
+ *starts_with_surrogate = false;
+ starts_with_surrogate = &dummy;
+ input = second;
+ from = 0;
+ to -= first_length;
+ } else if (second->IsAsciiRepresentation()) {
+ followed_by_surrogate = false;
+ total += to - first_length;
+ input = first;
+ to = first_length;
+ } else if (max_recursion > 0) {
+ bool right_starts_with_surrogate = false;
+ // Recursing on the long one. This may fail.
+ total += Utf8LengthHelper(second,
+ 0,
+ to - first_length,
+ followed_by_surrogate,
+ max_recursion - 1,
+ failure,
+ &right_starts_with_surrogate);
+ if (*failure) return 0;
+ input = first;
+ to = first_length;
+ followed_by_surrogate = right_starts_with_surrogate;
+ } else {
+ *failure = true;
+ return 0;
+ }
+ } else {
+ // We only need the right hand side.
+ input = second;
+ from = 0;
+ to -= first_length;
+ }
+ }
+ continue;
+ }
+ case kExternalStringTag:
+ case kSeqStringTag: {
+ Vector<const uc16> vector = input->GetFlatContent().ToUC16Vector();
+ const uc16* p = vector.start();
+ int previous = unibrow::Utf16::kNoPreviousCharacter;
+ for (int i = from; i < to; i++) {
+ uc16 c = p[i];
+ total += unibrow::Utf8::Length(c, previous);
+ previous = c;
+ }
+ if (to - from > 0) {
+ if (unibrow::Utf16::IsLeadSurrogate(previous) &&
+ followed_by_surrogate) {
+ total -= unibrow::Utf8::kBytesSavedByCombiningSurrogates;
+ }
+ if (unibrow::Utf16::IsTrailSurrogate(p[from])) {
+ *starts_with_surrogate = true;
+ }
+ }
+ return total;
+ }
+ case kSlicedStringTag: {
+ SlicedString* str = SlicedString::cast(input);
+ int offset = str->offset();
+ input = str->parent();
+ from += offset;
+ to += offset;
+ continue;
+ }
+ default:
+ break;
+ }
+ UNREACHABLE();
+ return 0;
+ }
+ return 0;
+}
+
+
+int Utf8Length(Handle<String> str) {
+ bool dummy;
+ bool failure;
+ int len;
+ const int kRecursionBudget = 100;
+ do {
+ failure = false;
+ len = Utf8LengthHelper(
+ *str, 0, str->length(), false, kRecursionBudget, &failure, &dummy);
+ if (failure) FlattenString(str);
+ } while (failure);
+ return len;
+}
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/handles.h b/src/3rdparty/v8/src/handles.h
index c359cb3..960696b 100644
--- a/src/3rdparty/v8/src/handles.h
+++ b/src/3rdparty/v8/src/handles.h
@@ -167,18 +167,6 @@ class HandleScope {
// an object of expected type, or the handle is an error if running out
// of space or encountering an internal error.
-void NormalizeProperties(Handle<JSObject> object,
- PropertyNormalizationMode mode,
- int expected_additional_properties);
-Handle<NumberDictionary> NormalizeElements(Handle<JSObject> object);
-void TransformToFastProperties(Handle<JSObject> object,
- int unused_property_fields);
-MUST_USE_RESULT Handle<NumberDictionary> NumberDictionarySet(
- Handle<NumberDictionary> dictionary,
- uint32_t index,
- Handle<Object> value,
- PropertyDetails details);
-
// Flattens a string.
void FlattenString(Handle<String> str);
@@ -186,12 +174,7 @@ void FlattenString(Handle<String> str);
// string.
Handle<String> FlattenGetString(Handle<String> str);
-Handle<Object> SetProperty(Handle<JSReceiver> object,
- Handle<String> key,
- Handle<Object> value,
- PropertyAttributes attributes,
- StrictModeFlag strict_mode,
- bool skip_fallback_interceptor = false);
+int Utf8Length(Handle<String> str);
Handle<Object> SetProperty(Handle<Object> object,
Handle<Object> key,
@@ -204,46 +187,9 @@ Handle<Object> ForceSetProperty(Handle<JSObject> object,
Handle<Object> value,
PropertyAttributes attributes);
-Handle<Object> SetNormalizedProperty(Handle<JSObject> object,
- Handle<String> key,
- Handle<Object> value,
- PropertyDetails details);
-
Handle<Object> ForceDeleteProperty(Handle<JSObject> object,
Handle<Object> key);
-Handle<Object> SetLocalPropertyIgnoreAttributes(
- Handle<JSObject> object,
- Handle<String> key,
- Handle<Object> value,
- PropertyAttributes attributes);
-
-// Used to set local properties on the object we totally control
-// and which therefore has no accessors and alikes.
-void SetLocalPropertyNoThrow(Handle<JSObject> object,
- Handle<String> key,
- Handle<Object> value,
- PropertyAttributes attributes = NONE);
-
-Handle<Object> SetPropertyWithInterceptor(Handle<JSObject> object,
- Handle<String> key,
- Handle<Object> value,
- PropertyAttributes attributes,
- StrictModeFlag strict_mode);
-
-MUST_USE_RESULT Handle<Object> SetElement(Handle<JSObject> object,
- uint32_t index,
- Handle<Object> value,
- StrictModeFlag strict_mode);
-
-Handle<Object> SetOwnElement(Handle<JSObject> object,
- uint32_t index,
- Handle<Object> value,
- StrictModeFlag strict_mode);
-
-Handle<Object> TransitionElementsKind(Handle<JSObject> object,
- ElementsKind to_kind);
-
Handle<Object> GetProperty(Handle<JSReceiver> obj,
const char* name);
@@ -255,21 +201,8 @@ Handle<Object> GetPropertyWithInterceptor(Handle<JSObject> receiver,
Handle<String> name,
PropertyAttributes* attributes);
-Handle<Object> GetPrototype(Handle<Object> obj);
-
Handle<Object> SetPrototype(Handle<JSObject> obj, Handle<Object> value);
-// Sets a hidden property on an object. Returns obj on success, undefined
-// if trying to set the property on a detached proxy.
-Handle<Object> SetHiddenProperty(Handle<JSObject> obj,
- Handle<String> key,
- Handle<Object> value);
-
-int GetIdentityHash(Handle<JSReceiver> obj);
-
-Handle<Object> DeleteElement(Handle<JSObject> obj, uint32_t index);
-Handle<Object> DeleteProperty(Handle<JSObject> obj, Handle<String> prop);
-
Handle<Object> LookupSingleCharacterStringFromCode(uint32_t index);
Handle<JSObject> Copy(Handle<JSObject> obj);
@@ -293,6 +226,7 @@ Handle<FixedArray> CalculateLineEnds(Handle<String> string,
int GetScriptLineNumber(Handle<Script> script, int code_position);
// The safe version does not make heap allocations but may work much slower.
int GetScriptLineNumberSafe(Handle<Script> script, int code_position);
+int GetScriptColumnNumber(Handle<Script> script, int code_position);
// Computes the enumerable keys from interceptors. Used for debug mirrors and
// by GetKeysInFixedArrayFor below.
@@ -322,7 +256,6 @@ Handle<String> SubString(Handle<String> str,
int end,
PretenureFlag pretenure = NOT_TENURED);
-
// Sets the expected number of properties for the function's instances.
void SetExpectedNofProperties(Handle<JSFunction> func, int nof);
@@ -341,8 +274,6 @@ Handle<JSGlobalProxy> ReinitializeJSGlobalProxy(
Handle<Object> SetPrototype(Handle<JSFunction> function,
Handle<Object> prototype);
-Handle<Object> PreventExtensions(Handle<JSObject> object);
-
Handle<ObjectHashSet> ObjectHashSetAdd(Handle<ObjectHashSet> table,
Handle<Object> key);
diff --git a/src/3rdparty/v8/src/hashmap.cc b/src/3rdparty/v8/src/hashmap.cc
deleted file mode 100644
index 37748a3..0000000
--- a/src/3rdparty/v8/src/hashmap.cc
+++ /dev/null
@@ -1,224 +0,0 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include "../include/v8stdint.h"
-#include "globals.h"
-#include "checks.h"
-#include "utils.h"
-#include "allocation.h"
-
-#include "hashmap.h"
-
-namespace v8 {
-namespace internal {
-
-Allocator HashMap::DefaultAllocator;
-
-
-HashMap::HashMap(MatchFun match,
- Allocator* allocator,
- uint32_t initial_capacity) {
- allocator_ = allocator;
- match_ = match;
- Initialize(initial_capacity);
-}
-
-
-HashMap::~HashMap() {
- if (allocator_) {
- allocator_->Delete(map_);
- }
-}
-
-
-HashMap::Entry* HashMap::Lookup(void* key, uint32_t hash, bool insert) {
- // Find a matching entry.
- Entry* p = Probe(key, hash);
- if (p->key != NULL) {
- return p;
- }
-
- // No entry found; insert one if necessary.
- if (insert) {
- p->key = key;
- p->value = NULL;
- p->hash = hash;
- occupancy_++;
-
- // Grow the map if we reached >= 80% occupancy.
- if (occupancy_ + occupancy_/4 >= capacity_) {
- Resize();
- p = Probe(key, hash);
- }
-
- return p;
- }
-
- // No entry found and none inserted.
- return NULL;
-}
-
-
-void HashMap::Remove(void* key, uint32_t hash) {
- // Lookup the entry for the key to remove.
- Entry* p = Probe(key, hash);
- if (p->key == NULL) {
- // Key not found nothing to remove.
- return;
- }
-
- // To remove an entry we need to ensure that it does not create an empty
- // entry that will cause the search for another entry to stop too soon. If all
- // the entries between the entry to remove and the next empty slot have their
- // initial position inside this interval, clearing the entry to remove will
- // not break the search. If, while searching for the next empty entry, an
- // entry is encountered which does not have its initial position between the
- // entry to remove and the position looked at, then this entry can be moved to
- // the place of the entry to remove without breaking the search for it. The
- // entry made vacant by this move is now the entry to remove and the process
- // starts over.
- // Algorithm from http://en.wikipedia.org/wiki/Open_addressing.
-
- // This guarantees loop termination as there is at least one empty entry so
- // eventually the removed entry will have an empty entry after it.
- ASSERT(occupancy_ < capacity_);
-
- // p is the candidate entry to clear. q is used to scan forwards.
- Entry* q = p; // Start at the entry to remove.
- while (true) {
- // Move q to the next entry.
- q = q + 1;
- if (q == map_end()) {
- q = map_;
- }
-
- // All entries between p and q have their initial position between p and q
- // and the entry p can be cleared without breaking the search for these
- // entries.
- if (q->key == NULL) {
- break;
- }
-
- // Find the initial position for the entry at position q.
- Entry* r = map_ + (q->hash & (capacity_ - 1));
-
- // If the entry at position q has its initial position outside the range
- // between p and q it can be moved forward to position p and will still be
- // found. There is now a new candidate entry for clearing.
- if ((q > p && (r <= p || r > q)) ||
- (q < p && (r <= p && r > q))) {
- *p = *q;
- p = q;
- }
- }
-
- // Clear the entry which is allowed to en emptied.
- p->key = NULL;
- occupancy_--;
-}
-
-
-void HashMap::Clear() {
- // Mark all entries as empty.
- const Entry* end = map_end();
- for (Entry* p = map_; p < end; p++) {
- p->key = NULL;
- }
- occupancy_ = 0;
-}
-
-
-HashMap::Entry* HashMap::Start() const {
- return Next(map_ - 1);
-}
-
-
-HashMap::Entry* HashMap::Next(Entry* p) const {
- const Entry* end = map_end();
- ASSERT(map_ - 1 <= p && p < end);
- for (p++; p < end; p++) {
- if (p->key != NULL) {
- return p;
- }
- }
- return NULL;
-}
-
-
-HashMap::Entry* HashMap::Probe(void* key, uint32_t hash) {
- ASSERT(key != NULL);
-
- ASSERT(IsPowerOf2(capacity_));
- Entry* p = map_ + (hash & (capacity_ - 1));
- const Entry* end = map_end();
- ASSERT(map_ <= p && p < end);
-
- ASSERT(occupancy_ < capacity_); // Guarantees loop termination.
- while (p->key != NULL && (hash != p->hash || !match_(key, p->key))) {
- p++;
- if (p >= end) {
- p = map_;
- }
- }
-
- return p;
-}
-
-
-void HashMap::Initialize(uint32_t capacity) {
- ASSERT(IsPowerOf2(capacity));
- map_ = reinterpret_cast<Entry*>(allocator_->New(capacity * sizeof(Entry)));
- if (map_ == NULL) {
- v8::internal::FatalProcessOutOfMemory("HashMap::Initialize");
- return;
- }
- capacity_ = capacity;
- Clear();
-}
-
-
-void HashMap::Resize() {
- Entry* map = map_;
- uint32_t n = occupancy_;
-
- // Allocate larger map.
- Initialize(capacity_ * 2);
-
- // Rehash all current entries.
- for (Entry* p = map; n > 0; p++) {
- if (p->key != NULL) {
- Lookup(p->key, p->hash, true)->value = p->value;
- n--;
- }
- }
-
- // Delete old map.
- allocator_->Delete(map);
-}
-
-
-} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/hashmap.h b/src/3rdparty/v8/src/hashmap.h
index 4e6a454..5aeb895 100644
--- a/src/3rdparty/v8/src/hashmap.h
+++ b/src/3rdparty/v8/src/hashmap.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -29,34 +29,22 @@
#define V8_HASHMAP_H_
#include "allocation.h"
+#include "checks.h"
+#include "utils.h"
namespace v8 {
namespace internal {
-
-// Allocator defines the memory allocator interface
-// used by HashMap and implements a default allocator.
-class Allocator BASE_EMBEDDED {
+template<class AllocationPolicy>
+class TemplateHashMapImpl {
public:
- virtual ~Allocator() {}
- virtual void* New(size_t size) { return Malloced::New(size); }
- virtual void Delete(void* p) { Malloced::Delete(p); }
-};
-
-
-class HashMap {
- public:
- static Allocator DefaultAllocator;
-
typedef bool (*MatchFun) (void* key1, void* key2);
// initial_capacity is the size of the initial hash map;
// it must be a power of 2 (and thus must not be 0).
- explicit HashMap(MatchFun match,
- Allocator* allocator = &DefaultAllocator,
- uint32_t initial_capacity = 8);
+ TemplateHashMapImpl(MatchFun match, uint32_t initial_capacity = 8);
- ~HashMap();
+ ~TemplateHashMapImpl();
// HashMap entries are (key, value, hash) triplets.
// Some clients may not need to use the value slot
@@ -100,7 +88,6 @@ class HashMap {
Entry* Next(Entry* p) const;
private:
- Allocator* allocator_;
MatchFun match_;
Entry* map_;
uint32_t capacity_;
@@ -112,6 +99,241 @@ class HashMap {
void Resize();
};
+typedef TemplateHashMapImpl<FreeStoreAllocationPolicy> HashMap;
+
+template<class P>
+TemplateHashMapImpl<P>::TemplateHashMapImpl(MatchFun match,
+ uint32_t initial_capacity) {
+ match_ = match;
+ Initialize(initial_capacity);
+}
+
+
+template<class P>
+TemplateHashMapImpl<P>::~TemplateHashMapImpl() {
+ P::Delete(map_);
+}
+
+
+template<class P>
+typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Lookup(
+ void* key, uint32_t hash, bool insert) {
+ // Find a matching entry.
+ Entry* p = Probe(key, hash);
+ if (p->key != NULL) {
+ return p;
+ }
+
+ // No entry found; insert one if necessary.
+ if (insert) {
+ p->key = key;
+ p->value = NULL;
+ p->hash = hash;
+ occupancy_++;
+
+ // Grow the map if we reached >= 80% occupancy.
+ if (occupancy_ + occupancy_/4 >= capacity_) {
+ Resize();
+ p = Probe(key, hash);
+ }
+
+ return p;
+ }
+
+ // No entry found and none inserted.
+ return NULL;
+}
+
+
+template<class P>
+void TemplateHashMapImpl<P>::Remove(void* key, uint32_t hash) {
+ // Lookup the entry for the key to remove.
+ Entry* p = Probe(key, hash);
+ if (p->key == NULL) {
+ // Key not found nothing to remove.
+ return;
+ }
+
+ // To remove an entry we need to ensure that it does not create an empty
+ // entry that will cause the search for another entry to stop too soon. If all
+ // the entries between the entry to remove and the next empty slot have their
+ // initial position inside this interval, clearing the entry to remove will
+ // not break the search. If, while searching for the next empty entry, an
+ // entry is encountered which does not have its initial position between the
+ // entry to remove and the position looked at, then this entry can be moved to
+ // the place of the entry to remove without breaking the search for it. The
+ // entry made vacant by this move is now the entry to remove and the process
+ // starts over.
+ // Algorithm from http://en.wikipedia.org/wiki/Open_addressing.
+
+ // This guarantees loop termination as there is at least one empty entry so
+ // eventually the removed entry will have an empty entry after it.
+ ASSERT(occupancy_ < capacity_);
+
+ // p is the candidate entry to clear. q is used to scan forwards.
+ Entry* q = p; // Start at the entry to remove.
+ while (true) {
+ // Move q to the next entry.
+ q = q + 1;
+ if (q == map_end()) {
+ q = map_;
+ }
+
+ // All entries between p and q have their initial position between p and q
+ // and the entry p can be cleared without breaking the search for these
+ // entries.
+ if (q->key == NULL) {
+ break;
+ }
+
+ // Find the initial position for the entry at position q.
+ Entry* r = map_ + (q->hash & (capacity_ - 1));
+
+ // If the entry at position q has its initial position outside the range
+ // between p and q it can be moved forward to position p and will still be
+ // found. There is now a new candidate entry for clearing.
+ if ((q > p && (r <= p || r > q)) ||
+ (q < p && (r <= p && r > q))) {
+ *p = *q;
+ p = q;
+ }
+ }
+
+ // Clear the entry which is allowed to en emptied.
+ p->key = NULL;
+ occupancy_--;
+}
+
+
+template<class P>
+void TemplateHashMapImpl<P>::Clear() {
+ // Mark all entries as empty.
+ const Entry* end = map_end();
+ for (Entry* p = map_; p < end; p++) {
+ p->key = NULL;
+ }
+ occupancy_ = 0;
+}
+
+
+template<class P>
+typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Start() const {
+ return Next(map_ - 1);
+}
+
+
+template<class P>
+typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Next(Entry* p)
+ const {
+ const Entry* end = map_end();
+ ASSERT(map_ - 1 <= p && p < end);
+ for (p++; p < end; p++) {
+ if (p->key != NULL) {
+ return p;
+ }
+ }
+ return NULL;
+}
+
+
+template<class P>
+typename TemplateHashMapImpl<P>::Entry* TemplateHashMapImpl<P>::Probe(void* key,
+ uint32_t hash) {
+ ASSERT(key != NULL);
+
+ ASSERT(IsPowerOf2(capacity_));
+ Entry* p = map_ + (hash & (capacity_ - 1));
+ const Entry* end = map_end();
+ ASSERT(map_ <= p && p < end);
+
+ ASSERT(occupancy_ < capacity_); // Guarantees loop termination.
+ while (p->key != NULL && (hash != p->hash || !match_(key, p->key))) {
+ p++;
+ if (p >= end) {
+ p = map_;
+ }
+ }
+
+ return p;
+}
+
+
+template<class P>
+void TemplateHashMapImpl<P>::Initialize(uint32_t capacity) {
+ ASSERT(IsPowerOf2(capacity));
+ map_ = reinterpret_cast<Entry*>(P::New(capacity * sizeof(Entry)));
+ if (map_ == NULL) {
+ v8::internal::FatalProcessOutOfMemory("HashMap::Initialize");
+ return;
+ }
+ capacity_ = capacity;
+ Clear();
+}
+
+
+template<class P>
+void TemplateHashMapImpl<P>::Resize() {
+ Entry* map = map_;
+ uint32_t n = occupancy_;
+
+ // Allocate larger map.
+ Initialize(capacity_ * 2);
+
+ // Rehash all current entries.
+ for (Entry* p = map; n > 0; p++) {
+ if (p->key != NULL) {
+ Lookup(p->key, p->hash, true)->value = p->value;
+ n--;
+ }
+ }
+
+ // Delete old map.
+ P::Delete(map);
+}
+
+
+// A hash map for pointer keys and values with an STL-like interface.
+template<class Key, class Value, class AllocationPolicy>
+class TemplateHashMap: private TemplateHashMapImpl<AllocationPolicy> {
+ public:
+ STATIC_ASSERT(sizeof(Key*) == sizeof(void*)); // NOLINT
+ STATIC_ASSERT(sizeof(Value*) == sizeof(void*)); // NOLINT
+ struct value_type {
+ Key* first;
+ Value* second;
+ };
+
+ class Iterator {
+ public:
+ Iterator& operator++() {
+ entry_ = map_->Next(entry_);
+ return *this;
+ }
+
+ value_type* operator->() { return reinterpret_cast<value_type*>(entry_); }
+ bool operator!=(const Iterator& other) { return entry_ != other.entry_; }
+
+ private:
+ Iterator(const TemplateHashMapImpl<AllocationPolicy>* map,
+ typename TemplateHashMapImpl<AllocationPolicy>::Entry* entry) :
+ map_(map), entry_(entry) { }
+
+ const TemplateHashMapImpl<AllocationPolicy>* map_;
+ typename TemplateHashMapImpl<AllocationPolicy>::Entry* entry_;
+
+ friend class TemplateHashMap;
+ };
+
+ TemplateHashMap(
+ typename TemplateHashMapImpl<AllocationPolicy>::MatchFun match)
+ : TemplateHashMapImpl<AllocationPolicy>(match) { }
+
+ Iterator begin() const { return Iterator(this, this->Start()); }
+ Iterator end() const { return Iterator(this, NULL); }
+ Iterator find(Key* key, bool insert = false) {
+ return Iterator(this, this->Lookup(key, key->Hash(), insert));
+ }
+};
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/heap-inl.h b/src/3rdparty/v8/src/heap-inl.h
index f615796..706d288 100644
--- a/src/3rdparty/v8/src/heap-inl.h
+++ b/src/3rdparty/v8/src/heap-inl.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -32,6 +32,7 @@
#include "isolate.h"
#include "list-inl.h"
#include "objects.h"
+#include "platform.h"
#include "v8-counters.h"
#include "store-buffer.h"
#include "store-buffer-inl.h"
@@ -40,12 +41,30 @@ namespace v8 {
namespace internal {
void PromotionQueue::insert(HeapObject* target, int size) {
+ if (emergency_stack_ != NULL) {
+ emergency_stack_->Add(Entry(target, size));
+ return;
+ }
+
if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
NewSpacePage* rear_page =
NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
ASSERT(!rear_page->prev_page()->is_anchor());
- rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->body_limit());
+ rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
+ ActivateGuardIfOnTheSamePage();
}
+
+ if (guard_) {
+ ASSERT(GetHeadPage() ==
+ Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));
+
+ if ((rear_ - 2) < limit_) {
+ RelocateQueueHead();
+ emergency_stack_->Add(Entry(target, size));
+ return;
+ }
+ }
+
*(--rear_) = reinterpret_cast<intptr_t>(target);
*(--rear_) = size;
// Assert no overflow into live objects.
@@ -56,8 +75,10 @@ void PromotionQueue::insert(HeapObject* target, int size) {
}
-int Heap::MaxObjectSizeInPagedSpace() {
- return Page::kMaxHeapObjectSize;
+void PromotionQueue::ActivateGuardIfOnTheSamePage() {
+ guard_ = guard_ ||
+ heap_->new_space()->active_space()->current_page()->address() ==
+ GetHeadPage()->address();
}
@@ -94,18 +115,18 @@ MaybeObject* Heap::AllocateAsciiSymbol(Vector<const char> str,
// Allocate string.
Object* result;
- { MaybeObject* maybe_result = (size > MaxObjectSizeInPagedSpace())
+ { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
: old_data_space_->AllocateRaw(size);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- reinterpret_cast<HeapObject*>(result)->set_map(map);
+ // String maps are all immortal immovable objects.
+ reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);
// Set length and hash fields of the allocated string.
String* answer = String::cast(result);
answer->set_length(str.length());
answer->set_hash_field(hash_field);
- SeqString::cast(answer)->set_symbol_id(0);
ASSERT_EQ(size, answer->Size());
@@ -128,7 +149,7 @@ MaybeObject* Heap::AllocateTwoByteSymbol(Vector<const uc16> str,
// Allocate string.
Object* result;
- { MaybeObject* maybe_result = (size > MaxObjectSizeInPagedSpace())
+ { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
: old_data_space_->AllocateRaw(size);
if (!maybe_result->ToObject(&result)) return maybe_result;
@@ -139,7 +160,6 @@ MaybeObject* Heap::AllocateTwoByteSymbol(Vector<const uc16> str,
String* answer = String::cast(result);
answer->set_length(str.length());
answer->set_hash_field(hash_field);
- SeqString::cast(answer)->set_symbol_id(0);
ASSERT_EQ(size, answer->Size());
@@ -206,51 +226,36 @@ MaybeObject* Heap::AllocateRaw(int size_in_bytes,
}
-MaybeObject* Heap::NumberFromInt32(int32_t value) {
+MaybeObject* Heap::NumberFromInt32(
+ int32_t value, PretenureFlag pretenure) {
if (Smi::IsValid(value)) return Smi::FromInt(value);
// Bypass NumberFromDouble to avoid various redundant checks.
- return AllocateHeapNumber(FastI2D(value));
+ return AllocateHeapNumber(FastI2D(value), pretenure);
}
-MaybeObject* Heap::NumberFromUint32(uint32_t value) {
+MaybeObject* Heap::NumberFromUint32(
+ uint32_t value, PretenureFlag pretenure) {
if ((int32_t)value >= 0 && Smi::IsValid((int32_t)value)) {
return Smi::FromInt((int32_t)value);
}
// Bypass NumberFromDouble to avoid various redundant checks.
- return AllocateHeapNumber(FastUI2D(value));
+ return AllocateHeapNumber(FastUI2D(value), pretenure);
}
-void Heap::FinalizeExternalString(HeapObject* string) {
- ASSERT(string->IsExternalString() || string->map()->has_external_resource());
-
- if (string->IsExternalString()) {
- v8::String::ExternalStringResourceBase** resource_addr =
- reinterpret_cast<v8::String::ExternalStringResourceBase**>(
- reinterpret_cast<byte*>(string) +
- ExternalString::kResourceOffset -
- kHeapObjectTag);
-
- // Dispose of the C++ object if it has not already been disposed.
- if (*resource_addr != NULL) {
- (*resource_addr)->Dispose();
- }
-
- // Clear the resource pointer in the string.
+void Heap::FinalizeExternalString(String* string) {
+ ASSERT(string->IsExternalString());
+ v8::String::ExternalStringResourceBase** resource_addr =
+ reinterpret_cast<v8::String::ExternalStringResourceBase**>(
+ reinterpret_cast<byte*>(string) +
+ ExternalString::kResourceOffset -
+ kHeapObjectTag);
+
+ // Dispose of the C++ object if it has not already been disposed.
+ if (*resource_addr != NULL) {
+ (*resource_addr)->Dispose();
*resource_addr = NULL;
- } else {
- JSObject *object = JSObject::cast(string);
- Object *value = object->GetExternalResourceObject();
- v8::Object::ExternalResource *resource = 0;
- if (value->IsSmi()) {
- resource = reinterpret_cast<v8::Object::ExternalResource*>(Internals::GetExternalPointerFromSmi(value));
- } else if (value->IsForeign()) {
- resource = reinterpret_cast<v8::Object::ExternalResource*>(Foreign::cast(value)->foreign_address());
- }
- if (resource) {
- resource->Dispose();
- }
}
}
@@ -429,8 +434,10 @@ void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
}
-bool Heap::CollectGarbage(AllocationSpace space) {
- return CollectGarbage(space, SelectGarbageCollector(space));
+bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason) {
+ const char* collector_reason = NULL;
+ GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
+ return CollectGarbage(space, collector, gc_reason, collector_reason);
}
@@ -454,7 +461,7 @@ MaybeObject* Heap::PrepareForCompare(String* str) {
int Heap::AdjustAmountOfExternalAllocatedMemory(int change_in_bytes) {
- ASSERT(HasBeenSetup());
+ ASSERT(HasBeenSetUp());
int amount = amount_of_external_allocated_memory_ + change_in_bytes;
if (change_in_bytes >= 0) {
// Avoid overflow.
@@ -465,7 +472,7 @@ int Heap::AdjustAmountOfExternalAllocatedMemory(int change_in_bytes) {
amount_of_external_allocated_memory_ -
amount_of_external_allocated_memory_at_last_global_gc_;
if (amount_since_last_global_gc > external_allocation_limit_) {
- CollectAllGarbage(kNoGCFlags);
+ CollectAllGarbage(kNoGCFlags, "external memory allocation limit reached");
}
} else {
// Avoid underflow.
@@ -496,7 +503,6 @@ Isolate* Heap::isolate() {
#define GC_GREEDY_CHECK() { }
#endif
-
// Calls the FUNCTION_CALL function and retries it up to three times
// to guarantee that any allocations performed during the call will
// succeed if there's enough memory.
@@ -515,7 +521,8 @@ Isolate* Heap::isolate() {
} \
if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \
ISOLATE->heap()->CollectGarbage(Failure::cast(__maybe_object__)-> \
- allocation_space()); \
+ allocation_space(), \
+ "allocation failure"); \
__maybe_object__ = FUNCTION_CALL; \
if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE; \
if (__maybe_object__->IsOutOfMemory()) { \
@@ -523,7 +530,7 @@ Isolate* Heap::isolate() {
} \
if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY; \
ISOLATE->counters()->gc_last_resort_from_handles()->Increment(); \
- ISOLATE->heap()->CollectAllAvailableGarbage(); \
+ ISOLATE->heap()->CollectAllAvailableGarbage("last resort gc"); \
{ \
AlwaysAllocateScope __scope__; \
__maybe_object__ = FUNCTION_CALL; \
@@ -570,16 +577,6 @@ void ExternalStringTable::AddString(String* string) {
}
-void ExternalStringTable::AddObject(HeapObject* object) {
- ASSERT(object->map()->has_external_resource());
- if (heap_->InNewSpace(object)) {
- new_space_strings_.Add(object);
- } else {
- old_space_strings_.Add(object);
- }
-}
-
-
void ExternalStringTable::Iterate(ObjectVisitor* v) {
if (!new_space_strings_.is_empty()) {
Object** start = &new_space_strings_[0];
@@ -598,24 +595,24 @@ void ExternalStringTable::Verify() {
#ifdef DEBUG
for (int i = 0; i < new_space_strings_.length(); ++i) {
ASSERT(heap_->InNewSpace(new_space_strings_[i]));
- ASSERT(new_space_strings_[i] != HEAP->raw_unchecked_null_value());
+ ASSERT(new_space_strings_[i] != HEAP->raw_unchecked_the_hole_value());
}
for (int i = 0; i < old_space_strings_.length(); ++i) {
ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
- ASSERT(old_space_strings_[i] != HEAP->raw_unchecked_null_value());
+ ASSERT(old_space_strings_[i] != HEAP->raw_unchecked_the_hole_value());
}
#endif
}
-void ExternalStringTable::AddOldObject(HeapObject* object) {
- ASSERT(object->IsExternalString() || object->map()->has_external_resource());
- ASSERT(!heap_->InNewSpace(object));
- old_space_strings_.Add(object);
+void ExternalStringTable::AddOldString(String* string) {
+ ASSERT(string->IsExternalString());
+ ASSERT(!heap_->InNewSpace(string));
+ old_space_strings_.Add(string);
}
-void ExternalStringTable::ShrinkNewObjects(int position) {
+void ExternalStringTable::ShrinkNewStrings(int position) {
new_space_strings_.Rewind(position);
if (FLAG_verify_heap) {
Verify();
@@ -662,15 +659,15 @@ double TranscendentalCache::SubCache::Calculate(double input) {
case ATAN:
return atan(input);
case COS:
- return cos(input);
+ return fast_cos(input);
case EXP:
return exp(input);
case LOG:
- return log(input);
+ return fast_log(input);
case SIN:
- return sin(input);
+ return fast_sin(input);
case TAN:
- return tan(input);
+ return fast_tan(input);
default:
return 0.0; // Never happens.
}
@@ -702,9 +699,92 @@ MaybeObject* TranscendentalCache::SubCache::Get(double input) {
}
-Heap* _inline_get_heap_() {
- return HEAP;
+AlwaysAllocateScope::AlwaysAllocateScope() {
+ // We shouldn't hit any nested scopes, because that requires
+ // non-handle code to call handle code. The code still works but
+ // performance will degrade, so we want to catch this situation
+ // in debug mode.
+ ASSERT(HEAP->always_allocate_scope_depth_ == 0);
+ HEAP->always_allocate_scope_depth_++;
+}
+
+
+AlwaysAllocateScope::~AlwaysAllocateScope() {
+ HEAP->always_allocate_scope_depth_--;
+ ASSERT(HEAP->always_allocate_scope_depth_ == 0);
+}
+
+
+LinearAllocationScope::LinearAllocationScope() {
+ HEAP->linear_allocation_scope_depth_++;
+}
+
+
+LinearAllocationScope::~LinearAllocationScope() {
+ HEAP->linear_allocation_scope_depth_--;
+ ASSERT(HEAP->linear_allocation_scope_depth_ >= 0);
+}
+
+
+#ifdef DEBUG
+void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
+ for (Object** current = start; current < end; current++) {
+ if ((*current)->IsHeapObject()) {
+ HeapObject* object = HeapObject::cast(*current);
+ ASSERT(HEAP->Contains(object));
+ ASSERT(object->map()->IsMap());
+ }
+ }
}
+#endif
+
+
+double GCTracer::SizeOfHeapObjects() {
+ return (static_cast<double>(HEAP->SizeOfObjects())) / MB;
+}
+
+
+#ifdef DEBUG
+DisallowAllocationFailure::DisallowAllocationFailure() {
+ old_state_ = HEAP->disallow_allocation_failure_;
+ HEAP->disallow_allocation_failure_ = true;
+}
+
+
+DisallowAllocationFailure::~DisallowAllocationFailure() {
+ HEAP->disallow_allocation_failure_ = old_state_;
+}
+#endif
+
+
+#ifdef DEBUG
+AssertNoAllocation::AssertNoAllocation() {
+ old_state_ = HEAP->allow_allocation(false);
+}
+
+
+AssertNoAllocation::~AssertNoAllocation() {
+ HEAP->allow_allocation(old_state_);
+}
+
+
+DisableAssertNoAllocation::DisableAssertNoAllocation() {
+ old_state_ = HEAP->allow_allocation(true);
+}
+
+
+DisableAssertNoAllocation::~DisableAssertNoAllocation() {
+ HEAP->allow_allocation(old_state_);
+}
+
+#else
+
+AssertNoAllocation::AssertNoAllocation() { }
+AssertNoAllocation::~AssertNoAllocation() { }
+DisableAssertNoAllocation::DisableAssertNoAllocation() { }
+DisableAssertNoAllocation::~DisableAssertNoAllocation() { }
+
+#endif
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/heap-profiler.cc b/src/3rdparty/v8/src/heap-profiler.cc
index 46c63c2..8be6f27 100644
--- a/src/3rdparty/v8/src/heap-profiler.cc
+++ b/src/3rdparty/v8/src/heap-profiler.cc
@@ -51,7 +51,7 @@ void HeapProfiler::ResetSnapshots() {
}
-void HeapProfiler::Setup() {
+void HeapProfiler::SetUp() {
Isolate* isolate = Isolate::Current();
if (isolate->heap_profiler() == NULL) {
isolate->set_heap_profiler(new HeapProfiler());
diff --git a/src/3rdparty/v8/src/heap-profiler.h b/src/3rdparty/v8/src/heap-profiler.h
index b1bc91c..ef5c4f4 100644
--- a/src/3rdparty/v8/src/heap-profiler.h
+++ b/src/3rdparty/v8/src/heap-profiler.h
@@ -48,7 +48,7 @@ class HeapSnapshotsCollection;
// to generate .hp files for use by the GHC/Valgrind tool hp2ps.
class HeapProfiler {
public:
- static void Setup();
+ static void SetUp();
static void TearDown();
static HeapSnapshot* TakeSnapshot(const char* name,
diff --git a/src/3rdparty/v8/src/heap.cc b/src/3rdparty/v8/src/heap.cc
index de3187e..6070723 100644
--- a/src/3rdparty/v8/src/heap.cc
+++ b/src/3rdparty/v8/src/heap.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -42,6 +42,7 @@
#include "natives.h"
#include "objects-visiting.h"
#include "objects-visiting-inl.h"
+#include "once.h"
#include "runtime-profiler.h"
#include "scopeinfo.h"
#include "snapshot.h"
@@ -61,9 +62,6 @@ namespace v8 {
namespace internal {
-static Mutex* gc_initializer_mutex = OS::CreateMutex();
-
-
Heap::Heap()
: isolate_(NULL),
// semispace_size_ should be a power of 2 and old_generation_size_ should be
@@ -80,9 +78,9 @@ Heap::Heap()
#endif
reserved_semispace_size_(8 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
max_semispace_size_(8 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
- initial_semispace_size_(Max(LUMP_OF_MEMORY, Page::kPageSize)),
+ initial_semispace_size_(Page::kPageSize),
max_old_generation_size_(700ul * LUMP_OF_MEMORY),
- max_executable_size_(128l * LUMP_OF_MEMORY),
+ max_executable_size_(256l * LUMP_OF_MEMORY),
// Variables set based on semispace_size_ and old_generation_size_ in
// ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_)
@@ -93,6 +91,7 @@ Heap::Heap()
always_allocate_scope_depth_(0),
linear_allocation_scope_depth_(0),
contexts_disposed_(0),
+ global_ic_age_(0),
scan_on_scavenge_pages_(0),
new_space_(this),
old_pointer_space_(NULL),
@@ -105,6 +104,7 @@ Heap::Heap()
gc_post_processing_depth_(0),
ms_count_(0),
gc_count_(0),
+ remembered_unmapped_pages_index_(0),
unflattened_strings_length_(0),
#ifdef DEBUG
allocation_allowed_(true),
@@ -112,6 +112,7 @@ Heap::Heap()
disallow_allocation_failure_(false),
debug_utils_(NULL),
#endif // DEBUG
+ new_space_high_promotion_mode_active_(false),
old_gen_promotion_limit_(kMinimumPromotionLimit),
old_gen_allocation_limit_(kMinimumAllocationLimit),
old_gen_limit_factor_(1),
@@ -143,6 +144,11 @@ Heap::Heap()
number_idle_notifications_(0),
last_idle_notification_gc_count_(0),
last_idle_notification_gc_count_init_(false),
+ mark_sweeps_since_idle_round_started_(0),
+ ms_count_at_last_idle_notification_(0),
+ gc_count_at_last_idle_gc_(0),
+ scavenges_since_last_idle_round_(kIdleScavengeThreshold),
+ promotion_queue_(this),
configured_(false),
chunks_queued_for_free_(NULL) {
// Allow build-time customization of the max semispace size. Building
@@ -169,7 +175,7 @@ Heap::Heap()
intptr_t Heap::Capacity() {
- if (!HasBeenSetup()) return 0;
+ if (!HasBeenSetUp()) return 0;
return new_space_.Capacity() +
old_pointer_space_->Capacity() +
@@ -181,7 +187,7 @@ intptr_t Heap::Capacity() {
intptr_t Heap::CommittedMemory() {
- if (!HasBeenSetup()) return 0;
+ if (!HasBeenSetUp()) return 0;
return new_space_.CommittedMemory() +
old_pointer_space_->CommittedMemory() +
@@ -193,14 +199,14 @@ intptr_t Heap::CommittedMemory() {
}
intptr_t Heap::CommittedMemoryExecutable() {
- if (!HasBeenSetup()) return 0;
+ if (!HasBeenSetUp()) return 0;
return isolate()->memory_allocator()->SizeExecutable();
}
intptr_t Heap::Available() {
- if (!HasBeenSetup()) return 0;
+ if (!HasBeenSetUp()) return 0;
return new_space_.Available() +
old_pointer_space_->Available() +
@@ -211,7 +217,7 @@ intptr_t Heap::Available() {
}
-bool Heap::HasBeenSetup() {
+bool Heap::HasBeenSetUp() {
return old_pointer_space_ != NULL &&
old_data_space_ != NULL &&
code_space_ != NULL &&
@@ -229,16 +235,19 @@ int Heap::GcSafeSizeOfOldObject(HeapObject* object) {
}
-GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
+GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
+ const char** reason) {
// Is global GC requested?
if (space != NEW_SPACE || FLAG_gc_global) {
isolate_->counters()->gc_compactor_caused_by_request()->Increment();
+ *reason = "GC in old space requested";
return MARK_COMPACTOR;
}
// Is enough data promoted to justify a global GC?
if (OldGenerationPromotionLimitReached()) {
isolate_->counters()->gc_compactor_caused_by_promoted_data()->Increment();
+ *reason = "promotion limit reached";
return MARK_COMPACTOR;
}
@@ -246,6 +255,7 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
if (old_gen_exhausted_) {
isolate_->counters()->
gc_compactor_caused_by_oldspace_exhaustion()->Increment();
+ *reason = "old generations exhausted";
return MARK_COMPACTOR;
}
@@ -261,10 +271,12 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
if (isolate_->memory_allocator()->MaxAvailable() <= new_space_.Size()) {
isolate_->counters()->
gc_compactor_caused_by_oldspace_exhaustion()->Increment();
+ *reason = "scavenge might not succeed";
return MARK_COMPACTOR;
}
// Default
+ *reason = NULL;
return SCAVENGER;
}
@@ -424,17 +436,17 @@ void Heap::GarbageCollectionEpilogue() {
}
-void Heap::CollectAllGarbage(int flags) {
+void Heap::CollectAllGarbage(int flags, const char* gc_reason) {
// Since we are ignoring the return value, the exact choice of space does
// not matter, so long as we do not specify NEW_SPACE, which would not
// cause a full GC.
mark_compact_collector_.SetFlags(flags);
- CollectGarbage(OLD_POINTER_SPACE);
+ CollectGarbage(OLD_POINTER_SPACE, gc_reason);
mark_compact_collector_.SetFlags(kNoGCFlags);
}
-void Heap::CollectAllAvailableGarbage() {
+void Heap::CollectAllAvailableGarbage(const char* gc_reason) {
// Since we are ignoring the return value, the exact choice of space does
// not matter, so long as we do not specify NEW_SPACE, which would not
// cause a full GC.
@@ -446,18 +458,27 @@ void Heap::CollectAllAvailableGarbage() {
// Note: as weak callbacks can execute arbitrary code, we cannot
// hope that eventually there will be no weak callbacks invocations.
// Therefore stop recollecting after several attempts.
- mark_compact_collector()->SetFlags(kMakeHeapIterableMask);
+ mark_compact_collector()->SetFlags(kMakeHeapIterableMask |
+ kReduceMemoryFootprintMask);
+ isolate_->compilation_cache()->Clear();
const int kMaxNumberOfAttempts = 7;
for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
- if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) {
+ if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR, gc_reason, NULL)) {
break;
}
}
mark_compact_collector()->SetFlags(kNoGCFlags);
+ new_space_.Shrink();
+ UncommitFromSpace();
+ Shrink();
+ incremental_marking()->UncommitMarkingDeque();
}
-bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
+bool Heap::CollectGarbage(AllocationSpace space,
+ GarbageCollector collector,
+ const char* gc_reason,
+ const char* collector_reason) {
// The VM is in the GC state until exiting this function.
VMState state(isolate_, GC);
@@ -477,19 +498,26 @@ bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
}
if (collector == MARK_COMPACTOR &&
- !mark_compact_collector()->PreciseSweepingRequired() &&
+ !mark_compact_collector()->abort_incremental_marking_ &&
!incremental_marking()->IsStopped() &&
!incremental_marking()->should_hurry() &&
FLAG_incremental_marking_steps) {
- if (FLAG_trace_incremental_marking) {
- PrintF("[IncrementalMarking] Delaying MarkSweep.\n");
+ // Make progress in incremental marking.
+ const intptr_t kStepSizeWhenDelayedByScavenge = 1 * MB;
+ incremental_marking()->Step(kStepSizeWhenDelayedByScavenge,
+ IncrementalMarking::NO_GC_VIA_STACK_GUARD);
+ if (!incremental_marking()->IsComplete()) {
+ if (FLAG_trace_incremental_marking) {
+ PrintF("[IncrementalMarking] Delaying MarkSweep.\n");
+ }
+ collector = SCAVENGER;
+ collector_reason = "incremental marking delaying mark-sweep";
}
- collector = SCAVENGER;
}
bool next_gc_likely_to_collect_more = false;
- { GCTracer tracer(this);
+ { GCTracer tracer(this, gc_reason, collector_reason);
GarbageCollectionPrologue();
// The GC count was incremented in the prologue. Tell the tracer about
// it.
@@ -521,7 +549,7 @@ bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
void Heap::PerformScavenge() {
- GCTracer tracer(this);
+ GCTracer tracer(this, NULL, NULL);
if (incremental_marking()->IsStopped()) {
PerformGarbageCollection(SCAVENGER, &tracer);
} else {
@@ -539,7 +567,7 @@ class SymbolTableVerifier : public ObjectVisitor {
for (Object** p = start; p < end; p++) {
if ((*p)->IsHeapObject()) {
// Check that the symbol is actually a symbol.
- ASSERT((*p)->IsNull() || (*p)->IsUndefined() || (*p)->IsSymbol());
+ ASSERT((*p)->IsTheHole() || (*p)->IsUndefined() || (*p)->IsSymbol());
}
}
}
@@ -555,6 +583,17 @@ static void VerifySymbolTable() {
}
+static bool AbortIncrementalMarkingAndCollectGarbage(
+ Heap* heap,
+ AllocationSpace space,
+ const char* gc_reason = NULL) {
+ heap->mark_compact_collector()->SetFlags(Heap::kAbortIncrementalMarkingMask);
+ bool result = heap->CollectGarbage(space, gc_reason);
+ heap->mark_compact_collector()->SetFlags(Heap::kNoGCFlags);
+ return result;
+}
+
+
void Heap::ReserveSpace(
int new_space_size,
int pointer_space_size,
@@ -571,30 +610,38 @@ void Heap::ReserveSpace(
PagedSpace* cell_space = Heap::cell_space();
LargeObjectSpace* lo_space = Heap::lo_space();
bool gc_performed = true;
- while (gc_performed) {
+ int counter = 0;
+ static const int kThreshold = 20;
+ while (gc_performed && counter++ < kThreshold) {
gc_performed = false;
if (!new_space->ReserveSpace(new_space_size)) {
- Heap::CollectGarbage(NEW_SPACE);
+ Heap::CollectGarbage(NEW_SPACE,
+ "failed to reserve space in the new space");
gc_performed = true;
}
if (!old_pointer_space->ReserveSpace(pointer_space_size)) {
- Heap::CollectGarbage(OLD_POINTER_SPACE);
+ AbortIncrementalMarkingAndCollectGarbage(this, OLD_POINTER_SPACE,
+ "failed to reserve space in the old pointer space");
gc_performed = true;
}
if (!(old_data_space->ReserveSpace(data_space_size))) {
- Heap::CollectGarbage(OLD_DATA_SPACE);
+ AbortIncrementalMarkingAndCollectGarbage(this, OLD_DATA_SPACE,
+ "failed to reserve space in the old data space");
gc_performed = true;
}
if (!(code_space->ReserveSpace(code_space_size))) {
- Heap::CollectGarbage(CODE_SPACE);
+ AbortIncrementalMarkingAndCollectGarbage(this, CODE_SPACE,
+ "failed to reserve space in the code space");
gc_performed = true;
}
if (!(map_space->ReserveSpace(map_space_size))) {
- Heap::CollectGarbage(MAP_SPACE);
+ AbortIncrementalMarkingAndCollectGarbage(this, MAP_SPACE,
+ "failed to reserve space in the map space");
gc_performed = true;
}
if (!(cell_space->ReserveSpace(cell_space_size))) {
- Heap::CollectGarbage(CELL_SPACE);
+ AbortIncrementalMarkingAndCollectGarbage(this, CELL_SPACE,
+ "failed to reserve space in the cell space");
gc_performed = true;
}
// We add a slack-factor of 2 in order to have space for a series of
@@ -606,10 +653,16 @@ void Heap::ReserveSpace(
large_object_size += cell_space_size + map_space_size + code_space_size +
data_space_size + pointer_space_size;
if (!(lo_space->ReserveSpace(large_object_size))) {
- Heap::CollectGarbage(LO_SPACE);
+ AbortIncrementalMarkingAndCollectGarbage(this, LO_SPACE,
+ "failed to reserve space in the large object space");
gc_performed = true;
}
}
+
+ if (gc_performed) {
+ // Failed to reserve the space after several attempts.
+ V8::FatalProcessOutOfMemory("Heap::ReserveSpace");
+ }
}
@@ -632,13 +685,17 @@ void Heap::ClearJSFunctionResultCaches() {
Object* context = global_contexts_list_;
while (!context->IsUndefined()) {
- // Get the caches for this context:
- FixedArray* caches =
- Context::cast(context)->jsfunction_result_caches();
- // Clear the caches:
- int length = caches->length();
- for (int i = 0; i < length; i++) {
- JSFunctionResultCache::cast(caches->get(i))->Clear();
+ // Get the caches for this context. GC can happen when the context
+ // is not fully initialized, so the caches can be undefined.
+ Object* caches_or_undefined =
+ Context::cast(context)->get(Context::JSFUNCTION_RESULT_CACHES_INDEX);
+ if (!caches_or_undefined->IsUndefined()) {
+ FixedArray* caches = FixedArray::cast(caches_or_undefined);
+ // Clear the caches:
+ int length = caches->length();
+ for (int i = 0; i < length; i++) {
+ JSFunctionResultCache::cast(caches->get(i))->Clear();
+ }
}
// Get the next context:
context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
@@ -655,7 +712,13 @@ void Heap::ClearNormalizedMapCaches() {
Object* context = global_contexts_list_;
while (!context->IsUndefined()) {
- Context::cast(context)->normalized_map_cache()->Clear();
+ // GC can happen when the context is not fully initialized,
+ // so the cache can be undefined.
+ Object* cache =
+ Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX);
+ if (!cache->IsUndefined()) {
+ NormalizedMapCache::cast(cache)->Clear();
+ }
context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
}
}
@@ -666,12 +729,18 @@ void Heap::UpdateSurvivalRateTrend(int start_new_space_size) {
(static_cast<double>(young_survivors_after_last_gc_) * 100) /
start_new_space_size;
- if (survival_rate > kYoungSurvivalRateThreshold) {
+ if (survival_rate > kYoungSurvivalRateHighThreshold) {
high_survival_rate_period_length_++;
} else {
high_survival_rate_period_length_ = 0;
}
+ if (survival_rate < kYoungSurvivalRateLowThreshold) {
+ low_survival_rate_period_length_++;
+ } else {
+ low_survival_rate_period_length_ = 0;
+ }
+
double survival_rate_diff = survival_rate_ - survival_rate;
if (survival_rate_diff > kYoungSurvivalRateAllowedDeviation) {
@@ -760,6 +829,37 @@ bool Heap::PerformGarbageCollection(GarbageCollector collector,
UpdateSurvivalRateTrend(start_new_space_size);
}
+ if (!new_space_high_promotion_mode_active_ &&
+ new_space_.Capacity() == new_space_.MaximumCapacity() &&
+ IsStableOrIncreasingSurvivalTrend() &&
+ IsHighSurvivalRate()) {
+ // Stable high survival rates even though young generation is at
+ // maximum capacity indicates that most objects will be promoted.
+ // To decrease scavenger pauses and final mark-sweep pauses, we
+ // have to limit maximal capacity of the young generation.
+ new_space_high_promotion_mode_active_ = true;
+ if (FLAG_trace_gc) {
+ PrintF("Limited new space size due to high promotion rate: %d MB\n",
+ new_space_.InitialCapacity() / MB);
+ }
+ } else if (new_space_high_promotion_mode_active_ &&
+ IsStableOrDecreasingSurvivalTrend() &&
+ IsLowSurvivalRate()) {
+ // Decreasing low survival rates might indicate that the above high
+ // promotion mode is over and we should allow the young generation
+ // to grow again.
+ new_space_high_promotion_mode_active_ = false;
+ if (FLAG_trace_gc) {
+ PrintF("Unlimited new space size due to low promotion rate: %d MB\n",
+ new_space_.MaximumCapacity() / MB);
+ }
+ }
+
+ if (new_space_high_promotion_mode_active_ &&
+ new_space_.Capacity() > new_space_.InitialCapacity()) {
+ new_space_.Shrink();
+ }
+
isolate_->counters()->objs_since_last_young()->Set(0);
gc_post_processing_depth_++;
@@ -819,6 +919,8 @@ void Heap::MarkCompact(GCTracer* tracer) {
isolate_->counters()->objs_since_last_full()->Set(0);
contexts_disposed_ = 0;
+
+ isolate_->set_context_exit_happened(false);
}
@@ -834,8 +936,7 @@ void Heap::MarkCompactPrologue() {
CompletelyClearInstanceofCache();
- // TODO(1605) select heuristic for flushing NumberString cache with
- // FlushNumberStringCache
+ FlushNumberStringCache();
if (FLAG_cleanup_code_caches_at_gc) {
polymorphic_code_cache()->set_cache(undefined_value());
}
@@ -912,9 +1013,11 @@ static void VerifyNonPointerSpacePointers() {
void Heap::CheckNewSpaceExpansionCriteria() {
if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
- survived_since_last_expansion_ > new_space_.Capacity()) {
- // Grow the size of new space if there is room to grow and enough
- // data has survived scavenge since the last expansion.
+ survived_since_last_expansion_ > new_space_.Capacity() &&
+ !new_space_high_promotion_mode_active_) {
+ // Grow the size of new space if there is room to grow, enough data
+ // has survived scavenge since the last expansion and we are not in
+ // high promotion mode.
new_space_.Grow();
survived_since_last_expansion_ = 0;
}
@@ -969,7 +1072,7 @@ void StoreBufferRebuilder::Callback(MemoryChunk* page, StoreBufferEvent event) {
// Store Buffer overflowed while scanning promoted objects. These are not
// in any particular page, though they are likely to be clustered by the
// allocation routines.
- store_buffer_->HandleFullness();
+ store_buffer_->EnsureSpace(StoreBuffer::kStoreBufferSize);
} else {
// Store Buffer overflowed while scanning a particular old space page for
// pointers to new space.
@@ -985,6 +1088,42 @@ void StoreBufferRebuilder::Callback(MemoryChunk* page, StoreBufferEvent event) {
}
+void PromotionQueue::Initialize() {
+ // Assumes that a NewSpacePage exactly fits a number of promotion queue
+ // entries (where each is a pair of intptr_t). This allows us to simplify
+ // the test for when to switch pages.
+ ASSERT((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize)
+ == 0);
+ limit_ = reinterpret_cast<intptr_t*>(heap_->new_space()->ToSpaceStart());
+ front_ = rear_ =
+ reinterpret_cast<intptr_t*>(heap_->new_space()->ToSpaceEnd());
+ emergency_stack_ = NULL;
+ guard_ = false;
+}
+
+
+void PromotionQueue::RelocateQueueHead() {
+ ASSERT(emergency_stack_ == NULL);
+
+ Page* p = Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
+ intptr_t* head_start = rear_;
+ intptr_t* head_end =
+ Min(front_, reinterpret_cast<intptr_t*>(p->area_end()));
+
+ int entries_count =
+ static_cast<int>(head_end - head_start) / kEntrySizeInWords;
+
+ emergency_stack_ = new List<Entry>(2 * entries_count);
+
+ while (head_start != head_end) {
+ int size = static_cast<int>(*(head_start++));
+ HeapObject* obj = reinterpret_cast<HeapObject*>(*(head_start++));
+ emergency_stack_->Add(Entry(obj, size));
+ }
+ rear_ = head_end;
+}
+
+
void Heap::Scavenge() {
#ifdef DEBUG
if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
@@ -999,7 +1138,7 @@ void Heap::Scavenge() {
isolate_->descriptor_lookup_cache()->Clear();
// Used for updating survived_since_last_expansion_ at function end.
- intptr_t survived_watermark = PromotedSpaceSize();
+ intptr_t survived_watermark = PromotedSpaceSizeOfObjects();
CheckNewSpaceExpansionCriteria();
@@ -1007,8 +1146,7 @@ void Heap::Scavenge() {
incremental_marking()->PrepareForScavenge();
- old_pointer_space()->AdvanceSweeper(new_space_.Size());
- old_data_space()->AdvanceSweeper(new_space_.Size());
+ AdvanceSweepers(static_cast<int>(new_space_.Size()));
// Flip the semispaces. After flipping, to space is empty, from space has
// live objects.
@@ -1033,7 +1171,7 @@ void Heap::Scavenge() {
// frees up its size in bytes from the top of the new space, and
// objects are at least one pointer in size.
Address new_space_front = new_space_.ToSpaceStart();
- promotion_queue_.Initialize(new_space_.ToSpaceEnd());
+ promotion_queue_.Initialize();
#ifdef DEBUG
store_buffer()->Clean();
@@ -1073,12 +1211,15 @@ void Heap::Scavenge() {
&scavenge_visitor);
new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
-
UpdateNewSpaceReferencesInExternalStringTable(
&UpdateNewSpaceReferenceInExternalStringTableEntry);
+ promotion_queue_.Destroy();
+
LiveObjectList::UpdateReferencesForScavengeGC();
- isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
+ if (!FLAG_watch_ic_patching) {
+ isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
+ }
incremental_marking()->UpdateMarkingDequeAfterScavenge();
ASSERT(new_space_front == new_space_.top());
@@ -1091,26 +1232,28 @@ void Heap::Scavenge() {
// Update how much has survived scavenge.
IncrementYoungSurvivorsCounter(static_cast<int>(
- (PromotedSpaceSize() - survived_watermark) + new_space_.Size()));
+ (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));
LOG(isolate_, ResourceEvent("scavenge", "end"));
gc_state_ = NOT_IN_GC;
+
+ scavenges_since_last_idle_round_++;
}
-HeapObject* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
- Object** p) {
+String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
+ Object** p) {
MapWord first_word = HeapObject::cast(*p)->map_word();
if (!first_word.IsForwardingAddress()) {
// Unreachable external string can be finalized.
- heap->FinalizeExternalString(HeapObject::cast(*p));
+ heap->FinalizeExternalString(String::cast(*p));
return NULL;
}
// String is still reachable.
- return HeapObject::cast(first_word.ToForwardingAddress());
+ return String::cast(first_word.ToForwardingAddress());
}
@@ -1128,11 +1271,11 @@ void Heap::UpdateNewSpaceReferencesInExternalStringTable(
for (Object** p = start; p < end; ++p) {
ASSERT(InFromSpace(*p));
- HeapObject* target = updater_func(this, p);
+ String* target = updater_func(this, p);
if (target == NULL) continue;
- ASSERT(target->IsExternalString() || target->map()->has_external_resource());
+ ASSERT(target->IsExternalString());
if (InNewSpace(target)) {
// String is still in new space. Update the table entry.
@@ -1140,12 +1283,12 @@ void Heap::UpdateNewSpaceReferencesInExternalStringTable(
++last;
} else {
// String got promoted. Move it to the old string list.
- external_string_table_.AddOldObject(target);
+ external_string_table_.AddOldString(target);
}
}
ASSERT(last <= end);
- external_string_table_.ShrinkNewObjects(static_cast<int>(last - start));
+ external_string_table_.ShrinkNewStrings(static_cast<int>(last - start));
}
@@ -1261,6 +1404,28 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
}
+void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
+ AssertNoAllocation no_allocation;
+
+ class VisitorAdapter : public ObjectVisitor {
+ public:
+ explicit VisitorAdapter(v8::ExternalResourceVisitor* visitor)
+ : visitor_(visitor) {}
+ virtual void VisitPointers(Object** start, Object** end) {
+ for (Object** p = start; p < end; p++) {
+ if ((*p)->IsExternalString()) {
+ visitor_->VisitExternalString(Utils::ToLocal(
+ Handle<String>(String::cast(*p))));
+ }
+ }
+ }
+ private:
+ v8::ExternalResourceVisitor* visitor_;
+ } visitor_adapter(visitor);
+ external_string_table_.Iterate(&visitor_adapter);
+}
+
+
class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> {
public:
static inline void VisitPointer(Heap* heap, Object** p) {
@@ -1286,7 +1451,7 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
NewSpaceScavenger::IterateBody(object->map(), object);
} else {
new_space_front =
- NewSpacePage::FromLimit(new_space_front)->next_page()->body();
+ NewSpacePage::FromLimit(new_space_front)->next_page()->area_start();
}
}
@@ -1411,10 +1576,10 @@ class ScavengingVisitor : public StaticVisitorBase {
// Helper function used by CopyObject to copy a source object to an
// allocated target object and update the forwarding pointer in the source
// object. Returns the target object.
- INLINE(static HeapObject* MigrateObject(Heap* heap,
- HeapObject* source,
- HeapObject* target,
- int size)) {
+ INLINE(static void MigrateObject(Heap* heap,
+ HeapObject* source,
+ HeapObject* target,
+ int size)) {
// Copy the content of source to target.
heap->CopyBlock(target->address(), source->address(), size);
@@ -1437,11 +1602,9 @@ class ScavengingVisitor : public StaticVisitorBase {
if (marks_handling == TRANSFER_MARKS) {
if (Marking::TransferColor(source, target)) {
- MemoryChunk::IncrementLiveBytes(target->address(), size);
+ MemoryChunk::IncrementLiveBytesFromGC(target->address(), size);
}
}
-
- return target;
}
template<ObjectContents object_contents, SizeRestriction size_restriction>
@@ -1450,7 +1613,7 @@ class ScavengingVisitor : public StaticVisitorBase {
HeapObject* object,
int object_size) {
SLOW_ASSERT((size_restriction != SMALL) ||
- (object_size <= Page::kMaxHeapObjectSize));
+ (object_size <= Page::kMaxNonCodeHeapObjectSize));
SLOW_ASSERT(object->Size() == object_size);
Heap* heap = map->GetHeap();
@@ -1458,7 +1621,7 @@ class ScavengingVisitor : public StaticVisitorBase {
MaybeObject* maybe_result;
if ((size_restriction != SMALL) &&
- (object_size > Page::kMaxHeapObjectSize)) {
+ (object_size > Page::kMaxNonCodeHeapObjectSize)) {
maybe_result = heap->lo_space()->AllocateRaw(object_size,
NOT_EXECUTABLE);
} else {
@@ -1472,7 +1635,12 @@ class ScavengingVisitor : public StaticVisitorBase {
Object* result = NULL; // Initialization to please compiler.
if (maybe_result->ToObject(&result)) {
HeapObject* target = HeapObject::cast(result);
- *slot = MigrateObject(heap, object , target, object_size);
+
+ // Order is important: slot might be inside of the target if target
+ // was allocated over a dead object and slot comes from the store
+ // buffer.
+ *slot = target;
+ MigrateObject(heap, object, target, object_size);
if (object_contents == POINTER_OBJECT) {
heap->promotion_queue()->insert(target, object_size);
@@ -1483,9 +1651,15 @@ class ScavengingVisitor : public StaticVisitorBase {
}
}
MaybeObject* allocation = heap->new_space()->AllocateRaw(object_size);
+ heap->promotion_queue()->SetNewLimit(heap->new_space()->top());
Object* result = allocation->ToObjectUnchecked();
+ HeapObject* target = HeapObject::cast(result);
- *slot = MigrateObject(heap, object, HeapObject::cast(result), object_size);
+ // Order is important: slot might be inside of the target if target
+ // was allocated over a dead object and slot comes from the store
+ // buffer.
+ *slot = target;
+ MigrateObject(heap, object, target, object_size);
return;
}
@@ -1723,18 +1897,18 @@ MaybeObject* Heap::AllocateMap(InstanceType instance_type,
}
Map* map = reinterpret_cast<Map*>(result);
- map->set_map(meta_map());
+ map->set_map_no_write_barrier(meta_map());
map->set_instance_type(instance_type);
map->set_visitor_id(
StaticVisitorBase::GetVisitorId(instance_type, instance_size));
- map->set_prototype(null_value());
- map->set_constructor(null_value());
+ map->set_prototype(null_value(), SKIP_WRITE_BARRIER);
+ map->set_constructor(null_value(), SKIP_WRITE_BARRIER);
map->set_instance_size(instance_size);
map->set_inobject_properties(0);
map->set_pre_allocated_property_fields(0);
map->init_instance_descriptors();
- map->set_code_cache(empty_fixed_array());
- map->set_prototype_transitions(empty_fixed_array());
+ map->set_code_cache(empty_fixed_array(), SKIP_WRITE_BARRIER);
+ map->set_prototype_transitions(empty_fixed_array(), SKIP_WRITE_BARRIER);
map->set_unused_property_fields(0);
map->set_bit_field(0);
map->set_bit_field2(1 << Map::kIsExtensible);
@@ -1751,13 +1925,12 @@ MaybeObject* Heap::AllocateMap(InstanceType instance_type,
MaybeObject* Heap::AllocateCodeCache() {
- Object* result;
- { MaybeObject* maybe_result = AllocateStruct(CODE_CACHE_TYPE);
- if (!maybe_result->ToObject(&result)) return maybe_result;
+ CodeCache* code_cache;
+ { MaybeObject* maybe_code_cache = AllocateStruct(CODE_CACHE_TYPE);
+ if (!maybe_code_cache->To(&code_cache)) return maybe_code_cache;
}
- CodeCache* code_cache = CodeCache::cast(result);
- code_cache->set_default_cache(empty_fixed_array());
- code_cache->set_normal_type_cache(undefined_value());
+ code_cache->set_default_cache(empty_fixed_array(), SKIP_WRITE_BARRIER);
+ code_cache->set_normal_type_cache(undefined_value(), SKIP_WRITE_BARRIER);
return code_cache;
}
@@ -1767,6 +1940,40 @@ MaybeObject* Heap::AllocatePolymorphicCodeCache() {
}
+MaybeObject* Heap::AllocateAccessorPair() {
+ AccessorPair* accessors;
+ { MaybeObject* maybe_accessors = AllocateStruct(ACCESSOR_PAIR_TYPE);
+ if (!maybe_accessors->To(&accessors)) return maybe_accessors;
+ }
+ accessors->set_getter(the_hole_value(), SKIP_WRITE_BARRIER);
+ accessors->set_setter(the_hole_value(), SKIP_WRITE_BARRIER);
+ return accessors;
+}
+
+
+MaybeObject* Heap::AllocateTypeFeedbackInfo() {
+ TypeFeedbackInfo* info;
+ { MaybeObject* maybe_info = AllocateStruct(TYPE_FEEDBACK_INFO_TYPE);
+ if (!maybe_info->To(&info)) return maybe_info;
+ }
+ info->set_ic_total_count(0);
+ info->set_ic_with_type_info_count(0);
+ info->set_type_feedback_cells(TypeFeedbackCells::cast(empty_fixed_array()),
+ SKIP_WRITE_BARRIER);
+ return info;
+}
+
+
+MaybeObject* Heap::AllocateAliasedArgumentsEntry(int aliased_context_slot) {
+ AliasedArgumentsEntry* entry;
+ { MaybeObject* maybe_entry = AllocateStruct(ALIASED_ARGUMENTS_ENTRY_TYPE);
+ if (!maybe_entry->To(&entry)) return maybe_entry;
+ }
+ entry->set_aliased_context_slot(aliased_context_slot);
+ return entry;
+}
+
+
const Heap::StringTypeTable Heap::string_type_table[] = {
#define STRING_TYPE_ELEMENT(type, size, name, camel_name) \
{type, size, k##camel_name##MapRootIndex},
@@ -1871,7 +2078,7 @@ bool Heap::CreateInitialMaps() {
AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
if (!maybe_obj->ToObject(&obj)) return false;
}
- set_serialized_scope_info_map(Map::cast(obj));
+ set_scope_info_map(Map::cast(obj));
{ MaybeObject* maybe_obj = AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
if (!maybe_obj->ToObject(&obj)) return false;
@@ -2050,6 +2257,12 @@ bool Heap::CreateInitialMaps() {
AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
if (!maybe_obj->ToObject(&obj)) return false;
}
+ set_module_context_map(Map::cast(obj));
+
+ { MaybeObject* maybe_obj =
+ AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
Map* global_context_map = Map::cast(obj);
global_context_map->set_visitor_id(StaticVisitorBase::kVisitGlobalContext);
set_global_context_map(global_context_map);
@@ -2074,7 +2287,7 @@ bool Heap::CreateInitialMaps() {
MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) {
// Statically ensure that it is safe to allocate heap numbers in paged
// spaces.
- STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize);
+ STATIC_ASSERT(HeapNumber::kSize <= Page::kNonCodeObjectAreaSize);
AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
Object* result;
@@ -2083,7 +2296,7 @@ MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) {
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- HeapObject::cast(result)->set_map(heap_number_map());
+ HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map());
HeapNumber::cast(result)->set_value(value);
return result;
}
@@ -2095,13 +2308,13 @@ MaybeObject* Heap::AllocateHeapNumber(double value) {
// This version of AllocateHeapNumber is optimized for
// allocation in new space.
- STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize);
+ STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxNonCodeHeapObjectSize);
ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
Object* result;
{ MaybeObject* maybe_result = new_space_.AllocateRaw(HeapNumber::kSize);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- HeapObject::cast(result)->set_map(heap_number_map());
+ HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map());
HeapNumber::cast(result)->set_value(value);
return result;
}
@@ -2112,7 +2325,8 @@ MaybeObject* Heap::AllocateJSGlobalPropertyCell(Object* value) {
{ MaybeObject* maybe_result = AllocateRawCell();
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- HeapObject::cast(result)->set_map(global_property_cell_map());
+ HeapObject::cast(result)->set_map_no_write_barrier(
+ global_property_cell_map());
JSGlobalPropertyCell::cast(result)->set_value(value);
return result;
}
@@ -2215,6 +2429,10 @@ bool Heap::CreateInitialObjects() {
}
set_infinity_value(HeapNumber::cast(obj));
+ // The hole has not been created yet, but we want to put something
+ // predictable in the gaps in the symbol table, so let's make that Smi zero.
+ set_the_hole_value(reinterpret_cast<Oddball*>(Smi::FromInt(0)));
+
// Allocate initial symbol table.
{ MaybeObject* maybe_obj = SymbolTable::Allocate(kInitialSymbolTableSize);
if (!maybe_obj->ToObject(&obj)) return false;
@@ -2258,34 +2476,26 @@ bool Heap::CreateInitialObjects() {
set_the_hole_value(Oddball::cast(obj));
{ MaybeObject* maybe_obj = CreateOddball("arguments_marker",
- Smi::FromInt(-2),
+ Smi::FromInt(-4),
Oddball::kArgumentMarker);
if (!maybe_obj->ToObject(&obj)) return false;
}
set_arguments_marker(Oddball::cast(obj));
{ MaybeObject* maybe_obj = CreateOddball("no_interceptor_result_sentinel",
- Smi::FromInt(-3),
+ Smi::FromInt(-2),
Oddball::kOther);
if (!maybe_obj->ToObject(&obj)) return false;
}
set_no_interceptor_result_sentinel(obj);
{ MaybeObject* maybe_obj = CreateOddball("termination_exception",
- Smi::FromInt(-4),
+ Smi::FromInt(-3),
Oddball::kOther);
if (!maybe_obj->ToObject(&obj)) return false;
}
set_termination_exception(obj);
- { MaybeObject* maybe_obj = CreateOddball("frame_alignment_marker",
- Smi::FromInt(-5),
- Oddball::kOther);
- if (!maybe_obj->ToObject(&obj)) return false;
- }
- set_frame_alignment_marker(Oddball::cast(obj));
- STATIC_ASSERT(Oddball::kLeastHiddenOddballNumber == -5);
-
// Allocate the empty string.
{ MaybeObject* maybe_obj = AllocateRawAsciiString(0, TENURED);
if (!maybe_obj->ToObject(&obj)) return false;
@@ -2321,17 +2531,18 @@ bool Heap::CreateInitialObjects() {
// Allocate the code_stubs dictionary. The initial size is set to avoid
// expanding the dictionary during bootstrapping.
- { MaybeObject* maybe_obj = NumberDictionary::Allocate(128);
+ { MaybeObject* maybe_obj = UnseededNumberDictionary::Allocate(128);
if (!maybe_obj->ToObject(&obj)) return false;
}
- set_code_stubs(NumberDictionary::cast(obj));
+ set_code_stubs(UnseededNumberDictionary::cast(obj));
+
// Allocate the non_monomorphic_cache used in stub-cache.cc. The initial size
// is set to avoid expanding the dictionary during bootstrapping.
- { MaybeObject* maybe_obj = NumberDictionary::Allocate(64);
+ { MaybeObject* maybe_obj = UnseededNumberDictionary::Allocate(64);
if (!maybe_obj->ToObject(&obj)) return false;
}
- set_non_monomorphic_cache(NumberDictionary::cast(obj));
+ set_non_monomorphic_cache(UnseededNumberDictionary::cast(obj));
{ MaybeObject* maybe_obj = AllocatePolymorphicCodeCache();
if (!maybe_obj->ToObject(&obj)) return false;
@@ -2354,7 +2565,10 @@ bool Heap::CreateInitialObjects() {
}
set_intrinsic_function_names(StringDictionary::cast(obj));
- if (InitializeNumberStringCache()->IsFailure()) return false;
+ { MaybeObject* maybe_obj = AllocateInitialNumberStringCache();
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
+ set_number_string_cache(FixedArray::cast(obj));
// Allocate cache for single character ASCII strings.
{ MaybeObject* maybe_obj =
@@ -2453,7 +2667,7 @@ void StringSplitCache::Enter(Heap* heap,
}
}
}
- array->set_map(heap->fixed_cow_array_map());
+ array->set_map_no_write_barrier(heap->fixed_cow_array_map());
}
@@ -2464,20 +2678,44 @@ void StringSplitCache::Clear(FixedArray* cache) {
}
-MaybeObject* Heap::InitializeNumberStringCache() {
- // Compute the size of the number string cache based on the max heap size.
- // max_semispace_size_ == 512 KB => number_string_cache_size = 32.
- // max_semispace_size_ == 8 MB => number_string_cache_size = 16KB.
- int number_string_cache_size = max_semispace_size_ / 512;
- number_string_cache_size = Max(32, Min(16*KB, number_string_cache_size));
- Object* obj;
+MaybeObject* Heap::AllocateInitialNumberStringCache() {
MaybeObject* maybe_obj =
- AllocateFixedArray(number_string_cache_size * 2, TENURED);
- if (maybe_obj->ToObject(&obj)) set_number_string_cache(FixedArray::cast(obj));
+ AllocateFixedArray(kInitialNumberStringCacheSize * 2, TENURED);
return maybe_obj;
}
+int Heap::FullSizeNumberStringCacheLength() {
+ // Compute the size of the number string cache based on the max newspace size.
+ // The number string cache has a minimum size based on twice the initial cache
+ // size to ensure that it is bigger after being made 'full size'.
+ int number_string_cache_size = max_semispace_size_ / 512;
+ number_string_cache_size = Max(kInitialNumberStringCacheSize * 2,
+ Min(0x4000, number_string_cache_size));
+ // There is a string and a number per entry so the length is twice the number
+ // of entries.
+ return number_string_cache_size * 2;
+}
+
+
+void Heap::AllocateFullSizeNumberStringCache() {
+ // The idea is to have a small number string cache in the snapshot to keep
+ // boot-time memory usage down. If we expand the number string cache already
+ // while creating the snapshot then that didn't work out.
+ ASSERT(!Serializer::enabled());
+ MaybeObject* maybe_obj =
+ AllocateFixedArray(FullSizeNumberStringCacheLength(), TENURED);
+ Object* new_cache;
+ if (maybe_obj->ToObject(&new_cache)) {
+ // We don't bother to repopulate the cache with entries from the old cache.
+ // It will be repopulated soon enough with new strings.
+ set_number_string_cache(FixedArray::cast(new_cache));
+ }
+ // If allocation fails then we just return without doing anything. It is only
+ // a cache, so best effort is OK here.
+}
+
+
void Heap::FlushNumberStringCache() {
// Flush the number to string cache.
int len = number_string_cache()->length();
@@ -2523,11 +2761,17 @@ void Heap::SetNumberStringCache(Object* number, String* string) {
int mask = (number_string_cache()->length() >> 1) - 1;
if (number->IsSmi()) {
hash = smi_get_hash(Smi::cast(number)) & mask;
- number_string_cache()->set(hash * 2, Smi::cast(number));
} else {
hash = double_get_hash(number->Number()) & mask;
- number_string_cache()->set(hash * 2, number);
}
+ if (number_string_cache()->get(hash * 2) != undefined_value() &&
+ number_string_cache()->length() != FullSizeNumberStringCacheLength()) {
+ // The first time we have a hash collision, we move to the full sized
+ // number string cache.
+ AllocateFullSizeNumberStringCache();
+ return;
+ }
+ number_string_cache()->set(hash * 2, number);
number_string_cache()->set(hash * 2 + 1, string);
}
@@ -2627,7 +2871,7 @@ MaybeObject* Heap::NumberFromDouble(double value, PretenureFlag pretenure) {
MaybeObject* Heap::AllocateForeign(Address address, PretenureFlag pretenure) {
// Statically ensure that it is safe to allocate foreigns in paged spaces.
- STATIC_ASSERT(Foreign::kSize <= Page::kMaxHeapObjectSize);
+ STATIC_ASSERT(Foreign::kSize <= Page::kMaxNonCodeHeapObjectSize);
AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
Foreign* result;
MaybeObject* maybe_result = Allocate(foreign_map(), space);
@@ -2646,18 +2890,20 @@ MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
share->set_name(name);
Code* illegal = isolate_->builtins()->builtin(Builtins::kIllegal);
share->set_code(illegal);
- share->set_scope_info(SerializedScopeInfo::Empty());
+ share->set_scope_info(ScopeInfo::Empty());
Code* construct_stub =
isolate_->builtins()->builtin(Builtins::kJSConstructStubGeneric);
share->set_construct_stub(construct_stub);
share->set_instance_class_name(Object_symbol());
- share->set_function_data(undefined_value());
- share->set_script(undefined_value());
- share->set_debug_info(undefined_value());
- share->set_inferred_name(empty_string());
- share->set_initial_map(undefined_value());
- share->set_this_property_assignments(undefined_value());
- share->set_deopt_counter(Smi::FromInt(FLAG_deopt_every_n_times));
+ share->set_function_data(undefined_value(), SKIP_WRITE_BARRIER);
+ share->set_script(undefined_value(), SKIP_WRITE_BARRIER);
+ share->set_debug_info(undefined_value(), SKIP_WRITE_BARRIER);
+ share->set_inferred_name(empty_string(), SKIP_WRITE_BARRIER);
+ share->set_initial_map(undefined_value(), SKIP_WRITE_BARRIER);
+ share->set_this_property_assignments(undefined_value(), SKIP_WRITE_BARRIER);
+ share->set_ast_node_count(0);
+ share->set_deopt_counter(FLAG_deopt_every_n_times);
+ share->set_ic_age(0);
// Set integer fields (smi or int, depending on the architecture).
share->set_length(0);
@@ -2688,8 +2934,8 @@ MaybeObject* Heap::AllocateJSMessageObject(String* type,
if (!maybe_result->ToObject(&result)) return maybe_result;
}
JSMessageObject* message = JSMessageObject::cast(result);
- message->set_properties(Heap::empty_fixed_array());
- message->set_elements(Heap::empty_fixed_array());
+ message->set_properties(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER);
+ message->set_elements(Heap::empty_fixed_array(), SKIP_WRITE_BARRIER);
message->set_type(type);
message->set_arguments(arguments);
message->set_start_position(start_position);
@@ -2780,8 +3026,8 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
bool is_ascii_data_in_two_byte_string = false;
if (!is_ascii) {
// At least one of the strings uses two-byte representation so we
- // can't use the fast case code for short ascii strings below, but
- // we can try to save memory if all chars actually fit in ascii.
+ // can't use the fast case code for short ASCII strings below, but
+ // we can try to save memory if all chars actually fit in ASCII.
is_ascii_data_in_two_byte_string =
first->HasOnlyAsciiChars() && second->HasOnlyAsciiChars();
if (is_ascii_data_in_two_byte_string) {
@@ -2790,9 +3036,9 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
}
// If the resulting string is small make a flat string.
- if (length < String::kMinNonFlatLength) {
+ if (length < ConsString::kMinLength) {
// Note that neither of the two inputs can be a slice because:
- STATIC_ASSERT(String::kMinNonFlatLength <= SlicedString::kMinLength);
+ STATIC_ASSERT(ConsString::kMinLength <= SlicedString::kMinLength);
ASSERT(first->IsFlat());
ASSERT(second->IsFlat());
if (is_ascii) {
@@ -2805,14 +3051,14 @@ MaybeObject* Heap::AllocateConsString(String* first, String* second) {
// Copy first part.
const char* src;
if (first->IsExternalString()) {
- src = ExternalAsciiString::cast(first)->resource()->data();
+ src = ExternalAsciiString::cast(first)->GetChars();
} else {
src = SeqAsciiString::cast(first)->GetChars();
}
for (int i = 0; i < first_length; i++) *dest++ = src[i];
// Copy second part.
if (second->IsExternalString()) {
- src = ExternalAsciiString::cast(second)->resource()->data();
+ src = ExternalAsciiString::cast(second)->GetChars();
} else {
src = SeqAsciiString::cast(second)->GetChars();
}
@@ -2868,7 +3114,7 @@ MaybeObject* Heap::AllocateSubString(String* buffer,
int end,
PretenureFlag pretenure) {
int length = end - start;
- if (length == 0) {
+ if (length <= 0) {
return empty_string();
} else if (length == 1) {
return LookupSingleCharacterStringFromCode(buffer->Get(start));
@@ -3043,13 +3289,14 @@ MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
}
int size = ByteArray::SizeFor(length);
Object* result;
- { MaybeObject* maybe_result = (size <= MaxObjectSizeInPagedSpace())
+ { MaybeObject* maybe_result = (size <= Page::kMaxNonCodeHeapObjectSize)
? old_data_space_->AllocateRaw(size)
: lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- reinterpret_cast<ByteArray*>(result)->set_map(byte_array_map());
+ reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier(
+ byte_array_map());
reinterpret_cast<ByteArray*>(result)->set_length(length);
return result;
}
@@ -3061,13 +3308,14 @@ MaybeObject* Heap::AllocateByteArray(int length) {
}
int size = ByteArray::SizeFor(length);
AllocationSpace space =
- (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : NEW_SPACE;
+ (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : NEW_SPACE;
Object* result;
{ MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- reinterpret_cast<ByteArray*>(result)->set_map(byte_array_map());
+ reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier(
+ byte_array_map());
reinterpret_cast<ByteArray*>(result)->set_length(length);
return result;
}
@@ -3077,11 +3325,11 @@ void Heap::CreateFillerObjectAt(Address addr, int size) {
if (size == 0) return;
HeapObject* filler = HeapObject::FromAddress(addr);
if (size == kPointerSize) {
- filler->set_map(one_pointer_filler_map());
+ filler->set_map_no_write_barrier(one_pointer_filler_map());
} else if (size == 2 * kPointerSize) {
- filler->set_map(two_pointer_filler_map());
+ filler->set_map_no_write_barrier(two_pointer_filler_map());
} else {
- filler->set_map(free_space_map());
+ filler->set_map_no_write_barrier(free_space_map());
FreeSpace::cast(filler)->set_size(size);
}
}
@@ -3099,7 +3347,7 @@ MaybeObject* Heap::AllocateExternalArray(int length,
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- reinterpret_cast<ExternalArray*>(result)->set_map(
+ reinterpret_cast<ExternalArray*>(result)->set_map_no_write_barrier(
MapForExternalArrayType(array_type));
reinterpret_cast<ExternalArray*>(result)->set_length(length);
reinterpret_cast<ExternalArray*>(result)->set_external_pointer(
@@ -3115,10 +3363,9 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
bool immovable) {
// Allocate ByteArray before the Code object, so that we do not risk
// leaving uninitialized Code object (and breaking the heap).
- Object* reloc_info;
- { MaybeObject* maybe_reloc_info = AllocateByteArray(desc.reloc_size, TENURED);
- if (!maybe_reloc_info->ToObject(&reloc_info)) return maybe_reloc_info;
- }
+ ByteArray* reloc_info;
+ MaybeObject* maybe_reloc_info = AllocateByteArray(desc.reloc_size, TENURED);
+ if (!maybe_reloc_info->To(&reloc_info)) return maybe_reloc_info;
// Compute size.
int body_size = RoundUp(desc.instr_size, kObjectAlignment);
@@ -3127,7 +3374,7 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
MaybeObject* maybe_result;
// Large code objects and code objects which should stay at a fixed address
// are allocated in large object space.
- if (obj_size > MaxObjectSizeInPagedSpace() || immovable) {
+ if (obj_size > code_space()->AreaSize() || immovable) {
maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
} else {
maybe_result = code_space_->AllocateRaw(obj_size);
@@ -3137,18 +3384,21 @@ MaybeObject* Heap::CreateCode(const CodeDesc& desc,
if (!maybe_result->ToObject(&result)) return maybe_result;
// Initialize the object
- HeapObject::cast(result)->set_map(code_map());
+ HeapObject::cast(result)->set_map_no_write_barrier(code_map());
Code* code = Code::cast(result);
ASSERT(!isolate_->code_range()->exists() ||
isolate_->code_range()->contains(code->address()));
code->set_instruction_size(desc.instr_size);
- code->set_relocation_info(ByteArray::cast(reloc_info));
+ code->set_relocation_info(reloc_info);
code->set_flags(flags);
if (code->is_call_stub() || code->is_keyed_call_stub()) {
code->set_check_type(RECEIVER_MAP_CHECK);
}
- code->set_deoptimization_data(empty_fixed_array());
- code->set_next_code_flushing_candidate(undefined_value());
+ code->set_deoptimization_data(empty_fixed_array(), SKIP_WRITE_BARRIER);
+ code->set_type_feedback_info(undefined_value(), SKIP_WRITE_BARRIER);
+ code->set_handler_table(empty_fixed_array(), SKIP_WRITE_BARRIER);
+ code->set_gc_metadata(Smi::FromInt(0));
+ code->set_ic_age(global_ic_age_);
// Allow self references to created code object by patching the handle to
// point to the newly allocated Code object.
if (!self_reference.is_null()) {
@@ -3174,7 +3424,7 @@ MaybeObject* Heap::CopyCode(Code* code) {
// Allocate an object the same size as the code object.
int obj_size = code->Size();
MaybeObject* maybe_result;
- if (obj_size > MaxObjectSizeInPagedSpace()) {
+ if (obj_size > code_space()->AreaSize()) {
maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
} else {
maybe_result = code_space_->AllocateRaw(obj_size);
@@ -3217,7 +3467,7 @@ MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
static_cast<size_t>(code->instruction_end() - old_addr);
MaybeObject* maybe_result;
- if (new_obj_size > MaxObjectSizeInPagedSpace()) {
+ if (new_obj_size > code_space()->AreaSize()) {
maybe_result = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE);
} else {
maybe_result = code_space_->AllocateRaw(new_obj_size);
@@ -3264,7 +3514,8 @@ MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) {
AllocateRaw(map->instance_size(), space, retry_space);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- HeapObject::cast(result)->set_map(map);
+ // No need for write barrier since object is white and map is in old space.
+ HeapObject::cast(result)->set_map_no_write_barrier(map);
return result;
}
@@ -3338,7 +3589,7 @@ MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
JSObject* boilerplate;
int arguments_object_size;
bool strict_mode_callee = callee->IsJSFunction() &&
- JSFunction::cast(callee)->shared()->strict_mode();
+ !JSFunction::cast(callee)->shared()->is_classic_mode();
if (strict_mode_callee) {
boilerplate =
isolate()->context()->global_context()->
@@ -3489,8 +3740,8 @@ void Heap::InitializeJSObjectFromMap(JSObject* obj,
// TODO(1240798): Initialize the object's body using valid initial values
// according to the object's initial map. For example, if the map's
// instance type is JS_ARRAY_TYPE, the length field should be initialized
- // to a number (eg, Smi::FromInt(0)) and the elements initialized to a
- // fixed array (eg, Heap::empty_fixed_array()). Currently, the object
+ // to a number (e.g. Smi::FromInt(0)) and the elements initialized to a
+ // fixed array (e.g. Heap::empty_fixed_array()). Currently, the object
// verification code has to cope with (temporarily) invalid objects. See
// for example, JSArray::JSArrayVerify).
Object* filler;
@@ -3537,7 +3788,7 @@ MaybeObject* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
// Allocate the JSObject.
AllocationSpace space =
(pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
- if (map->instance_size() > MaxObjectSizeInPagedSpace()) space = LO_SPACE;
+ if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE;
Object* obj;
{ MaybeObject* maybe_obj = Allocate(map, space);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
@@ -3565,8 +3816,8 @@ MaybeObject* Heap::AllocateJSObject(JSFunction* constructor,
Map::cast(initial_map)->set_constructor(constructor);
}
// Allocate the object based on the constructors initial map.
- MaybeObject* result =
- AllocateJSObjectFromMap(constructor->initial_map(), pretenure);
+ MaybeObject* result = AllocateJSObjectFromMap(
+ constructor->initial_map(), pretenure);
#ifdef DEBUG
// Make sure result is NOT a global object if valid.
Object* non_failure;
@@ -3576,6 +3827,64 @@ MaybeObject* Heap::AllocateJSObject(JSFunction* constructor,
}
+MaybeObject* Heap::AllocateJSArrayAndStorage(
+ ElementsKind elements_kind,
+ int length,
+ int capacity,
+ ArrayStorageAllocationMode mode,
+ PretenureFlag pretenure) {
+ ASSERT(capacity >= length);
+ MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure);
+ JSArray* array;
+ if (!maybe_array->To(&array)) return maybe_array;
+
+ if (capacity == 0) {
+ array->set_length(Smi::FromInt(0));
+ array->set_elements(empty_fixed_array());
+ return array;
+ }
+
+ FixedArrayBase* elms;
+ MaybeObject* maybe_elms = NULL;
+ if (elements_kind == FAST_DOUBLE_ELEMENTS) {
+ if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
+ maybe_elms = AllocateUninitializedFixedDoubleArray(capacity);
+ } else {
+ ASSERT(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
+ maybe_elms = AllocateFixedDoubleArrayWithHoles(capacity);
+ }
+ } else {
+ ASSERT(elements_kind == FAST_ELEMENTS ||
+ elements_kind == FAST_SMI_ONLY_ELEMENTS);
+ if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
+ maybe_elms = AllocateUninitializedFixedArray(capacity);
+ } else {
+ ASSERT(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
+ maybe_elms = AllocateFixedArrayWithHoles(capacity);
+ }
+ }
+ if (!maybe_elms->To(&elms)) return maybe_elms;
+
+ array->set_elements(elms);
+ array->set_length(Smi::FromInt(length));
+ return array;
+}
+
+
+MaybeObject* Heap::AllocateJSArrayWithElements(
+ FixedArrayBase* elements,
+ ElementsKind elements_kind,
+ PretenureFlag pretenure) {
+ MaybeObject* maybe_array = AllocateJSArray(elements_kind, pretenure);
+ JSArray* array;
+ if (!maybe_array->To(&array)) return maybe_array;
+
+ array->set_elements(elements);
+ array->set_length(Smi::FromInt(elements->length()));
+ return array;
+}
+
+
MaybeObject* Heap::AllocateJSProxy(Object* handler, Object* prototype) {
// Allocate map.
// TODO(rossberg): Once we optimize proxies, think about a scheme to share
@@ -3591,7 +3900,7 @@ MaybeObject* Heap::AllocateJSProxy(Object* handler, Object* prototype) {
if (!maybe_result->To<JSProxy>(&result)) return maybe_result;
result->InitializeBody(map->instance_size(), Smi::FromInt(0));
result->set_handler(handler);
- result->set_hash(undefined_value());
+ result->set_hash(undefined_value(), SKIP_WRITE_BARRIER);
return result;
}
@@ -3615,7 +3924,7 @@ MaybeObject* Heap::AllocateJSFunctionProxy(Object* handler,
if (!maybe_result->To<JSFunctionProxy>(&result)) return maybe_result;
result->InitializeBody(map->instance_size(), Smi::FromInt(0));
result->set_handler(handler);
- result->set_hash(undefined_value());
+ result->set_hash(undefined_value(), SKIP_WRITE_BARRIER);
result->set_call_trap(call_trap);
result->set_construct_trap(construct_trap);
return result;
@@ -3683,7 +3992,7 @@ MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
}
Map* new_map = Map::cast(obj);
- // Setup the global object as a normalized object.
+ // Set up the global object as a normalized object.
global->set_map(new_map);
global->map()->clear_instance_descriptors();
global->set_properties(dictionary);
@@ -3875,8 +4184,6 @@ MaybeObject* Heap::AllocateStringFromAscii(Vector<const char> string,
MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string,
PretenureFlag pretenure) {
- // V8 only supports characters in the Basic Multilingual Plane.
- const uc32 kMaxSupportedChar = 0xFFFF;
// Count the number of characters in the UTF-8 string and check if
// it is an ASCII string.
Access<UnicodeCache::Utf8Decoder>
@@ -3884,8 +4191,12 @@ MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string,
decoder->Reset(string.start(), string.length());
int chars = 0;
while (decoder->has_more()) {
- decoder->GetNext();
- chars++;
+ uint32_t r = decoder->GetNext();
+ if (r <= unibrow::Utf16::kMaxNonSurrogateCharCode) {
+ chars++;
+ } else {
+ chars += 2;
+ }
}
Object* result;
@@ -3896,10 +4207,15 @@ MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string,
// Convert and copy the characters into the new object.
String* string_result = String::cast(result);
decoder->Reset(string.start(), string.length());
- for (int i = 0; i < chars; i++) {
- uc32 r = decoder->GetNext();
- if (r > kMaxSupportedChar) { r = unibrow::Utf8::kBadChar; }
- string_result->Set(i, r);
+ int i = 0;
+ while (i < chars) {
+ uint32_t r = decoder->GetNext();
+ if (r > unibrow::Utf16::kMaxNonSurrogateCharCode) {
+ string_result->Set(i++, unibrow::Utf16::LeadSurrogate(r));
+ string_result->Set(i++, unibrow::Utf16::TrailSurrogate(r));
+ } else {
+ string_result->Set(i++, r);
+ }
}
return result;
}
@@ -3932,31 +4248,22 @@ Map* Heap::SymbolMapForString(String* string) {
if (InNewSpace(string)) return NULL;
// Find the corresponding symbol map for strings.
- Map* map = string->map();
- if (map == ascii_string_map()) {
- return ascii_symbol_map();
- }
- if (map == string_map()) {
- return symbol_map();
- }
- if (map == cons_string_map()) {
- return cons_symbol_map();
- }
- if (map == cons_ascii_string_map()) {
- return cons_ascii_symbol_map();
- }
- if (map == external_string_map()) {
- return external_symbol_map();
+ switch (string->map()->instance_type()) {
+ case STRING_TYPE: return symbol_map();
+ case ASCII_STRING_TYPE: return ascii_symbol_map();
+ case CONS_STRING_TYPE: return cons_symbol_map();
+ case CONS_ASCII_STRING_TYPE: return cons_ascii_symbol_map();
+ case EXTERNAL_STRING_TYPE: return external_symbol_map();
+ case EXTERNAL_ASCII_STRING_TYPE: return external_ascii_symbol_map();
+ case EXTERNAL_STRING_WITH_ASCII_DATA_TYPE:
+ return external_symbol_with_ascii_data_map();
+ case SHORT_EXTERNAL_STRING_TYPE: return short_external_symbol_map();
+ case SHORT_EXTERNAL_ASCII_STRING_TYPE:
+ return short_external_ascii_symbol_map();
+ case SHORT_EXTERNAL_STRING_WITH_ASCII_DATA_TYPE:
+ return short_external_symbol_with_ascii_data_map();
+ default: return NULL; // No match found.
}
- if (map == external_ascii_string_map()) {
- return external_ascii_symbol_map();
- }
- if (map == external_string_with_ascii_data_map()) {
- return external_symbol_with_ascii_data_map();
- }
-
- // No match found.
- return NULL;
}
@@ -3965,8 +4272,8 @@ MaybeObject* Heap::AllocateInternalSymbol(unibrow::CharacterStream* buffer,
uint32_t hash_field) {
ASSERT(chars >= 0);
// Ensure the chars matches the number of characters in the buffer.
- ASSERT(static_cast<unsigned>(chars) == buffer->Length());
- // Determine whether the string is ascii.
+ ASSERT(static_cast<unsigned>(chars) == buffer->Utf16Length());
+ // Determine whether the string is ASCII.
bool is_ascii = true;
while (buffer->has_more()) {
if (buffer->GetNext() > unibrow::Utf8::kMaxOneByteChar) {
@@ -3996,24 +4303,30 @@ MaybeObject* Heap::AllocateInternalSymbol(unibrow::CharacterStream* buffer,
// Allocate string.
Object* result;
- { MaybeObject* maybe_result = (size > MaxObjectSizeInPagedSpace())
+ { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
: old_data_space_->AllocateRaw(size);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- reinterpret_cast<HeapObject*>(result)->set_map(map);
+ reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);
// Set length and hash fields of the allocated string.
String* answer = String::cast(result);
answer->set_length(chars);
answer->set_hash_field(hash_field);
- SeqString::cast(answer)->set_symbol_id(0);
ASSERT_EQ(size, answer->Size());
// Fill in the characters.
- for (int i = 0; i < chars; i++) {
- answer->Set(i, buffer->GetNext());
+ int i = 0;
+ while (i < chars) {
+ uint32_t character = buffer->GetNext();
+ if (character > unibrow::Utf16::kMaxNonSurrogateCharCode) {
+ answer->Set(i++, unibrow::Utf16::LeadSurrogate(character));
+ answer->Set(i++, unibrow::Utf16::TrailSurrogate(character));
+ } else {
+ answer->Set(i++, character);
+ }
}
return answer;
}
@@ -4034,11 +4347,12 @@ MaybeObject* Heap::AllocateRawAsciiString(int length, PretenureFlag pretenure) {
if (size > kMaxObjectSizeInNewSpace) {
// Allocate in large object space, retry space will be ignored.
space = LO_SPACE;
- } else if (size > MaxObjectSizeInPagedSpace()) {
+ } else if (size > Page::kMaxNonCodeHeapObjectSize) {
// Allocate in new space, retry in large object space.
retry_space = LO_SPACE;
}
- } else if (space == OLD_DATA_SPACE && size > MaxObjectSizeInPagedSpace()) {
+ } else if (space == OLD_DATA_SPACE &&
+ size > Page::kMaxNonCodeHeapObjectSize) {
space = LO_SPACE;
}
Object* result;
@@ -4047,10 +4361,9 @@ MaybeObject* Heap::AllocateRawAsciiString(int length, PretenureFlag pretenure) {
}
// Partially initialize the object.
- HeapObject::cast(result)->set_map(ascii_string_map());
+ HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map());
String::cast(result)->set_length(length);
String::cast(result)->set_hash_field(String::kEmptyHashField);
- SeqString::cast(result)->set_symbol_id(0);
ASSERT_EQ(size, HeapObject::cast(result)->Size());
return result;
}
@@ -4070,11 +4383,12 @@ MaybeObject* Heap::AllocateRawTwoByteString(int length,
if (size > kMaxObjectSizeInNewSpace) {
// Allocate in large object space, retry space will be ignored.
space = LO_SPACE;
- } else if (size > MaxObjectSizeInPagedSpace()) {
+ } else if (size > Page::kMaxNonCodeHeapObjectSize) {
// Allocate in new space, retry in large object space.
retry_space = LO_SPACE;
}
- } else if (space == OLD_DATA_SPACE && size > MaxObjectSizeInPagedSpace()) {
+ } else if (space == OLD_DATA_SPACE &&
+ size > Page::kMaxNonCodeHeapObjectSize) {
space = LO_SPACE;
}
Object* result;
@@ -4083,15 +4397,33 @@ MaybeObject* Heap::AllocateRawTwoByteString(int length,
}
// Partially initialize the object.
- HeapObject::cast(result)->set_map(string_map());
+ HeapObject::cast(result)->set_map_no_write_barrier(string_map());
String::cast(result)->set_length(length);
String::cast(result)->set_hash_field(String::kEmptyHashField);
- SeqString::cast(result)->set_symbol_id(0);
ASSERT_EQ(size, HeapObject::cast(result)->Size());
return result;
}
+MaybeObject* Heap::AllocateJSArray(
+ ElementsKind elements_kind,
+ PretenureFlag pretenure) {
+ Context* global_context = isolate()->context()->global_context();
+ JSFunction* array_function = global_context->array_function();
+ Map* map = array_function->initial_map();
+ if (elements_kind == FAST_DOUBLE_ELEMENTS) {
+ map = Map::cast(global_context->double_js_array_map());
+ } else if (elements_kind == FAST_ELEMENTS || !FLAG_smi_only_arrays) {
+ map = Map::cast(global_context->object_js_array_map());
+ } else {
+ ASSERT(elements_kind == FAST_SMI_ONLY_ELEMENTS);
+ ASSERT(map == global_context->smi_js_array_map());
+ }
+
+ return AllocateJSObjectFromMap(map, pretenure);
+}
+
+
MaybeObject* Heap::AllocateEmptyFixedArray() {
int size = FixedArray::SizeFor(0);
Object* result;
@@ -4100,7 +4432,8 @@ MaybeObject* Heap::AllocateEmptyFixedArray() {
if (!maybe_result->ToObject(&result)) return maybe_result;
}
// Initialize the object.
- reinterpret_cast<FixedArray*>(result)->set_map(fixed_array_map());
+ reinterpret_cast<FixedArray*>(result)->set_map_no_write_barrier(
+ fixed_array_map());
reinterpret_cast<FixedArray*>(result)->set_length(0);
return result;
}
@@ -4129,13 +4462,13 @@ MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
}
if (InNewSpace(obj)) {
HeapObject* dst = HeapObject::cast(obj);
- dst->set_map(map);
+ dst->set_map_no_write_barrier(map);
CopyBlock(dst->address() + kPointerSize,
src->address() + kPointerSize,
FixedArray::SizeFor(len) - kPointerSize);
return obj;
}
- HeapObject::cast(obj)->set_map(map);
+ HeapObject::cast(obj)->set_map_no_write_barrier(map);
FixedArray* result = FixedArray::cast(obj);
result->set_length(len);
@@ -4155,7 +4488,7 @@ MaybeObject* Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src,
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
HeapObject* dst = HeapObject::cast(obj);
- dst->set_map(map);
+ dst->set_map_no_write_barrier(map);
CopyBlock(
dst->address() + FixedDoubleArray::kLengthOffset,
src->address() + FixedDoubleArray::kLengthOffset,
@@ -4173,7 +4506,7 @@ MaybeObject* Heap::AllocateFixedArray(int length) {
}
// Initialize header.
FixedArray* array = reinterpret_cast<FixedArray*>(result);
- array->set_map(fixed_array_map());
+ array->set_map_no_write_barrier(fixed_array_map());
array->set_length(length);
// Initialize body.
ASSERT(!InNewSpace(undefined_value()));
@@ -4194,13 +4527,13 @@ MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
// Too big for new space.
space = LO_SPACE;
} else if (space == OLD_POINTER_SPACE &&
- size > MaxObjectSizeInPagedSpace()) {
+ size > Page::kMaxNonCodeHeapObjectSize) {
// Too big for old pointer space.
space = LO_SPACE;
}
AllocationSpace retry_space =
- (size <= MaxObjectSizeInPagedSpace()) ? OLD_POINTER_SPACE : LO_SPACE;
+ (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_POINTER_SPACE : LO_SPACE;
return AllocateRaw(size, space, retry_space);
}
@@ -4221,7 +4554,7 @@ MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- HeapObject::cast(result)->set_map(heap->fixed_array_map());
+ HeapObject::cast(result)->set_map_no_write_barrier(heap->fixed_array_map());
FixedArray* array = FixedArray::cast(result);
array->set_length(length);
MemsetPointer(array->data_start(), filler, length);
@@ -4254,7 +4587,8 @@ MaybeObject* Heap::AllocateUninitializedFixedArray(int length) {
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
- reinterpret_cast<FixedArray*>(obj)->set_map(fixed_array_map());
+ reinterpret_cast<FixedArray*>(obj)->set_map_no_write_barrier(
+ fixed_array_map());
FixedArray::cast(obj)->set_length(length);
return obj;
}
@@ -4268,7 +4602,7 @@ MaybeObject* Heap::AllocateEmptyFixedDoubleArray() {
if (!maybe_result->ToObject(&result)) return maybe_result;
}
// Initialize the object.
- reinterpret_cast<FixedDoubleArray*>(result)->set_map(
+ reinterpret_cast<FixedDoubleArray*>(result)->set_map_no_write_barrier(
fixed_double_array_map());
reinterpret_cast<FixedDoubleArray*>(result)->set_length(0);
return result;
@@ -4278,16 +4612,38 @@ MaybeObject* Heap::AllocateEmptyFixedDoubleArray() {
MaybeObject* Heap::AllocateUninitializedFixedDoubleArray(
int length,
PretenureFlag pretenure) {
- if (length == 0) return empty_fixed_double_array();
+ if (length == 0) return empty_fixed_array();
+
+ Object* elements_object;
+ MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(length, pretenure);
+ if (!maybe_obj->ToObject(&elements_object)) return maybe_obj;
+ FixedDoubleArray* elements =
+ reinterpret_cast<FixedDoubleArray*>(elements_object);
+
+ elements->set_map_no_write_barrier(fixed_double_array_map());
+ elements->set_length(length);
+ return elements;
+}
+
+
+MaybeObject* Heap::AllocateFixedDoubleArrayWithHoles(
+ int length,
+ PretenureFlag pretenure) {
+ if (length == 0) return empty_fixed_array();
- Object* obj;
- { MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(length, pretenure);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+ Object* elements_object;
+ MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(length, pretenure);
+ if (!maybe_obj->ToObject(&elements_object)) return maybe_obj;
+ FixedDoubleArray* elements =
+ reinterpret_cast<FixedDoubleArray*>(elements_object);
+
+ for (int i = 0; i < length; ++i) {
+ elements->set_the_hole(i);
}
- reinterpret_cast<FixedDoubleArray*>(obj)->set_map(fixed_double_array_map());
- FixedDoubleArray::cast(obj)->set_length(length);
- return obj;
+ elements->set_map_no_write_barrier(fixed_double_array_map());
+ elements->set_length(length);
+ return elements;
}
@@ -4304,13 +4660,13 @@ MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
// Too big for new space.
space = LO_SPACE;
} else if (space == OLD_DATA_SPACE &&
- size > MaxObjectSizeInPagedSpace()) {
+ size > Page::kMaxNonCodeHeapObjectSize) {
// Too big for old data space.
space = LO_SPACE;
}
AllocationSpace retry_space =
- (size <= MaxObjectSizeInPagedSpace()) ? OLD_DATA_SPACE : LO_SPACE;
+ (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_DATA_SPACE : LO_SPACE;
return AllocateRaw(size, space, retry_space);
}
@@ -4321,7 +4677,8 @@ MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
{ MaybeObject* maybe_result = AllocateFixedArray(length, pretenure);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- reinterpret_cast<HeapObject*>(result)->set_map(hash_table_map());
+ reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(
+ hash_table_map());
ASSERT(result->IsHashTable());
return result;
}
@@ -4334,7 +4691,10 @@ MaybeObject* Heap::AllocateGlobalContext() {
if (!maybe_result->ToObject(&result)) return maybe_result;
}
Context* context = reinterpret_cast<Context*>(result);
- context->set_map(global_context_map());
+ context->set_map_no_write_barrier(global_context_map());
+ context->set_smi_js_array_map(undefined_value());
+ context->set_double_js_array_map(undefined_value());
+ context->set_object_js_array_map(undefined_value());
ASSERT(context->IsGlobalContext());
ASSERT(result->IsContext());
return result;
@@ -4348,12 +4708,11 @@ MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) {
if (!maybe_result->ToObject(&result)) return maybe_result;
}
Context* context = reinterpret_cast<Context*>(result);
- context->set_map(function_context_map());
+ context->set_map_no_write_barrier(function_context_map());
context->set_closure(function);
context->set_previous(function->context());
context->set_extension(NULL);
context->set_global(function->context()->global());
- context->set_qml_global(function->context()->qml_global());
return context;
}
@@ -4369,12 +4728,11 @@ MaybeObject* Heap::AllocateCatchContext(JSFunction* function,
if (!maybe_result->ToObject(&result)) return maybe_result;
}
Context* context = reinterpret_cast<Context*>(result);
- context->set_map(catch_context_map());
+ context->set_map_no_write_barrier(catch_context_map());
context->set_closure(function);
context->set_previous(previous);
context->set_extension(name);
context->set_global(previous->global());
- context->set_qml_global(previous->qml_global());
context->set(Context::THROWN_OBJECT_INDEX, thrown_object);
return context;
}
@@ -4388,43 +4746,38 @@ MaybeObject* Heap::AllocateWithContext(JSFunction* function,
if (!maybe_result->ToObject(&result)) return maybe_result;
}
Context* context = reinterpret_cast<Context*>(result);
- context->set_map(with_context_map());
+ context->set_map_no_write_barrier(with_context_map());
context->set_closure(function);
context->set_previous(previous);
context->set_extension(extension);
context->set_global(previous->global());
- context->set_qml_global(previous->qml_global());
return context;
}
MaybeObject* Heap::AllocateBlockContext(JSFunction* function,
Context* previous,
- SerializedScopeInfo* scope_info) {
+ ScopeInfo* scope_info) {
Object* result;
{ MaybeObject* maybe_result =
- AllocateFixedArrayWithHoles(scope_info->NumberOfContextSlots());
+ AllocateFixedArrayWithHoles(scope_info->ContextLength());
if (!maybe_result->ToObject(&result)) return maybe_result;
}
Context* context = reinterpret_cast<Context*>(result);
- context->set_map(block_context_map());
+ context->set_map_no_write_barrier(block_context_map());
context->set_closure(function);
context->set_previous(previous);
context->set_extension(scope_info);
context->set_global(previous->global());
- context->set_qml_global(previous->qml_global());
return context;
}
-MaybeObject* Heap::AllocateSerializedScopeInfo(int length) {
- Object* result;
- { MaybeObject* maybe_result = AllocateFixedArray(length, TENURED);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- SerializedScopeInfo* scope_info =
- reinterpret_cast<SerializedScopeInfo*>(result);
- scope_info->set_map(serialized_scope_info_map());
+MaybeObject* Heap::AllocateScopeInfo(int length) {
+ FixedArray* scope_info;
+ MaybeObject* maybe_scope_info = AllocateFixedArray(length, TENURED);
+ if (!maybe_scope_info->To(&scope_info)) return maybe_scope_info;
+ scope_info->set_map_no_write_barrier(scope_info_map());
return scope_info;
}
@@ -4442,7 +4795,7 @@ STRUCT_LIST(MAKE_CASE)
}
int size = map->instance_size();
AllocationSpace space =
- (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE;
+ (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : OLD_POINTER_SPACE;
Object* result;
{ MaybeObject* maybe_result = Allocate(map, space);
if (!maybe_result->ToObject(&result)) return maybe_result;
@@ -4461,13 +4814,116 @@ bool Heap::IsHeapIterable() {
void Heap::EnsureHeapIsIterable() {
ASSERT(IsAllocationAllowed());
if (!IsHeapIterable()) {
- CollectAllGarbage(kMakeHeapIterableMask);
+ CollectAllGarbage(kMakeHeapIterableMask, "Heap::EnsureHeapIsIterable");
}
ASSERT(IsHeapIterable());
}
-bool Heap::IdleNotification() {
+void Heap::AdvanceIdleIncrementalMarking(intptr_t step_size) {
+ incremental_marking()->Step(step_size,
+ IncrementalMarking::NO_GC_VIA_STACK_GUARD);
+
+ if (incremental_marking()->IsComplete()) {
+ bool uncommit = false;
+ if (gc_count_at_last_idle_gc_ == gc_count_) {
+ // No GC since the last full GC, the mutator is probably not active.
+ isolate_->compilation_cache()->Clear();
+ uncommit = true;
+ }
+ CollectAllGarbage(kNoGCFlags, "idle notification: finalize incremental");
+ gc_count_at_last_idle_gc_ = gc_count_;
+ if (uncommit) {
+ new_space_.Shrink();
+ UncommitFromSpace();
+ }
+ }
+}
+
+
+bool Heap::IdleNotification(int hint) {
+ const int kMaxHint = 1000;
+ intptr_t size_factor = Min(Max(hint, 30), kMaxHint) / 10;
+ // The size factor is in range [3..100].
+ intptr_t step_size = size_factor * IncrementalMarking::kAllocatedThreshold;
+
+ if (contexts_disposed_ > 0) {
+ if (hint >= kMaxHint) {
+ // The embedder is requesting a lot of GC work after context disposal,
+ // we age inline caches so that they don't keep objects from
+ // the old context alive.
+ AgeInlineCaches();
+ }
+ int mark_sweep_time = Min(TimeMarkSweepWouldTakeInMs(), 1000);
+ if (hint >= mark_sweep_time && !FLAG_expose_gc &&
+ incremental_marking()->IsStopped()) {
+ HistogramTimerScope scope(isolate_->counters()->gc_context());
+ CollectAllGarbage(kReduceMemoryFootprintMask,
+ "idle notification: contexts disposed");
+ } else {
+ AdvanceIdleIncrementalMarking(step_size);
+ contexts_disposed_ = 0;
+ }
+ // Make sure that we have no pending context disposals.
+ // Take into account that we might have decided to delay full collection
+ // because incremental marking is in progress.
+ ASSERT((contexts_disposed_ == 0) || !incremental_marking()->IsStopped());
+ return false;
+ }
+
+ if (hint >= kMaxHint || !FLAG_incremental_marking ||
+ FLAG_expose_gc || Serializer::enabled()) {
+ return IdleGlobalGC();
+ }
+
+ // By doing small chunks of GC work in each IdleNotification,
+ // perform a round of incremental GCs and after that wait until
+ // the mutator creates enough garbage to justify a new round.
+ // An incremental GC progresses as follows:
+ // 1. many incremental marking steps,
+ // 2. one old space mark-sweep-compact,
+ // 3. many lazy sweep steps.
+ // Use mark-sweep-compact events to count incremental GCs in a round.
+
+
+ if (incremental_marking()->IsStopped()) {
+ if (!IsSweepingComplete() &&
+ !AdvanceSweepers(static_cast<int>(step_size))) {
+ return false;
+ }
+ }
+
+ if (mark_sweeps_since_idle_round_started_ >= kMaxMarkSweepsInIdleRound) {
+ if (EnoughGarbageSinceLastIdleRound()) {
+ StartIdleRound();
+ } else {
+ return true;
+ }
+ }
+
+ int new_mark_sweeps = ms_count_ - ms_count_at_last_idle_notification_;
+ mark_sweeps_since_idle_round_started_ += new_mark_sweeps;
+ ms_count_at_last_idle_notification_ = ms_count_;
+
+ if (mark_sweeps_since_idle_round_started_ >= kMaxMarkSweepsInIdleRound) {
+ FinishIdleRound();
+ return true;
+ }
+
+ if (incremental_marking()->IsStopped()) {
+ if (!WorthStartingGCWhenIdle()) {
+ FinishIdleRound();
+ return true;
+ }
+ incremental_marking()->Start();
+ }
+
+ AdvanceIdleIncrementalMarking(step_size);
+ return false;
+}
+
+
+bool Heap::IdleGlobalGC() {
static const int kIdlesBeforeScavenge = 4;
static const int kIdlesBeforeMarkSweep = 7;
static const int kIdlesBeforeMarkCompact = 8;
@@ -4495,12 +4951,7 @@ bool Heap::IdleNotification() {
}
if (number_idle_notifications_ == kIdlesBeforeScavenge) {
- if (contexts_disposed_ > 0) {
- HistogramTimerScope scope(isolate_->counters()->gc_context());
- CollectAllGarbage(kNoGCFlags);
- } else {
- CollectGarbage(NEW_SPACE);
- }
+ CollectGarbage(NEW_SPACE, "idle notification");
new_space_.Shrink();
last_idle_notification_gc_count_ = gc_count_;
} else if (number_idle_notifications_ == kIdlesBeforeMarkSweep) {
@@ -4509,32 +4960,16 @@ bool Heap::IdleNotification() {
// generated code for cached functions.
isolate_->compilation_cache()->Clear();
- CollectAllGarbage(kNoGCFlags);
+ CollectAllGarbage(kReduceMemoryFootprintMask, "idle notification");
new_space_.Shrink();
last_idle_notification_gc_count_ = gc_count_;
} else if (number_idle_notifications_ == kIdlesBeforeMarkCompact) {
- CollectAllGarbage(kNoGCFlags);
+ CollectAllGarbage(kReduceMemoryFootprintMask, "idle notification");
new_space_.Shrink();
last_idle_notification_gc_count_ = gc_count_;
number_idle_notifications_ = 0;
finished = true;
- } else if (contexts_disposed_ > 0) {
- if (FLAG_expose_gc) {
- contexts_disposed_ = 0;
- } else {
- HistogramTimerScope scope(isolate_->counters()->gc_context());
- CollectAllGarbage(kNoGCFlags);
- last_idle_notification_gc_count_ = gc_count_;
- }
- // If this is the first idle notification, we reset the
- // notification count to avoid letting idle notifications for
- // context disposal garbage collections start a potentially too
- // aggressive idle GC cycle.
- if (number_idle_notifications_ <= 1) {
- number_idle_notifications_ = 0;
- uncommit = false;
- }
} else if (number_idle_notifications_ > kIdlesBeforeMarkCompact) {
// If we have received more than kIdlesBeforeMarkCompact idle
// notifications we do not perform any cleanup because we don't
@@ -4542,11 +4977,6 @@ bool Heap::IdleNotification() {
finished = true;
}
- // Make sure that we have no pending context disposals and
- // conditionally uncommit from space.
- // Take into account that we might have decided to delay full collection
- // because incremental marking is in progress.
- ASSERT((contexts_disposed_ == 0) || !incremental_marking()->IsStopped());
if (uncommit) UncommitFromSpace();
return finished;
@@ -4556,7 +4986,7 @@ bool Heap::IdleNotification() {
#ifdef DEBUG
void Heap::Print() {
- if (!HasBeenSetup()) return;
+ if (!HasBeenSetUp()) return;
isolate()->PrintStack();
AllSpaces spaces;
for (Space* space = spaces.next(); space != NULL; space = spaces.next())
@@ -4621,7 +5051,7 @@ bool Heap::Contains(HeapObject* value) {
bool Heap::Contains(Address addr) {
if (OS::IsOutsideAllocatedSpace(addr)) return false;
- return HasBeenSetup() &&
+ return HasBeenSetUp() &&
(new_space_.ToSpaceContains(addr) ||
old_pointer_space_->Contains(addr) ||
old_data_space_->Contains(addr) ||
@@ -4639,7 +5069,7 @@ bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
bool Heap::InSpace(Address addr, AllocationSpace space) {
if (OS::IsOutsideAllocatedSpace(addr)) return false;
- if (!HasBeenSetup()) return false;
+ if (!HasBeenSetUp()) return false;
switch (space) {
case NEW_SPACE:
@@ -4664,7 +5094,7 @@ bool Heap::InSpace(Address addr, AllocationSpace space) {
#ifdef DEBUG
void Heap::Verify() {
- ASSERT(HasBeenSetup());
+ ASSERT(HasBeenSetUp());
store_buffer()->Verify();
@@ -4682,8 +5112,37 @@ void Heap::Verify() {
cell_space_->Verify(&no_dirty_regions_visitor);
lo_space_->Verify();
-}
+ VerifyNoAccessorPairSharing();
+}
+
+
+void Heap::VerifyNoAccessorPairSharing() {
+ // Verification is done in 2 phases: First we mark all AccessorPairs, checking
+ // that we mark only unmarked pairs, then we clear all marks, restoring the
+ // initial state. We use the Smi tag of the AccessorPair's getter as the
+ // marking bit, because we can never see a Smi as the getter.
+ for (int phase = 0; phase < 2; phase++) {
+ HeapObjectIterator iter(map_space());
+ for (HeapObject* obj = iter.Next(); obj != NULL; obj = iter.Next()) {
+ if (obj->IsMap()) {
+ DescriptorArray* descs = Map::cast(obj)->instance_descriptors();
+ for (int i = 0; i < descs->number_of_descriptors(); i++) {
+ if (descs->GetType(i) == CALLBACKS &&
+ descs->GetValue(i)->IsAccessorPair()) {
+ AccessorPair* accessors = AccessorPair::cast(descs->GetValue(i));
+ uintptr_t before = reinterpret_cast<intptr_t>(accessors->getter());
+ uintptr_t after = (phase == 0) ?
+ ((before & ~kSmiTagMask) | kSmiTag) :
+ ((before & ~kHeapObjectTag) | kHeapObjectTag);
+ CHECK(before != after);
+ accessors->set_getter(reinterpret_cast<Object*>(after));
+ }
+ }
+ }
+ }
+ }
+}
#endif // DEBUG
@@ -4783,7 +5242,7 @@ void Heap::ZapFromSpace() {
new_space_.FromSpaceEnd());
while (it.has_next()) {
NewSpacePage* page = it.next();
- for (Address cursor = page->body(), limit = page->body_limit();
+ for (Address cursor = page->area_start(), limit = page->area_end();
cursor < limit;
cursor += kPointerSize) {
Memory::Address_at(cursor) = kFromSpaceZapValue;
@@ -4922,9 +5381,9 @@ void Heap::OldPointerSpaceCheckStoreBuffer() {
while (pages.has_next()) {
Page* page = pages.next();
- Object** current = reinterpret_cast<Object**>(page->ObjectAreaStart());
+ Object** current = reinterpret_cast<Object**>(page->area_start());
- Address end = page->ObjectAreaEnd();
+ Address end = page->area_end();
Object*** store_buffer_position = store_buffer()->Start();
Object*** store_buffer_top = store_buffer()->Top();
@@ -4950,9 +5409,9 @@ void Heap::MapSpaceCheckStoreBuffer() {
while (pages.has_next()) {
Page* page = pages.next();
- Object** current = reinterpret_cast<Object**>(page->ObjectAreaStart());
+ Object** current = reinterpret_cast<Object**>(page->area_start());
- Address end = page->ObjectAreaEnd();
+ Address end = page->area_end();
Object*** store_buffer_position = store_buffer()->Start();
Object*** store_buffer_top = store_buffer()->Top();
@@ -5004,29 +5463,29 @@ void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex]));
- v->Synchronize("symbol_table");
+ v->Synchronize(VisitorSynchronization::kSymbolTable);
if (mode != VISIT_ALL_IN_SCAVENGE &&
mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
// Scavenge collections have special processing for this.
external_string_table_.Iterate(v);
}
- v->Synchronize("external_string_table");
+ v->Synchronize(VisitorSynchronization::kExternalStringsTable);
}
void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
- v->Synchronize("strong_root_list");
+ v->Synchronize(VisitorSynchronization::kStrongRootList);
v->VisitPointer(BitCast<Object**>(&hidden_symbol_));
- v->Synchronize("symbol");
+ v->Synchronize(VisitorSynchronization::kSymbol);
isolate_->bootstrapper()->Iterate(v);
- v->Synchronize("bootstrapper");
+ v->Synchronize(VisitorSynchronization::kBootstrapper);
isolate_->Iterate(v);
- v->Synchronize("top");
+ v->Synchronize(VisitorSynchronization::kTop);
Relocatable::Iterate(v);
- v->Synchronize("relocatable");
+ v->Synchronize(VisitorSynchronization::kRelocatable);
#ifdef ENABLE_DEBUGGER_SUPPORT
isolate_->debug()->Iterate(v);
@@ -5034,13 +5493,13 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
isolate_->deoptimizer_data()->Iterate(v);
}
#endif
- v->Synchronize("debug");
+ v->Synchronize(VisitorSynchronization::kDebug);
isolate_->compilation_cache()->Iterate(v);
- v->Synchronize("compilationcache");
+ v->Synchronize(VisitorSynchronization::kCompilationCache);
// Iterate over local handles in handle scopes.
isolate_->handle_scope_implementer()->Iterate(v);
- v->Synchronize("handlescope");
+ v->Synchronize(VisitorSynchronization::kHandleScope);
// Iterate over the builtin code objects and code stubs in the
// heap. Note that it is not necessary to iterate over code objects
@@ -5048,7 +5507,7 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
if (mode != VISIT_ALL_IN_SCAVENGE) {
isolate_->builtins()->IterateBuiltins(v);
}
- v->Synchronize("builtins");
+ v->Synchronize(VisitorSynchronization::kBuiltins);
// Iterate over global handles.
switch (mode) {
@@ -5063,11 +5522,11 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
isolate_->global_handles()->IterateAllRoots(v);
break;
}
- v->Synchronize("globalhandles");
+ v->Synchronize(VisitorSynchronization::kGlobalHandles);
// Iterate over pointers being held by inactive threads.
isolate_->thread_manager()->Iterate(v);
- v->Synchronize("threadmanager");
+ v->Synchronize(VisitorSynchronization::kThreadManager);
// Iterate over the pointers the Serialization/Deserialization code is
// holding.
@@ -5091,7 +5550,7 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
bool Heap::ConfigureHeap(int max_semispace_size,
intptr_t max_old_gen_size,
intptr_t max_executable_size) {
- if (HasBeenSetup()) return false;
+ if (HasBeenSetUp()) return false;
if (max_semispace_size > 0) {
if (max_semispace_size < Page::kPageSize) {
@@ -5165,15 +5624,15 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
*stats->end_marker = HeapStats::kEndMarker;
*stats->new_space_size = new_space_.SizeAsInt();
*stats->new_space_capacity = static_cast<int>(new_space_.Capacity());
- *stats->old_pointer_space_size = old_pointer_space_->Size();
+ *stats->old_pointer_space_size = old_pointer_space_->SizeOfObjects();
*stats->old_pointer_space_capacity = old_pointer_space_->Capacity();
- *stats->old_data_space_size = old_data_space_->Size();
+ *stats->old_data_space_size = old_data_space_->SizeOfObjects();
*stats->old_data_space_capacity = old_data_space_->Capacity();
- *stats->code_space_size = code_space_->Size();
+ *stats->code_space_size = code_space_->SizeOfObjects();
*stats->code_space_capacity = code_space_->Capacity();
- *stats->map_space_size = map_space_->Size();
+ *stats->map_space_size = map_space_->SizeOfObjects();
*stats->map_space_capacity = map_space_->Capacity();
- *stats->cell_space_size = cell_space_->Size();
+ *stats->cell_space_size = cell_space_->SizeOfObjects();
*stats->cell_space_capacity = cell_space_->Capacity();
*stats->lo_space_size = lo_space_->Size();
isolate_->global_handles()->RecordStats(stats);
@@ -5207,6 +5666,16 @@ intptr_t Heap::PromotedSpaceSize() {
}
+intptr_t Heap::PromotedSpaceSizeOfObjects() {
+ return old_pointer_space_->SizeOfObjects()
+ + old_data_space_->SizeOfObjects()
+ + code_space_->SizeOfObjects()
+ + map_space_->SizeOfObjects()
+ + cell_space_->SizeOfObjects()
+ + lo_space_->SizeOfObjects();
+}
+
+
int Heap::PromotedExternalMemorySize() {
if (amount_of_external_allocated_memory_
<= amount_of_external_allocated_memory_at_last_global_gc_) return 0;
@@ -5267,7 +5736,7 @@ class HeapDebugUtils {
Address map_addr = map_p->address();
- obj->set_map(reinterpret_cast<Map*>(map_addr + kMarkTag));
+ obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag));
MarkObjectRecursively(&map);
@@ -5314,7 +5783,7 @@ class HeapDebugUtils {
HeapObject* map_p = HeapObject::FromAddress(map_addr);
- obj->set_map(reinterpret_cast<Map*>(map_p));
+ obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_p));
UnmarkObjectRecursively(reinterpret_cast<Object**>(&map_p));
@@ -5380,7 +5849,16 @@ class HeapDebugUtils {
#endif
-bool Heap::Setup(bool create_heap_objects) {
+
+V8_DECLARE_ONCE(initialize_gc_once);
+
+static void InitializeGCOnce() {
+ InitializeScavengingVisitorsTables();
+ NewSpaceScavenger::Initialize();
+ MarkCompactCollector::Initialize();
+}
+
+bool Heap::SetUp(bool create_heap_objects) {
#ifdef DEBUG
allocation_timeout_ = FLAG_gc_interval;
debug_utils_ = new HeapDebugUtils(this);
@@ -5390,7 +5868,7 @@ bool Heap::Setup(bool create_heap_objects) {
// goes wrong, just return false. The caller should check the results and
// call Heap::TearDown() to release allocated memory.
//
- // If the heap is not yet configured (eg, through the API), configure it.
+ // If the heap is not yet configured (e.g. through the API), configure it.
// Configuration is based on the flags new-space-size (really the semispace
// size) and old-space-size if set or the initial values of semispace_size_
// and old_generation_size_ otherwise.
@@ -5398,24 +5876,16 @@ bool Heap::Setup(bool create_heap_objects) {
if (!ConfigureHeapDefault()) return false;
}
- gc_initializer_mutex->Lock();
- static bool initialized_gc = false;
- if (!initialized_gc) {
- initialized_gc = true;
- InitializeScavengingVisitorsTables();
- NewSpaceScavenger::Initialize();
- MarkCompactCollector::Initialize();
- }
- gc_initializer_mutex->Unlock();
+ CallOnce(&initialize_gc_once, &InitializeGCOnce);
MarkMapPointersAsEncoded(false);
- // Setup memory allocator.
- if (!isolate_->memory_allocator()->Setup(MaxReserved(), MaxExecutableSize()))
+ // Set up memory allocator.
+ if (!isolate_->memory_allocator()->SetUp(MaxReserved(), MaxExecutableSize()))
return false;
- // Setup new space.
- if (!new_space_.Setup(reserved_semispace_size_, max_semispace_size_)) {
+ // Set up new space.
+ if (!new_space_.SetUp(reserved_semispace_size_, max_semispace_size_)) {
return false;
}
@@ -5426,7 +5896,7 @@ bool Heap::Setup(bool create_heap_objects) {
OLD_POINTER_SPACE,
NOT_EXECUTABLE);
if (old_pointer_space_ == NULL) return false;
- if (!old_pointer_space_->Setup()) return false;
+ if (!old_pointer_space_->SetUp()) return false;
// Initialize old data space.
old_data_space_ =
@@ -5435,14 +5905,14 @@ bool Heap::Setup(bool create_heap_objects) {
OLD_DATA_SPACE,
NOT_EXECUTABLE);
if (old_data_space_ == NULL) return false;
- if (!old_data_space_->Setup()) return false;
+ if (!old_data_space_->SetUp()) return false;
// Initialize the code space, set its maximum capacity to the old
// generation size. It needs executable memory.
// On 64-bit platform(s), we put all code objects in a 2 GB range of
// virtual address space, so that they can call each other with near calls.
if (code_range_size_ > 0) {
- if (!isolate_->code_range()->Setup(code_range_size_)) {
+ if (!isolate_->code_range()->SetUp(code_range_size_)) {
return false;
}
}
@@ -5450,27 +5920,36 @@ bool Heap::Setup(bool create_heap_objects) {
code_space_ =
new OldSpace(this, max_old_generation_size_, CODE_SPACE, EXECUTABLE);
if (code_space_ == NULL) return false;
- if (!code_space_->Setup()) return false;
+ if (!code_space_->SetUp()) return false;
// Initialize map space.
- map_space_ = new MapSpace(this,
- max_old_generation_size_,
- FLAG_max_map_space_pages,
- MAP_SPACE);
+ map_space_ = new MapSpace(this, max_old_generation_size_, MAP_SPACE);
if (map_space_ == NULL) return false;
- if (!map_space_->Setup()) return false;
+ if (!map_space_->SetUp()) return false;
// Initialize global property cell space.
cell_space_ = new CellSpace(this, max_old_generation_size_, CELL_SPACE);
if (cell_space_ == NULL) return false;
- if (!cell_space_->Setup()) return false;
+ if (!cell_space_->SetUp()) return false;
// The large object code space may contain code or data. We set the memory
// to be non-executable here for safety, but this means we need to enable it
// explicitly when allocating large code objects.
lo_space_ = new LargeObjectSpace(this, max_old_generation_size_, LO_SPACE);
if (lo_space_ == NULL) return false;
- if (!lo_space_->Setup()) return false;
+ if (!lo_space_->SetUp()) return false;
+
+ // Set up the seed that is used to randomize the string hash function.
+ ASSERT(hash_seed() == 0);
+ if (FLAG_randomize_hashes) {
+ if (FLAG_hash_seed == 0) {
+ set_hash_seed(
+ Smi::FromInt(V8::RandomPrivate(isolate()) & 0x3fffffff));
+ } else {
+ set_hash_seed(Smi::FromInt(FLAG_hash_seed));
+ }
+ }
+
if (create_heap_objects) {
// Create initial maps.
if (!CreateInitialMaps()) return false;
@@ -5485,7 +5964,7 @@ bool Heap::Setup(bool create_heap_objects) {
LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
LOG(isolate_, IntPtrTEvent("heap-available", Available()));
- store_buffer()->Setup();
+ store_buffer()->SetUp();
return true;
}
@@ -5577,8 +6056,11 @@ void Heap::TearDown() {
void Heap::Shrink() {
// Try to shrink all paged spaces.
PagedSpaces spaces;
- for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next())
+ for (PagedSpace* space = spaces.next();
+ space != NULL;
+ space = spaces.next()) {
space->ReleaseAllUnusedPages();
+ }
}
@@ -6023,7 +6505,7 @@ void PathTracer::MarkRecursively(Object** p, MarkVisitor* mark_visitor) {
Address map_addr = map_p->address();
- obj->set_map(reinterpret_cast<Map*>(map_addr + kMarkTag));
+ obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_addr + kMarkTag));
// Scan the object body.
if (is_global_context && (visit_mode_ == VISIT_ONLY_STRONG)) {
@@ -6065,7 +6547,7 @@ void PathTracer::UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor) {
HeapObject* map_p = HeapObject::FromAddress(map_addr);
- obj->set_map(reinterpret_cast<Map*>(map_p));
+ obj->set_map_no_write_barrier(reinterpret_cast<Map*>(map_p));
UnmarkRecursively(reinterpret_cast<Object**>(&map_p), unmark_visitor);
@@ -6130,18 +6612,24 @@ static intptr_t CountTotalHolesSize() {
}
-GCTracer::GCTracer(Heap* heap)
+GCTracer::GCTracer(Heap* heap,
+ const char* gc_reason,
+ const char* collector_reason)
: start_time_(0.0),
- start_size_(0),
+ start_object_size_(0),
+ start_memory_size_(0),
gc_count_(0),
full_gc_count_(0),
allocated_since_last_gc_(0),
spent_in_mutator_(0),
promoted_objects_size_(0),
- heap_(heap) {
+ heap_(heap),
+ gc_reason_(gc_reason),
+ collector_reason_(collector_reason) {
if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
start_time_ = OS::TimeCurrentMillis();
- start_size_ = heap_->SizeOfObjects();
+ start_object_size_ = heap_->SizeOfObjects();
+ start_memory_size_ = heap_->isolate()->memory_allocator()->Size();
for (int i = 0; i < Scope::kNumberOfScopes; i++) {
scopes_[i] = 0;
@@ -6188,13 +6676,20 @@ GCTracer::~GCTracer() {
}
}
+ PrintF("%8.0f ms: ", heap_->isolate()->time_millis_since_init());
+
if (!FLAG_trace_gc_nvp) {
int external_time = static_cast<int>(scopes_[Scope::EXTERNAL]);
- PrintF("%s %.1f -> %.1f MB, ",
+ double end_memory_size_mb =
+ static_cast<double>(heap_->isolate()->memory_allocator()->Size()) / MB;
+
+ PrintF("%s %.1f (%.1f) -> %.1f (%.1f) MB, ",
CollectorString(),
- static_cast<double>(start_size_) / MB,
- SizeOfHeapObjects());
+ static_cast<double>(start_object_size_) / MB,
+ static_cast<double>(start_memory_size_) / MB,
+ SizeOfHeapObjects(),
+ end_memory_size_mb);
if (external_time > 0) PrintF("%d / ", external_time);
PrintF("%d ms", time);
@@ -6211,6 +6706,15 @@ GCTracer::~GCTracer() {
longest_step_);
}
}
+
+ if (gc_reason_ != NULL) {
+ PrintF(" [%s]", gc_reason_);
+ }
+
+ if (collector_reason_ != NULL) {
+ PrintF(" [%s]", collector_reason_);
+ }
+
PrintF(".\n");
} else {
PrintF("pause=%d ", time);
@@ -6234,9 +6738,21 @@ GCTracer::~GCTracer() {
PrintF("mark=%d ", static_cast<int>(scopes_[Scope::MC_MARK]));
PrintF("sweep=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP]));
PrintF("sweepns=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP_NEWSPACE]));
- PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT]));
-
- PrintF("total_size_before=%" V8_PTR_PREFIX "d ", start_size_);
+ PrintF("evacuate=%d ", static_cast<int>(scopes_[Scope::MC_EVACUATE_PAGES]));
+ PrintF("new_new=%d ",
+ static_cast<int>(scopes_[Scope::MC_UPDATE_NEW_TO_NEW_POINTERS]));
+ PrintF("root_new=%d ",
+ static_cast<int>(scopes_[Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS]));
+ PrintF("old_new=%d ",
+ static_cast<int>(scopes_[Scope::MC_UPDATE_OLD_TO_NEW_POINTERS]));
+ PrintF("compaction_ptrs=%d ",
+ static_cast<int>(scopes_[Scope::MC_UPDATE_POINTERS_TO_EVACUATED]));
+ PrintF("intracompaction_ptrs=%d ", static_cast<int>(scopes_[
+ Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED]));
+ PrintF("misc_compaction=%d ",
+ static_cast<int>(scopes_[Scope::MC_UPDATE_MISC_POINTERS]));
+
+ PrintF("total_size_before=%" V8_PTR_PREFIX "d ", start_object_size_);
PrintF("total_size_after=%" V8_PTR_PREFIX "d ", heap_->SizeOfObjects());
PrintF("holes_size_before=%" V8_PTR_PREFIX "d ",
in_free_list_or_wasted_before_gc_);
@@ -6280,10 +6796,12 @@ int KeyedLookupCache::Hash(Map* map, String* name) {
int KeyedLookupCache::Lookup(Map* map, String* name) {
- int index = Hash(map, name);
- Key& key = keys_[index];
- if ((key.map == map) && key.name->Equals(name)) {
- return field_offsets_[index];
+ int index = (Hash(map, name) & kHashMask);
+ for (int i = 0; i < kEntriesPerBucket; i++) {
+ Key& key = keys_[index + i];
+ if ((key.map == map) && key.name->Equals(name)) {
+ return field_offsets_[index + i];
+ }
}
return kNotFound;
}
@@ -6292,7 +6810,29 @@ int KeyedLookupCache::Lookup(Map* map, String* name) {
void KeyedLookupCache::Update(Map* map, String* name, int field_offset) {
String* symbol;
if (HEAP->LookupSymbolIfExists(name, &symbol)) {
- int index = Hash(map, symbol);
+ int index = (Hash(map, symbol) & kHashMask);
+ // After a GC there will be free slots, so we use them in order (this may
+ // help to get the most frequently used one in position 0).
+ for (int i = 0; i< kEntriesPerBucket; i++) {
+ Key& key = keys_[index];
+ Object* free_entry_indicator = NULL;
+ if (key.map == free_entry_indicator) {
+ key.map = map;
+ key.name = symbol;
+ field_offsets_[index + i] = field_offset;
+ return;
+ }
+ }
+ // No free entry found in this bucket, so we move them all down one and
+ // put the new entry at position zero.
+ for (int i = kEntriesPerBucket - 1; i > 0; i--) {
+ Key& key = keys_[index + i];
+ Key& key2 = keys_[index + i - 1];
+ key = key2;
+ field_offsets_[index + i] = field_offsets_[index + i - 1];
+ }
+
+ // Write the new first entry.
Key& key = keys_[index];
key.map = map;
key.name = symbol;
@@ -6347,7 +6887,9 @@ void TranscendentalCache::Clear() {
void ExternalStringTable::CleanUp() {
int last = 0;
for (int i = 0; i < new_space_strings_.length(); ++i) {
- if (new_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
+ if (new_space_strings_[i] == heap_->raw_unchecked_the_hole_value()) {
+ continue;
+ }
if (heap_->InNewSpace(new_space_strings_[i])) {
new_space_strings_[last++] = new_space_strings_[i];
} else {
@@ -6357,7 +6899,9 @@ void ExternalStringTable::CleanUp() {
new_space_strings_.Rewind(last);
last = 0;
for (int i = 0; i < old_space_strings_.length(); ++i) {
- if (old_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
+ if (old_space_strings_[i] == heap_->raw_unchecked_the_hole_value()) {
+ continue;
+ }
ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
old_space_strings_[last++] = old_space_strings_[i];
}
@@ -6369,19 +6913,6 @@ void ExternalStringTable::CleanUp() {
void ExternalStringTable::TearDown() {
- for (int i = 0; i < new_space_strings_.length(); ++i) {
- if (new_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
- HeapObject *object = HeapObject::cast(new_space_strings_[i]);
- if (!object->IsExternalString())
- heap_->FinalizeExternalString(object);
- }
- for (int i = 0; i < old_space_strings_.length(); ++i) {
- if (old_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
- HeapObject *object = HeapObject::cast(old_space_strings_[i]);
- if (!object->IsExternalString())
- heap_->FinalizeExternalString(object);
- }
-
new_space_strings_.Free();
old_space_strings_.Free();
}
@@ -6408,17 +6939,22 @@ void Heap::FreeQueuedChunks() {
// it try to perform a search in the list of pages owned by of the large
// object space and queued chunks were detached from that list.
// To work around this we split large chunk into normal kPageSize aligned
- // pieces and initialize owner field and flags of every piece.
- // If FromAnyPointerAddress encounteres a slot that belongs to one of
+ // pieces and initialize size, owner and flags field of every piece.
+ // If FromAnyPointerAddress encounters a slot that belongs to one of
// these smaller pieces it will treat it as a slot on a normal Page.
+ Address chunk_end = chunk->address() + chunk->size();
MemoryChunk* inner = MemoryChunk::FromAddress(
chunk->address() + Page::kPageSize);
- MemoryChunk* inner_last = MemoryChunk::FromAddress(
- chunk->address() + chunk->size() - 1);
+ MemoryChunk* inner_last = MemoryChunk::FromAddress(chunk_end - 1);
while (inner <= inner_last) {
// Size of a large chunk is always a multiple of
- // OS::AllocationAlignment() so there is always
+ // OS::AllocateAlignment() so there is always
// enough space for a fake MemoryChunk header.
+ Address area_end = Min(inner->address() + Page::kPageSize, chunk_end);
+ // Guard against overflow.
+ if (area_end < inner->address()) area_end = chunk_end;
+ inner->SetArea(inner->address(), area_end);
+ inner->set_size(Page::kPageSize);
inner->set_owner(lo_space());
inner->SetFlag(MemoryChunk::ABOUT_TO_BE_FREED);
inner = MemoryChunk::FromAddress(
@@ -6435,4 +6971,19 @@ void Heap::FreeQueuedChunks() {
chunks_queued_for_free_ = NULL;
}
+
+void Heap::RememberUnmappedPage(Address page, bool compacted) {
+ uintptr_t p = reinterpret_cast<uintptr_t>(page);
+ // Tag the page pointer to make it findable in the dump file.
+ if (compacted) {
+ p ^= 0xc1ead & (Page::kPageSize - 1); // Cleared.
+ } else {
+ p ^= 0x1d1ed & (Page::kPageSize - 1); // I died.
+ }
+ remembered_unmapped_pages_[remembered_unmapped_pages_index_] =
+ reinterpret_cast<Address>(p);
+ remembered_unmapped_pages_index_++;
+ remembered_unmapped_pages_index_ %= kRememberedUnmappedPages;
+}
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/heap.h b/src/3rdparty/v8/src/heap.h
index 6166cde..0391e0e 100644
--- a/src/3rdparty/v8/src/heap.h
+++ b/src/3rdparty/v8/src/heap.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -45,12 +45,6 @@
namespace v8 {
namespace internal {
-// TODO(isolates): remove HEAP here
-#define HEAP (_inline_get_heap_())
-class Heap;
-inline Heap* _inline_get_heap_();
-
-
// Defines all the roots in Heap.
#define STRONG_ROOT_LIST(V) \
V(Map, byte_array_map, ByteArrayMap) \
@@ -73,18 +67,16 @@ inline Heap* _inline_get_heap_();
V(Map, global_context_map, GlobalContextMap) \
V(Map, fixed_array_map, FixedArrayMap) \
V(Map, code_map, CodeMap) \
- V(Map, serialized_scope_info_map, SerializedScopeInfoMap) \
+ V(Map, scope_info_map, ScopeInfoMap) \
V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
V(Map, fixed_double_array_map, FixedDoubleArrayMap) \
V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
V(Map, hash_table_map, HashTableMap) \
V(FixedArray, empty_fixed_array, EmptyFixedArray) \
V(ByteArray, empty_byte_array, EmptyByteArray) \
- V(FixedDoubleArray, empty_fixed_double_array, EmptyFixedDoubleArray) \
V(String, empty_string, EmptyString) \
V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
V(Smi, stack_limit, StackLimit) \
- V(Oddball, frame_alignment_marker, FrameAlignmentMarker) \
V(Oddball, arguments_marker, ArgumentsMarker) \
/* The first 32 roots above this line should be boring from a GC point of */ \
/* view. This means they are never in new space and never on a page that */ \
@@ -96,6 +88,7 @@ inline Heap* _inline_get_heap_();
V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
V(FixedArray, string_split_cache, StringSplitCache) \
V(Object, termination_exception, TerminationException) \
+ V(Smi, hash_seed, HashSeed) \
V(Map, string_map, StringMap) \
V(Map, symbol_map, SymbolMap) \
V(Map, cons_string_map, ConsStringMap) \
@@ -110,6 +103,16 @@ inline Heap* _inline_get_heap_();
V(Map, external_string_map, ExternalStringMap) \
V(Map, external_string_with_ascii_data_map, ExternalStringWithAsciiDataMap) \
V(Map, external_ascii_string_map, ExternalAsciiStringMap) \
+ V(Map, short_external_symbol_map, ShortExternalSymbolMap) \
+ V(Map, \
+ short_external_symbol_with_ascii_data_map, \
+ ShortExternalSymbolWithAsciiDataMap) \
+ V(Map, short_external_ascii_symbol_map, ShortExternalAsciiSymbolMap) \
+ V(Map, short_external_string_map, ShortExternalStringMap) \
+ V(Map, \
+ short_external_string_with_ascii_data_map, \
+ ShortExternalStringWithAsciiDataMap) \
+ V(Map, short_external_ascii_string_map, ShortExternalAsciiStringMap) \
V(Map, undetectable_string_map, UndetectableStringMap) \
V(Map, undetectable_ascii_string_map, UndetectableAsciiStringMap) \
V(Map, external_pixel_array_map, ExternalPixelArrayMap) \
@@ -126,6 +129,7 @@ inline Heap* _inline_get_heap_();
V(Map, catch_context_map, CatchContextMap) \
V(Map, with_context_map, WithContextMap) \
V(Map, block_context_map, BlockContextMap) \
+ V(Map, module_context_map, ModuleContextMap) \
V(Map, oddball_map, OddballMap) \
V(Map, message_object_map, JSMessageObjectMap) \
V(Map, foreign_map, ForeignMap) \
@@ -135,8 +139,8 @@ inline Heap* _inline_get_heap_();
V(Map, neander_map, NeanderMap) \
V(JSObject, message_listeners, MessageListeners) \
V(Foreign, prototype_accessors, PrototypeAccessors) \
- V(NumberDictionary, code_stubs, CodeStubs) \
- V(NumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
+ V(UnseededNumberDictionary, code_stubs, CodeStubs) \
+ V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache) \
V(Code, js_entry_code, JsEntryCode) \
V(Code, js_construct_entry_code, JsConstructEntryCode) \
@@ -145,6 +149,8 @@ inline Heap* _inline_get_heap_();
V(Script, empty_script, EmptyScript) \
V(Smi, real_stack_limit, RealStackLimit) \
V(StringDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
+ V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
+ V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset)
#define ROOT_LIST(V) \
STRONG_ROOT_LIST(V) \
@@ -171,6 +177,7 @@ inline Heap* _inline_get_heap_();
V(eval_symbol, "eval") \
V(function_symbol, "function") \
V(length_symbol, "length") \
+ V(module_symbol, "module") \
V(name_symbol, "name") \
V(native_symbol, "native") \
V(null_symbol, "null") \
@@ -190,7 +197,6 @@ inline Heap* _inline_get_heap_();
V(string_symbol, "string") \
V(String_symbol, "String") \
V(Date_symbol, "Date") \
- V(Error_symbol, "Error") \
V(this_symbol, "this") \
V(to_string_symbol, "toString") \
V(char_at_symbol, "CharAt") \
@@ -200,12 +206,10 @@ inline Heap* _inline_get_heap_();
V(InitializeConstGlobal_symbol, "InitializeConstGlobal") \
V(KeyedLoadElementMonomorphic_symbol, \
"KeyedLoadElementMonomorphic") \
- V(KeyedLoadElementPolymorphic_symbol, \
- "KeyedLoadElementPolymorphic") \
V(KeyedStoreElementMonomorphic_symbol, \
"KeyedStoreElementMonomorphic") \
- V(KeyedStoreElementPolymorphic_symbol, \
- "KeyedStoreElementPolymorphic") \
+ V(KeyedStoreAndGrowElementMonomorphic_symbol, \
+ "KeyedStoreAndGrowElementMonomorphic") \
V(stack_overflow_symbol, "kStackOverflowBoilerplate") \
V(illegal_access_symbol, "illegal access") \
V(out_of_memory_symbol, "out-of-memory") \
@@ -236,8 +240,10 @@ inline Heap* _inline_get_heap_();
V(use_strict, "use strict") \
V(dot_symbol, ".") \
V(anonymous_function_symbol, "(anonymous function)") \
+ V(compare_ic_symbol, ".compare_ic") \
V(infinity_symbol, "Infinity") \
- V(minus_infinity_symbol, "-Infinity")
+ V(minus_infinity_symbol, "-Infinity") \
+ V(hidden_stack_trace_symbol, "v8::hidden_stack_trace")
// Forward declarations.
class GCTracer;
@@ -246,8 +252,8 @@ class Isolate;
class WeakObjectRetainer;
-typedef HeapObject* (*ExternalStringTableUpdaterCallback)(Heap* heap,
- Object** pointer);
+typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
+ Object** pointer);
class StoreBufferRebuilder {
public:
@@ -283,30 +289,64 @@ class HeapDebugUtils;
// by it's size to avoid dereferencing a map pointer for scanning.
class PromotionQueue {
public:
- PromotionQueue() : front_(NULL), rear_(NULL) { }
+ explicit PromotionQueue(Heap* heap)
+ : front_(NULL),
+ rear_(NULL),
+ limit_(NULL),
+ emergency_stack_(0),
+ heap_(heap) { }
+
+ void Initialize();
+
+ void Destroy() {
+ ASSERT(is_empty());
+ delete emergency_stack_;
+ emergency_stack_ = NULL;
+ }
+
+ inline void ActivateGuardIfOnTheSamePage();
+
+ Page* GetHeadPage() {
+ return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
+ }
+
+ void SetNewLimit(Address limit) {
+ if (!guard_) {
+ return;
+ }
- void Initialize(Address start_address) {
- // Assumes that a NewSpacePage exactly fits a number of promotion queue
- // entries (where each is a pair of intptr_t). This allows us to simplify
- // the test fpr when to switch pages.
- ASSERT((Page::kPageSize - MemoryChunk::kBodyOffset) % (2 * kPointerSize)
- == 0);
- ASSERT(NewSpacePage::IsAtEnd(start_address));
- front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
+ ASSERT(GetHeadPage() == Page::FromAllocationTop(limit));
+ limit_ = reinterpret_cast<intptr_t*>(limit);
+
+ if (limit_ <= rear_) {
+ return;
+ }
+
+ RelocateQueueHead();
}
- bool is_empty() { return front_ == rear_; }
+ bool is_empty() {
+ return (front_ == rear_) &&
+ (emergency_stack_ == NULL || emergency_stack_->length() == 0);
+ }
inline void insert(HeapObject* target, int size);
void remove(HeapObject** target, int* size) {
ASSERT(!is_empty());
+ if (front_ == rear_) {
+ Entry e = emergency_stack_->RemoveLast();
+ *target = e.obj_;
+ *size = e.size_;
+ return;
+ }
+
if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(front_))) {
NewSpacePage* front_page =
NewSpacePage::FromAddress(reinterpret_cast<Address>(front_));
ASSERT(!front_page->prev_page()->is_anchor());
front_ =
- reinterpret_cast<intptr_t*>(front_page->prev_page()->body_limit());
+ reinterpret_cast<intptr_t*>(front_page->prev_page()->area_end());
}
*target = reinterpret_cast<HeapObject*>(*(--front_));
*size = static_cast<int>(*(--front_));
@@ -319,6 +359,23 @@ class PromotionQueue {
// The front of the queue is higher in the memory page chain than the rear.
intptr_t* front_;
intptr_t* rear_;
+ intptr_t* limit_;
+
+ bool guard_;
+
+ static const int kEntrySizeInWords = 2;
+
+ struct Entry {
+ Entry(HeapObject* obj, int size) : obj_(obj), size_(size) { }
+
+ HeapObject* obj_;
+ int size_;
+ };
+ List<Entry>* emergency_stack_;
+
+ Heap* heap_;
+
+ void RelocateQueueHead();
DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
};
@@ -332,14 +389,10 @@ typedef void (*ScavengingCallback)(Map* map,
// External strings table is a place where all external strings are
// registered. We need to keep track of such strings to properly
// finalize them.
-// The ExternalStringTable can contain both strings and objects with
-// external resources. It was not renamed to make the patch simpler.
class ExternalStringTable {
public:
// Registers an external string.
inline void AddString(String* string);
- // Registers an external object.
- inline void AddObject(HeapObject* string);
inline void Iterate(ObjectVisitor* v);
@@ -357,10 +410,10 @@ class ExternalStringTable {
inline void Verify();
- inline void AddOldObject(HeapObject* string);
+ inline void AddOldString(String* string);
// Notifies the table that only a prefix of the new list is valid.
- inline void ShrinkNewObjects(int position);
+ inline void ShrinkNewStrings(int position);
// To speed up scavenge collections new space string are kept
// separate from old space strings.
@@ -373,10 +426,15 @@ class ExternalStringTable {
};
+enum ArrayStorageAllocationMode {
+ DONT_INITIALIZE_ARRAY_ELEMENTS,
+ INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
+};
+
class Heap {
public:
// Configure heap size before setup. Return false if the heap has been
- // setup already.
+ // set up already.
bool ConfigureHeap(int max_semispace_size,
intptr_t max_old_gen_size,
intptr_t max_executable_size);
@@ -385,7 +443,7 @@ class Heap {
// Initializes the global object heap. If create_heap_objects is true,
// also creates the basic non-mutable objects.
// Returns whether it succeeded.
- bool Setup(bool create_heap_objects);
+ bool SetUp(bool create_heap_objects);
// Destroys all memory allocated by the heap.
void TearDown();
@@ -395,8 +453,8 @@ class Heap {
// jslimit_/real_jslimit_ variable in the StackGuard.
void SetStackLimits();
- // Returns whether Setup has been called.
- bool HasBeenSetup();
+ // Returns whether SetUp has been called.
+ bool HasBeenSetUp();
// Returns the maximum amount of memory reserved for the heap. For
// the young generation, we reserve 4 times the amount needed for a
@@ -427,9 +485,6 @@ class Heap {
// all available bytes. Check MaxHeapObjectSize() instead.
intptr_t Available();
- // Returns the maximum object size in paged space.
- inline int MaxObjectSizeInPagedSpace();
-
// Returns of size of all objects residing in the heap.
intptr_t SizeOfObjects();
@@ -474,6 +529,30 @@ class Heap {
MUST_USE_RESULT MaybeObject* AllocateJSObject(
JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED);
+ // Allocate a JSArray with no elements
+ MUST_USE_RESULT MaybeObject* AllocateEmptyJSArray(
+ ElementsKind elements_kind,
+ PretenureFlag pretenure = NOT_TENURED) {
+ return AllocateJSArrayAndStorage(elements_kind, 0, 0,
+ DONT_INITIALIZE_ARRAY_ELEMENTS,
+ pretenure);
+ }
+
+ // Allocate a JSArray with a specified length but elements that are left
+ // uninitialized.
+ MUST_USE_RESULT MaybeObject* AllocateJSArrayAndStorage(
+ ElementsKind elements_kind,
+ int length,
+ int capacity,
+ ArrayStorageAllocationMode mode = DONT_INITIALIZE_ARRAY_ELEMENTS,
+ PretenureFlag pretenure = NOT_TENURED);
+
+ // Allocate a JSArray with no elements
+ MUST_USE_RESULT MaybeObject* AllocateJSArrayWithElements(
+ FixedArrayBase* array_base,
+ ElementsKind elements_kind,
+ PretenureFlag pretenure = NOT_TENURED);
+
// Allocates and initializes a new global object based on a constructor.
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
// failed.
@@ -552,11 +631,20 @@ class Heap {
MUST_USE_RESULT MaybeObject* AllocateCodeCache();
// Allocates a serialized scope info.
- MUST_USE_RESULT MaybeObject* AllocateSerializedScopeInfo(int length);
+ MUST_USE_RESULT MaybeObject* AllocateScopeInfo(int length);
// Allocates an empty PolymorphicCodeCache.
MUST_USE_RESULT MaybeObject* AllocatePolymorphicCodeCache();
+ // Allocates a pre-tenured empty AccessorPair.
+ MUST_USE_RESULT MaybeObject* AllocateAccessorPair();
+
+ // Allocates an empty TypeFeedbackInfo.
+ MUST_USE_RESULT MaybeObject* AllocateTypeFeedbackInfo();
+
+ // Allocates an AliasedArgumentsEntry.
+ MUST_USE_RESULT MaybeObject* AllocateAliasedArgumentsEntry(int slot);
+
// Clear the Instanceof cache (used when a prototype changes).
inline void ClearInstanceofCache();
@@ -629,7 +717,7 @@ class Heap {
PretenureFlag pretenure = NOT_TENURED);
// Computes a single character string where the character has code.
- // A cache is used for ascii codes.
+ // A cache is used for ASCII codes.
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
// failed. Please note this does not perform a garbage collection.
MUST_USE_RESULT MaybeObject* LookupSingleCharacterStringFromCode(
@@ -717,6 +805,13 @@ class Heap {
int length,
PretenureFlag pretenure = NOT_TENURED);
+ // Allocates a fixed double array with hole values. Returns
+ // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
+ // Please note this does not perform a garbage collection.
+ MUST_USE_RESULT MaybeObject* AllocateFixedDoubleArrayWithHoles(
+ int length,
+ PretenureFlag pretenure = NOT_TENURED);
+
// AllocateHashTable is identical to AllocateFixedArray except
// that the resulting object has hash_table_map as map.
MUST_USE_RESULT MaybeObject* AllocateHashTable(
@@ -742,7 +837,7 @@ class Heap {
// Allocate a block context.
MUST_USE_RESULT MaybeObject* AllocateBlockContext(JSFunction* function,
Context* previous,
- SerializedScopeInfo* info);
+ ScopeInfo* info);
// Allocates a new utility object in the old generation.
MUST_USE_RESULT MaybeObject* AllocateStruct(InstanceType type);
@@ -791,13 +886,15 @@ class Heap {
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
// failed.
// Please note this does not perform a garbage collection.
- MUST_USE_RESULT inline MaybeObject* NumberFromInt32(int32_t value);
+ MUST_USE_RESULT inline MaybeObject* NumberFromInt32(
+ int32_t value, PretenureFlag pretenure = NOT_TENURED);
// Converts an int into either a Smi or a HeapNumber object.
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
// failed.
// Please note this does not perform a garbage collection.
- MUST_USE_RESULT inline MaybeObject* NumberFromUint32(uint32_t value);
+ MUST_USE_RESULT inline MaybeObject* NumberFromUint32(
+ uint32_t value, PretenureFlag pretenure = NOT_TENURED);
// Allocates a new foreign object.
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
@@ -856,7 +953,7 @@ class Heap {
// Finalizes an external string by deleting the associated external
// data and clearing the resource pointer.
- inline void FinalizeExternalString(HeapObject* string);
+ inline void FinalizeExternalString(String* string);
// Allocates an uninitialized object. The memory is non-executable if the
// hardware and OS allow.
@@ -895,8 +992,7 @@ class Heap {
// Please note this function does not perform a garbage collection.
MUST_USE_RESULT MaybeObject* LookupSymbol(Vector<const char> str);
MUST_USE_RESULT MaybeObject* LookupAsciiSymbol(Vector<const char> str);
- MUST_USE_RESULT MaybeObject* LookupTwoByteSymbol(
- Vector<const uc16> str);
+ MUST_USE_RESULT MaybeObject* LookupTwoByteSymbol(Vector<const uc16> str);
MUST_USE_RESULT MaybeObject* LookupAsciiSymbol(const char* str) {
return LookupSymbol(CStrVector(str));
}
@@ -932,23 +1028,34 @@ class Heap {
// Performs garbage collection operation.
// Returns whether there is a chance that another major GC could
// collect more garbage.
- bool CollectGarbage(AllocationSpace space, GarbageCollector collector);
+ bool CollectGarbage(AllocationSpace space,
+ GarbageCollector collector,
+ const char* gc_reason,
+ const char* collector_reason);
// Performs garbage collection operation.
// Returns whether there is a chance that another major GC could
// collect more garbage.
- inline bool CollectGarbage(AllocationSpace space);
+ inline bool CollectGarbage(AllocationSpace space,
+ const char* gc_reason = NULL);
static const int kNoGCFlags = 0;
- static const int kMakeHeapIterableMask = 1;
+ static const int kSweepPreciselyMask = 1;
+ static const int kReduceMemoryFootprintMask = 2;
+ static const int kAbortIncrementalMarkingMask = 4;
+
+ // Making the heap iterable requires us to sweep precisely and abort any
+ // incremental marking as well.
+ static const int kMakeHeapIterableMask =
+ kSweepPreciselyMask | kAbortIncrementalMarkingMask;
// Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
// non-zero, then the slower precise sweeper is used, which leaves the heap
// in a state where we can iterate over the heap visiting all objects.
- void CollectAllGarbage(int flags);
+ void CollectAllGarbage(int flags, const char* gc_reason = NULL);
// Last hope GC, should try to squeeze as much as possible.
- void CollectAllAvailableGarbage();
+ void CollectAllAvailableGarbage(const char* gc_reason = NULL);
// Check whether the heap is currently iterable.
bool IsHeapIterable();
@@ -1005,7 +1112,7 @@ class Heap {
// Heap root getters. We have versions with and without type::cast() here.
// You can't use type::cast during GC because the assert fails.
// TODO(1490): Try removing the unchecked accessors, now that GC marking does
- // not corrupt the stack.
+ // not corrupt the map.
#define ROOT_ACCESSOR(type, name, camel_name) \
type* name() { \
return type::cast(roots_[k##camel_name##RootIndex]); \
@@ -1077,7 +1184,7 @@ class Heap {
inline AllocationSpace TargetSpaceId(InstanceType type);
// Sets the stub_cache_ (only used when expanding the dictionary).
- void public_set_code_stubs(NumberDictionary* value) {
+ void public_set_code_stubs(UnseededNumberDictionary* value) {
roots_[kCodeStubsRootIndex] = value;
}
@@ -1089,7 +1196,7 @@ class Heap {
}
// Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
- void public_set_non_monomorphic_cache(NumberDictionary* value) {
+ void public_set_non_monomorphic_cache(UnseededNumberDictionary* value) {
roots_[kNonMonomorphicCacheRootIndex] = value;
}
@@ -1123,6 +1230,10 @@ class Heap {
// Verify the heap is in its normal state before or after a GC.
void Verify();
+ // Verify that AccessorPairs are not shared, i.e. make sure that they have
+ // exactly one pointer to them.
+ void VerifyNoAccessorPairSharing();
+
void OldPointerSpaceCheckStoreBuffer();
void MapSpaceCheckStoreBuffer();
void LargeObjectSpaceCheckStoreBuffer();
@@ -1236,6 +1347,10 @@ class Heap {
return old_gen_allocation_limit_ - PromotedTotalSize();
}
+ inline intptr_t OldGenerationCapacityAvailable() {
+ return max_old_generation_size_ - PromotedTotalSize();
+ }
+
static const intptr_t kMinimumPromotionLimit = 5 * Page::kPageSize;
static const intptr_t kMinimumAllocationLimit =
8 * (Page::kPageSize > MB ? Page::kPageSize : MB);
@@ -1259,7 +1374,8 @@ class Heap {
Max(old_gen_size + old_gen_size / divisor, kMinimumPromotionLimit);
limit += new_space_.Capacity();
limit *= old_gen_limit_factor_;
- return limit;
+ intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
+ return Min(limit, halfway_to_the_max);
}
intptr_t OldGenAllocationLimit(intptr_t old_gen_size) {
@@ -1268,11 +1384,12 @@ class Heap {
Max(old_gen_size + old_gen_size / divisor, kMinimumAllocationLimit);
limit += new_space_.Capacity();
limit *= old_gen_limit_factor_;
- return limit;
+ intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
+ return Min(limit, halfway_to_the_max);
}
- // Can be called when the embedding application is idle.
- bool IdleNotification();
+ // Implements the corresponding V8 API function.
+ bool IdleNotification(int hint);
// Declare all the root indices.
enum RootListIndex {
@@ -1317,6 +1434,7 @@ class Heap {
void CheckNewSpaceExpansionCriteria();
inline void IncrementYoungSurvivorsCounter(int survived) {
+ ASSERT(survived >= 0);
young_survivors_after_last_gc_ = survived;
survived_since_last_expansion_ += survived;
}
@@ -1348,6 +1466,8 @@ class Heap {
void ProcessWeakReferences(WeakObjectRetainer* retainer);
+ void VisitExternalResources(v8::ExternalResourceVisitor* visitor);
+
// Helper function that governs the promotion policy from new space to
// old. If the object's old address lies below the new space's age
// mark or if we've already filled the bottom 1/16th of the to space,
@@ -1360,10 +1480,18 @@ class Heap {
void ClearNormalizedMapCaches();
+ // Clears the cache of ICs related to this map.
+ void ClearCacheOnMap(Map* map) {
+ if (FLAG_cleanup_code_caches_at_gc) {
+ map->ClearCodeCache(this);
+ }
+ }
+
GCTracer* tracer() { return tracer_; }
// Returns the size of objects residing in non new spaces.
intptr_t PromotedSpaceSize();
+ intptr_t PromotedSpaceSizeOfObjects();
double total_regexp_code_generated() { return total_regexp_code_generated_; }
void IncreaseTotalRegexpCodeGenerated(int size) {
@@ -1395,6 +1523,17 @@ class Heap {
return &incremental_marking_;
}
+ bool IsSweepingComplete() {
+ return old_data_space()->IsSweepingComplete() &&
+ old_pointer_space()->IsSweepingComplete();
+ }
+
+ bool AdvanceSweepers(int step_size) {
+ bool sweeping_complete = old_data_space()->AdvanceSweeper(step_size);
+ sweeping_complete &= old_pointer_space()->AdvanceSweeper(step_size);
+ return sweeping_complete;
+ }
+
ExternalStringTable* external_string_table() {
return &external_string_table_;
}
@@ -1430,6 +1569,35 @@ class Heap {
// The roots that have an index less than this are always in old space.
static const int kOldSpaceRoots = 0x20;
+ uint32_t HashSeed() {
+ uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
+ ASSERT(FLAG_randomize_hashes || seed == 0);
+ return seed;
+ }
+
+ void SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
+ ASSERT(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
+ set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
+ }
+
+ void SetConstructStubDeoptPCOffset(int pc_offset) {
+ ASSERT(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
+ set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
+ }
+
+ // For post mortem debugging.
+ void RememberUnmappedPage(Address page, bool compacted);
+
+ // Global inline caching age: it is incremented on some GCs after context
+ // disposal. We use it to flush inline caches.
+ int global_ic_age() {
+ return global_ic_age_;
+ }
+
+ void AgeInlineCaches() {
+ ++global_ic_age_;
+ }
+
private:
Heap();
@@ -1457,6 +1625,8 @@ class Heap {
// For keeping track of context disposals.
int contexts_disposed_;
+ int global_ic_age_;
+
int scan_on_scavenge_pages_;
#if defined(V8_TARGET_ARCH_X64)
@@ -1481,6 +1651,11 @@ class Heap {
int ms_count_; // how many mark-sweep collections happened
unsigned int gc_count_; // how many gc happened
+ // For post mortem debugging.
+ static const int kRememberedUnmappedPages = 128;
+ int remembered_unmapped_pages_index_;
+ Address remembered_unmapped_pages_[kRememberedUnmappedPages];
+
// Total length of the strings we failed to flatten since the last GC.
int unflattened_strings_length_;
@@ -1509,6 +1684,10 @@ class Heap {
HeapDebugUtils* debug_utils_;
#endif // DEBUG
+ // Indicates that the new space should be kept small due to high promotion
+ // rates caused by the mutator allocating a lot of long-lived objects.
+ bool new_space_high_promotion_mode_active_;
+
// Limit that triggers a global GC on the next (normally caused) GC. This
// is checked when we have already decided to do a GC to help determine
// which collector to invoke.
@@ -1612,7 +1791,8 @@ class Heap {
}
// Checks whether a global GC is necessary
- GarbageCollector SelectGarbageCollector(AllocationSpace space);
+ GarbageCollector SelectGarbageCollector(AllocationSpace space,
+ const char** reason);
// Performs garbage collection
// Returns whether there is a chance another major GC could
@@ -1623,7 +1803,6 @@ class Heap {
inline void UpdateOldSpaceLimits();
-
// Allocate an uninitialized object in map space. The behavior is identical
// to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't
// have to test the allocation space argument and (b) can reduce code size
@@ -1652,6 +1831,11 @@ class Heap {
Object* to_number,
byte kind);
+ // Allocate a JSArray with no elements
+ MUST_USE_RESULT MaybeObject* AllocateJSArray(
+ ElementsKind elements_kind,
+ PretenureFlag pretenure = NOT_TENURED);
+
// Allocate empty fixed array.
MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray();
@@ -1661,7 +1845,7 @@ class Heap {
// Performs a minor collection in new generation.
void Scavenge();
- static HeapObject* UpdateNewSpaceReferenceInExternalStringTableEntry(
+ static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
Heap* heap,
Object** pointer);
@@ -1699,8 +1883,13 @@ class Heap {
GCTracer* tracer_;
- // Initializes the number to string cache based on the max semispace size.
- MUST_USE_RESULT MaybeObject* InitializeNumberStringCache();
+ // Allocates a small number to string cache.
+ MUST_USE_RESULT MaybeObject* AllocateInitialNumberStringCache();
+ // Creates and installs the full-sized number string cache.
+ void AllocateFullSizeNumberStringCache();
+ // Get the length of the number to string cache based on the max semispace
+ // size.
+ int FullSizeNumberStringCacheLength();
// Flush the number to string cache.
void FlushNumberStringCache();
@@ -1708,11 +1897,13 @@ class Heap {
enum SurvivalRateTrend { INCREASING, STABLE, DECREASING, FLUCTUATING };
- static const int kYoungSurvivalRateThreshold = 90;
+ static const int kYoungSurvivalRateHighThreshold = 90;
+ static const int kYoungSurvivalRateLowThreshold = 10;
static const int kYoungSurvivalRateAllowedDeviation = 15;
int young_survivors_after_last_gc_;
int high_survival_rate_period_length_;
+ int low_survival_rate_period_length_;
double survival_rate_;
SurvivalRateTrend previous_survival_rate_trend_;
SurvivalRateTrend survival_rate_trend_;
@@ -1745,6 +1936,16 @@ class Heap {
}
}
+ bool IsStableOrDecreasingSurvivalTrend() {
+ switch (survival_rate_trend()) {
+ case STABLE:
+ case DECREASING:
+ return true;
+ default:
+ return false;
+ }
+ }
+
bool IsIncreasingSurvivalTrend() {
return survival_rate_trend() == INCREASING;
}
@@ -1753,10 +1954,54 @@ class Heap {
return high_survival_rate_period_length_ > 0;
}
+ bool IsLowSurvivalRate() {
+ return low_survival_rate_period_length_ > 0;
+ }
+
void SelectScavengingVisitorsTable();
+ void StartIdleRound() {
+ mark_sweeps_since_idle_round_started_ = 0;
+ ms_count_at_last_idle_notification_ = ms_count_;
+ }
+
+ void FinishIdleRound() {
+ mark_sweeps_since_idle_round_started_ = kMaxMarkSweepsInIdleRound;
+ scavenges_since_last_idle_round_ = 0;
+ }
+
+ bool EnoughGarbageSinceLastIdleRound() {
+ return (scavenges_since_last_idle_round_ >= kIdleScavengeThreshold);
+ }
+
+ bool WorthStartingGCWhenIdle() {
+ if (contexts_disposed_ > 0) {
+ return true;
+ }
+ return incremental_marking()->WorthActivating();
+ }
+
+ // Estimates how many milliseconds a Mark-Sweep would take to complete.
+ // In idle notification handler we assume that this function will return:
+ // - a number less than 10 for small heaps, which are less than 8Mb.
+ // - a number greater than 10 for large heaps, which are greater than 32Mb.
+ int TimeMarkSweepWouldTakeInMs() {
+ // Rough estimate of how many megabytes of heap can be processed in 1 ms.
+ static const int kMbPerMs = 2;
+
+ int heap_size_mb = static_cast<int>(SizeOfObjects() / MB);
+ return heap_size_mb / kMbPerMs;
+ }
+
+ // Returns true if no more GC work is left.
+ bool IdleGlobalGC();
+
+ void AdvanceIdleIncrementalMarking(intptr_t step_size);
+
+
static const int kInitialSymbolTableSize = 2048;
static const int kInitialEvalCacheSize = 64;
+ static const int kInitialNumberStringCacheSize = 256;
// Maximum GC pause.
int max_gc_pause_;
@@ -1784,11 +2029,19 @@ class Heap {
unsigned int last_idle_notification_gc_count_;
bool last_idle_notification_gc_count_init_;
+ int mark_sweeps_since_idle_round_started_;
+ int ms_count_at_last_idle_notification_;
+ unsigned int gc_count_at_last_idle_gc_;
+ int scavenges_since_last_idle_round_;
+
+ static const int kMaxMarkSweepsInIdleRound = 7;
+ static const int kIdleScavengeThreshold = 5;
+
// Shared state read by the scavenge collector and set by ScavengeObject.
PromotionQueue promotion_queue_;
// Flag is set when the heap has been configured. The heap can be repeatedly
- // configured through the API until it is setup.
+ // configured through the API until it is set up.
bool configured_;
ExternalStringTable external_string_table_;
@@ -1847,32 +2100,15 @@ class HeapStats {
class AlwaysAllocateScope {
public:
- AlwaysAllocateScope() {
- // We shouldn't hit any nested scopes, because that requires
- // non-handle code to call handle code. The code still works but
- // performance will degrade, so we want to catch this situation
- // in debug mode.
- ASSERT(HEAP->always_allocate_scope_depth_ == 0);
- HEAP->always_allocate_scope_depth_++;
- }
-
- ~AlwaysAllocateScope() {
- HEAP->always_allocate_scope_depth_--;
- ASSERT(HEAP->always_allocate_scope_depth_ == 0);
- }
+ inline AlwaysAllocateScope();
+ inline ~AlwaysAllocateScope();
};
class LinearAllocationScope {
public:
- LinearAllocationScope() {
- HEAP->linear_allocation_scope_depth_++;
- }
-
- ~LinearAllocationScope() {
- HEAP->linear_allocation_scope_depth_--;
- ASSERT(HEAP->linear_allocation_scope_depth_ >= 0);
- }
+ inline LinearAllocationScope();
+ inline ~LinearAllocationScope();
};
@@ -1884,15 +2120,7 @@ class LinearAllocationScope {
// objects in a heap space but above the allocation pointer.
class VerifyPointersVisitor: public ObjectVisitor {
public:
- void VisitPointers(Object** start, Object** end) {
- for (Object** current = start; current < end; current++) {
- if ((*current)->IsHeapObject()) {
- HeapObject* object = HeapObject::cast(*current);
- ASSERT(HEAP->Contains(object));
- ASSERT(object->map()->IsMap());
- }
- }
- }
+ inline void VisitPointers(Object** start, Object** end);
};
#endif
@@ -2007,11 +2235,17 @@ class KeyedLookupCache {
// Clear the cache.
void Clear();
- static const int kLength = 64;
+ static const int kLength = 256;
static const int kCapacityMask = kLength - 1;
- static const int kMapHashShift = 2;
+ static const int kMapHashShift = 5;
+ static const int kHashMask = -4; // Zero the last two bits.
+ static const int kEntriesPerBucket = 4;
static const int kNotFound = -1;
+ // kEntriesPerBucket should be a power of 2.
+ STATIC_ASSERT((kEntriesPerBucket & (kEntriesPerBucket - 1)) == 0);
+ STATIC_ASSERT(kEntriesPerBucket == -kHashMask);
+
private:
KeyedLookupCache() {
for (int i = 0; i < kLength; ++i) {
@@ -2112,72 +2346,47 @@ class DescriptorLookupCache {
};
-// A helper class to document/test C++ scopes where we do not
-// expect a GC. Usage:
-//
-// /* Allocation not allowed: we cannot handle a GC in this scope. */
-// { AssertNoAllocation nogc;
-// ...
-// }
-
#ifdef DEBUG
-
class DisallowAllocationFailure {
public:
- DisallowAllocationFailure() {
- old_state_ = HEAP->disallow_allocation_failure_;
- HEAP->disallow_allocation_failure_ = true;
- }
- ~DisallowAllocationFailure() {
- HEAP->disallow_allocation_failure_ = old_state_;
- }
+ inline DisallowAllocationFailure();
+ inline ~DisallowAllocationFailure();
+
private:
bool old_state_;
};
+#endif
+
+// A helper class to document/test C++ scopes where we do not
+// expect a GC. Usage:
+//
+// /* Allocation not allowed: we cannot handle a GC in this scope. */
+// { AssertNoAllocation nogc;
+// ...
+// }
class AssertNoAllocation {
public:
- AssertNoAllocation() {
- old_state_ = HEAP->allow_allocation(false);
- }
-
- ~AssertNoAllocation() {
- HEAP->allow_allocation(old_state_);
- }
+ inline AssertNoAllocation();
+ inline ~AssertNoAllocation();
+#ifdef DEBUG
private:
bool old_state_;
+#endif
};
+
class DisableAssertNoAllocation {
public:
- DisableAssertNoAllocation() {
- old_state_ = HEAP->allow_allocation(true);
- }
-
- ~DisableAssertNoAllocation() {
- HEAP->allow_allocation(old_state_);
- }
+ inline DisableAssertNoAllocation();
+ inline ~DisableAssertNoAllocation();
+#ifdef DEBUG
private:
bool old_state_;
-};
-
-#else // ndef DEBUG
-
-class AssertNoAllocation {
- public:
- AssertNoAllocation() { }
- ~AssertNoAllocation() { }
-};
-
-class DisableAssertNoAllocation {
- public:
- DisableAssertNoAllocation() { }
- ~DisableAssertNoAllocation() { }
-};
-
#endif
+};
// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.
@@ -2191,7 +2400,13 @@ class GCTracer BASE_EMBEDDED {
MC_MARK,
MC_SWEEP,
MC_SWEEP_NEWSPACE,
- MC_COMPACT,
+ MC_EVACUATE_PAGES,
+ MC_UPDATE_NEW_TO_NEW_POINTERS,
+ MC_UPDATE_ROOT_TO_NEW_POINTERS,
+ MC_UPDATE_OLD_TO_NEW_POINTERS,
+ MC_UPDATE_POINTERS_TO_EVACUATED,
+ MC_UPDATE_POINTERS_BETWEEN_EVACUATED,
+ MC_UPDATE_MISC_POINTERS,
MC_FLUSH_CODE,
kNumberOfScopes
};
@@ -2213,7 +2428,9 @@ class GCTracer BASE_EMBEDDED {
double start_time_;
};
- explicit GCTracer(Heap* heap);
+ explicit GCTracer(Heap* heap,
+ const char* gc_reason,
+ const char* collector_reason);
~GCTracer();
// Sets the collector.
@@ -2234,15 +2451,21 @@ class GCTracer BASE_EMBEDDED {
const char* CollectorString();
// Returns size of object in heap (in MB).
- double SizeOfHeapObjects() {
- return (static_cast<double>(HEAP->SizeOfObjects())) / MB;
- }
+ inline double SizeOfHeapObjects();
+
+ // Timestamp set in the constructor.
+ double start_time_;
+
+ // Size of objects in heap set in constructor.
+ intptr_t start_object_size_;
- double start_time_; // Timestamp set in the constructor.
- intptr_t start_size_; // Size of objects in heap set in constructor.
- GarbageCollector collector_; // Type of collector.
+ // Size of memory allocated from OS set in constructor.
+ intptr_t start_memory_size_;
- // A count (including this one, eg, the first collection is 1) of the
+ // Type of collector.
+ GarbageCollector collector_;
+
+ // A count (including this one, e.g. the first collection is 1) of the
// number of garbage collections.
unsigned int gc_count_;
@@ -2275,6 +2498,9 @@ class GCTracer BASE_EMBEDDED {
double steps_took_since_last_gc_;
Heap* heap_;
+
+ const char* gc_reason_;
+ const char* collector_reason_;
};
@@ -2479,12 +2705,11 @@ class PathTracer : public ObjectVisitor {
AssertNoAllocation no_alloc; // i.e. no gc allowed.
+ private:
DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
};
#endif // DEBUG || LIVE_OBJECT_LIST
} } // namespace v8::internal
-#undef HEAP
-
#endif // V8_HEAP_H_
diff --git a/src/3rdparty/v8/src/hydrogen-instructions.cc b/src/3rdparty/v8/src/hydrogen-instructions.cc
index 7979726..f81f5f0 100644
--- a/src/3rdparty/v8/src/hydrogen-instructions.cc
+++ b/src/3rdparty/v8/src/hydrogen-instructions.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -67,6 +67,14 @@ const char* Representation::Mnemonic() const {
}
+int HValue::LoopWeight() const {
+ const int w = FLAG_loop_weight;
+ static const int weights[] = { 1, w, w*w, w*w*w, w*w*w*w };
+ return weights[Min(block()->LoopNestingDepth(),
+ static_cast<int>(ARRAY_SIZE(weights)-1))];
+}
+
+
void HValue::AssumeRepresentation(Representation r) {
if (CheckFlag(kFlexibleRepresentation)) {
ChangeRepresentation(r);
@@ -268,6 +276,23 @@ bool HValue::IsDefinedAfter(HBasicBlock* other) const {
}
+HUseListNode* HUseListNode::tail() {
+ // Skip and remove dead items in the use list.
+ while (tail_ != NULL && tail_->value()->CheckFlag(HValue::kIsDead)) {
+ tail_ = tail_->tail_;
+ }
+ return tail_;
+}
+
+
+bool HValue::CheckUsesForFlag(Flag f) {
+ for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
+ if (!it.value()->CheckFlag(f)) return false;
+ }
+ return true;
+}
+
+
HUseIterator::HUseIterator(HUseListNode* head) : next_(head) {
Advance();
}
@@ -366,7 +391,7 @@ void HValue::DeleteAndReplaceWith(HValue* other) {
// We replace all uses first, so Delete can assert that there are none.
if (other != NULL) ReplaceAllUsesWith(other);
ASSERT(HasNoUses());
- ClearOperands();
+ Kill();
DeleteFromGraph();
}
@@ -384,9 +409,17 @@ void HValue::ReplaceAllUsesWith(HValue* other) {
}
-void HValue::ClearOperands() {
+void HValue::Kill() {
+ // Instead of going through the entire use list of each operand, we only
+ // check the first item in each use list and rely on the tail() method to
+ // skip dead items, removing them lazily next time we traverse the list.
+ SetFlag(kIsDead);
for (int i = 0; i < OperandCount(); ++i) {
- SetOperandAt(i, NULL);
+ HValue* operand = OperandAt(i);
+ HUseListNode* first = operand->use_list_;
+ if (first != NULL && first->value() == this && first->index() == i) {
+ operand->use_list_ = first->tail();
+ }
}
}
@@ -416,18 +449,18 @@ void HValue::PrintRangeTo(StringStream* stream) {
void HValue::PrintChangesTo(StringStream* stream) {
- int changes_flags = ChangesFlags();
- if (changes_flags == 0) return;
+ GVNFlagSet changes_flags = ChangesFlags();
+ if (changes_flags.IsEmpty()) return;
stream->Add(" changes[");
- if (changes_flags == AllSideEffects()) {
+ if (changes_flags == AllSideEffectsFlagSet()) {
stream->Add("*");
} else {
bool add_comma = false;
-#define PRINT_DO(type) \
- if (changes_flags & (1 << kChanges##type)) { \
- if (add_comma) stream->Add(","); \
- add_comma = true; \
- stream->Add(#type); \
+#define PRINT_DO(type) \
+ if (changes_flags.Contains(kChanges##type)) { \
+ if (add_comma) stream->Add(","); \
+ add_comma = true; \
+ stream->Add(#type); \
}
GVN_FLAG_LIST(PRINT_DO);
#undef PRINT_DO
@@ -470,9 +503,9 @@ void HValue::RegisterUse(int index, HValue* new_value) {
}
-void HValue::AddNewRange(Range* r) {
- if (!HasRange()) ComputeInitialRange();
- if (!HasRange()) range_ = new Range();
+void HValue::AddNewRange(Range* r, Zone* zone) {
+ if (!HasRange()) ComputeInitialRange(zone);
+ if (!HasRange()) range_ = new(zone) Range();
ASSERT(HasRange());
r->StackUpon(range_);
range_ = r;
@@ -486,9 +519,9 @@ void HValue::RemoveLastAddedRange() {
}
-void HValue::ComputeInitialRange() {
+void HValue::ComputeInitialRange(Zone* zone) {
ASSERT(!HasRange());
- range_ = InferRange();
+ range_ = InferRange(zone);
ASSERT(HasRange());
}
@@ -646,11 +679,6 @@ void HCallNamed::PrintDataTo(StringStream* stream) {
}
-void HGlobalObject::PrintDataTo(StringStream* stream) {
- stream->Add("qml_global: %s ", qml_global()?"true":"false");
- HUnaryOperation::PrintDataTo(stream);
-}
-
void HCallGlobal::PrintDataTo(StringStream* stream) {
stream->Add("%o ", *name());
HUnaryCall::PrintDataTo(stream);
@@ -783,13 +811,86 @@ void HTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
}
+void HCheckMapValue::PrintDataTo(StringStream* stream) {
+ value()->PrintNameTo(stream);
+ stream->Add(" ");
+ map()->PrintNameTo(stream);
+}
+
+
+void HForInPrepareMap::PrintDataTo(StringStream* stream) {
+ enumerable()->PrintNameTo(stream);
+}
+
+
+void HForInCacheArray::PrintDataTo(StringStream* stream) {
+ enumerable()->PrintNameTo(stream);
+ stream->Add(" ");
+ map()->PrintNameTo(stream);
+ stream->Add("[%d]", idx_);
+}
+
+
+void HLoadFieldByIndex::PrintDataTo(StringStream* stream) {
+ object()->PrintNameTo(stream);
+ stream->Add(" ");
+ index()->PrintNameTo(stream);
+}
+
+
HValue* HConstant::Canonicalize() {
- return HasNoUses() && !IsBlockEntry() ? NULL : this;
+ return HasNoUses() ? NULL : this;
}
HValue* HTypeof::Canonicalize() {
- return HasNoUses() && !IsBlockEntry() ? NULL : this;
+ return HasNoUses() ? NULL : this;
+}
+
+
+HValue* HBitwise::Canonicalize() {
+ if (!representation().IsInteger32()) return this;
+ // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
+ int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0;
+ if (left()->IsConstant() &&
+ HConstant::cast(left())->HasInteger32Value() &&
+ HConstant::cast(left())->Integer32Value() == nop_constant) {
+ return right();
+ }
+ if (right()->IsConstant() &&
+ HConstant::cast(right())->HasInteger32Value() &&
+ HConstant::cast(right())->Integer32Value() == nop_constant) {
+ return left();
+ }
+ return this;
+}
+
+
+HValue* HAdd::Canonicalize() {
+ if (!representation().IsInteger32()) return this;
+ if (CheckUsesForFlag(kTruncatingToInt32)) ClearFlag(kCanOverflow);
+ return this;
+}
+
+
+HValue* HSub::Canonicalize() {
+ if (!representation().IsInteger32()) return this;
+ if (CheckUsesForFlag(kTruncatingToInt32)) ClearFlag(kCanOverflow);
+ return this;
+}
+
+
+HValue* HChange::Canonicalize() {
+ return (from().Equals(to())) ? value() : this;
+}
+
+
+HValue* HWrapReceiver::Canonicalize() {
+ if (HasNoUses()) return NULL;
+ if (receiver()->type().IsJSObject()) {
+ return receiver();
+ }
+ return this;
}
@@ -864,9 +965,13 @@ void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
}
-void HCheckMap::PrintDataTo(StringStream* stream) {
+void HCheckMaps::PrintDataTo(StringStream* stream) {
value()->PrintNameTo(stream);
- stream->Add(" %p", *map());
+ stream->Add(" [%p", *map_set()->first());
+ for (int i = 1; i < map_set()->length(); ++i) {
+ stream->Add(",%p", *map_set()->at(i));
+ }
+ stream->Add("]");
}
@@ -909,15 +1014,15 @@ void HInstanceOf::PrintDataTo(StringStream* stream) {
}
-Range* HValue::InferRange() {
+Range* HValue::InferRange(Zone* zone) {
// Untagged integer32 cannot be -0, all other representations can.
- Range* result = new Range();
+ Range* result = new(zone) Range();
result->set_can_be_minus_zero(!representation().IsInteger32());
return result;
}
-Range* HChange::InferRange() {
+Range* HChange::InferRange(Zone* zone) {
Range* input_range = value()->range();
if (from().IsInteger32() &&
to().IsTagged() &&
@@ -925,46 +1030,46 @@ Range* HChange::InferRange() {
set_type(HType::Smi());
}
Range* result = (input_range != NULL)
- ? input_range->Copy()
- : HValue::InferRange();
+ ? input_range->Copy(zone)
+ : HValue::InferRange(zone);
if (to().IsInteger32()) result->set_can_be_minus_zero(false);
return result;
}
-Range* HConstant::InferRange() {
+Range* HConstant::InferRange(Zone* zone) {
if (has_int32_value_) {
- Range* result = new Range(int32_value_, int32_value_);
+ Range* result = new(zone) Range(int32_value_, int32_value_);
result->set_can_be_minus_zero(false);
return result;
}
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
-Range* HPhi::InferRange() {
+Range* HPhi::InferRange(Zone* zone) {
if (representation().IsInteger32()) {
if (block()->IsLoopHeader()) {
- Range* range = new Range(kMinInt, kMaxInt);
+ Range* range = new(zone) Range(kMinInt, kMaxInt);
return range;
} else {
- Range* range = OperandAt(0)->range()->Copy();
+ Range* range = OperandAt(0)->range()->Copy(zone);
for (int i = 1; i < OperandCount(); ++i) {
range->Union(OperandAt(i)->range());
}
return range;
}
} else {
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
}
-Range* HAdd::InferRange() {
+Range* HAdd::InferRange(Zone* zone) {
if (representation().IsInteger32()) {
Range* a = left()->range();
Range* b = right()->range();
- Range* res = a->Copy();
+ Range* res = a->Copy(zone);
if (!res->AddAndCheckOverflow(b)) {
ClearFlag(kCanOverflow);
}
@@ -972,32 +1077,32 @@ Range* HAdd::InferRange() {
res->set_can_be_minus_zero(m0);
return res;
} else {
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
}
-Range* HSub::InferRange() {
+Range* HSub::InferRange(Zone* zone) {
if (representation().IsInteger32()) {
Range* a = left()->range();
Range* b = right()->range();
- Range* res = a->Copy();
+ Range* res = a->Copy(zone);
if (!res->SubAndCheckOverflow(b)) {
ClearFlag(kCanOverflow);
}
res->set_can_be_minus_zero(a->CanBeMinusZero() && b->CanBeZero());
return res;
} else {
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
}
-Range* HMul::InferRange() {
+Range* HMul::InferRange(Zone* zone) {
if (representation().IsInteger32()) {
Range* a = left()->range();
Range* b = right()->range();
- Range* res = a->Copy();
+ Range* res = a->Copy(zone);
if (!res->MulAndCheckOverflow(b)) {
ClearFlag(kCanOverflow);
}
@@ -1006,14 +1111,14 @@ Range* HMul::InferRange() {
res->set_can_be_minus_zero(m0);
return res;
} else {
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
}
-Range* HDiv::InferRange() {
+Range* HDiv::InferRange(Zone* zone) {
if (representation().IsInteger32()) {
- Range* result = new Range();
+ Range* result = new(zone) Range();
if (left()->range()->CanBeMinusZero()) {
result->set_can_be_minus_zero(true);
}
@@ -1031,15 +1136,15 @@ Range* HDiv::InferRange() {
}
return result;
} else {
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
}
-Range* HMod::InferRange() {
+Range* HMod::InferRange(Zone* zone) {
if (representation().IsInteger32()) {
Range* a = left()->range();
- Range* result = new Range();
+ Range* result = new(zone) Range();
if (a->CanBeMinusZero() || a->CanBeNegative()) {
result->set_can_be_minus_zero(true);
}
@@ -1048,7 +1153,7 @@ Range* HMod::InferRange() {
}
return result;
} else {
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
}
@@ -1121,7 +1226,7 @@ void HPhi::InitRealUses(int phi_id) {
HValue* value = it.value();
if (!value->IsPhi()) {
Representation rep = value->RequiredInputRepresentation(it.index());
- ++non_phi_uses_[rep.kind()];
+ non_phi_uses_[rep.kind()] += value->LoopWeight();
}
}
}
@@ -1232,10 +1337,9 @@ void HConstant::PrintDataTo(StringStream* stream) {
bool HArrayLiteral::IsCopyOnWrite() const {
- Handle<FixedArray> constant_elements = this->constant_elements();
- FixedArrayBase* constant_elements_values =
- FixedArrayBase::cast(constant_elements->get(1));
- return constant_elements_values->map() == HEAP->fixed_cow_array_map();
+ if (!boilerplate_object_->IsJSObject()) return false;
+ return Handle<JSObject>::cast(boilerplate_object_)->elements()->map() ==
+ HEAP->fixed_cow_array_map();
}
@@ -1248,40 +1352,41 @@ void HBinaryOperation::PrintDataTo(StringStream* stream) {
}
-Range* HBitwise::InferRange() {
- if (op() == Token::BIT_XOR) return HValue::InferRange();
+Range* HBitwise::InferRange(Zone* zone) {
+ if (op() == Token::BIT_XOR) return HValue::InferRange(zone);
+ const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff);
int32_t left_mask = (left()->range() != NULL)
? left()->range()->Mask()
- : 0xffffffff;
+ : kDefaultMask;
int32_t right_mask = (right()->range() != NULL)
? right()->range()->Mask()
- : 0xffffffff;
+ : kDefaultMask;
int32_t result_mask = (op() == Token::BIT_AND)
? left_mask & right_mask
: left_mask | right_mask;
return (result_mask >= 0)
- ? new Range(0, result_mask)
- : HValue::InferRange();
+ ? new(zone) Range(0, result_mask)
+ : HValue::InferRange(zone);
}
-Range* HSar::InferRange() {
+Range* HSar::InferRange(Zone* zone) {
if (right()->IsConstant()) {
HConstant* c = HConstant::cast(right());
if (c->HasInteger32Value()) {
Range* result = (left()->range() != NULL)
- ? left()->range()->Copy()
- : new Range();
+ ? left()->range()->Copy(zone)
+ : new(zone) Range();
result->Sar(c->Integer32Value());
result->set_can_be_minus_zero(false);
return result;
}
}
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
-Range* HShr::InferRange() {
+Range* HShr::InferRange(Zone* zone) {
if (right()->IsConstant()) {
HConstant* c = HConstant::cast(right());
if (c->HasInteger32Value()) {
@@ -1289,39 +1394,57 @@ Range* HShr::InferRange() {
if (left()->range()->CanBeNegative()) {
// Only compute bounds if the result always fits into an int32.
return (shift_count >= 1)
- ? new Range(0, static_cast<uint32_t>(0xffffffff) >> shift_count)
- : new Range();
+ ? new(zone) Range(0,
+ static_cast<uint32_t>(0xffffffff) >> shift_count)
+ : new(zone) Range();
} else {
// For positive inputs we can use the >> operator.
Range* result = (left()->range() != NULL)
- ? left()->range()->Copy()
- : new Range();
+ ? left()->range()->Copy(zone)
+ : new(zone) Range();
result->Sar(c->Integer32Value());
result->set_can_be_minus_zero(false);
return result;
}
}
}
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
-Range* HShl::InferRange() {
+Range* HShl::InferRange(Zone* zone) {
if (right()->IsConstant()) {
HConstant* c = HConstant::cast(right());
if (c->HasInteger32Value()) {
Range* result = (left()->range() != NULL)
- ? left()->range()->Copy()
- : new Range();
+ ? left()->range()->Copy(zone)
+ : new(zone) Range();
result->Shl(c->Integer32Value());
result->set_can_be_minus_zero(false);
return result;
}
}
- return HValue::InferRange();
+ return HValue::InferRange(zone);
}
+Range* HLoadKeyedSpecializedArrayElement::InferRange(Zone* zone) {
+ switch (elements_kind()) {
+ case EXTERNAL_PIXEL_ELEMENTS:
+ return new(zone) Range(0, 255);
+ case EXTERNAL_BYTE_ELEMENTS:
+ return new(zone) Range(-128, 127);
+ case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
+ return new(zone) Range(0, 255);
+ case EXTERNAL_SHORT_ELEMENTS:
+ return new(zone) Range(-32768, 32767);
+ case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
+ return new(zone) Range(0, 65535);
+ default:
+ return HValue::InferRange(zone);
+ }
+}
+
void HCompareGeneric::PrintDataTo(StringStream* stream) {
stream->Add(Token::Name(token()));
@@ -1330,6 +1453,13 @@ void HCompareGeneric::PrintDataTo(StringStream* stream) {
}
+void HStringCompareAndBranch::PrintDataTo(StringStream* stream) {
+ stream->Add(Token::Name(token()));
+ stream->Add(" ");
+ HControlInstruction::PrintDataTo(stream);
+}
+
+
void HCompareIDAndBranch::PrintDataTo(StringStream* stream) {
stream->Add(Token::Name(token()));
stream->Add(" ");
@@ -1356,7 +1486,22 @@ void HGoto::PrintDataTo(StringStream* stream) {
void HCompareIDAndBranch::SetInputRepresentation(Representation r) {
input_representation_ = r;
if (r.IsDouble()) {
- SetFlag(kDeoptimizeOnUndefined);
+ // According to the ES5 spec (11.9.3, 11.8.5), Equality comparisons (==, ===
+ // and !=) have special handling of undefined, e.g. undefined == undefined
+ // is 'true'. Relational comparisons have a different semantic, first
+ // calling ToPrimitive() on their arguments. The standard Crankshaft
+ // tagged-to-double conversion to ensure the HCompareIDAndBranch's inputs
+ // are doubles caused 'undefined' to be converted to NaN. That's compatible
+ // out-of-the box with ordered relational comparisons (<, >, <=,
+ // >=). However, for equality comparisons (and for 'in' and 'instanceof'),
+ // it is not consistent with the spec. For example, it would cause undefined
+ // == undefined (should be true) to be evaluated as NaN == NaN
+ // (false). Therefore, any comparisons other than ordered relational
+ // comparisons must cause a deopt when one of their arguments is undefined.
+ // See also v8:1434
+ if (!Token::IsOrderedRelationalCompareOp(token_)) {
+ SetFlag(kDeoptimizeOnUndefined);
+ }
} else {
ASSERT(r.IsInteger32());
}
@@ -1384,21 +1529,21 @@ HLoadNamedFieldPolymorphic::HLoadNamedFieldPolymorphic(HValue* context,
SetOperandAt(0, context);
SetOperandAt(1, object);
set_representation(Representation::Tagged());
- SetFlag(kDependsOnMaps);
+ SetGVNFlag(kDependsOnMaps);
for (int i = 0;
i < types->length() && types_.length() < kMaxLoadPolymorphism;
++i) {
Handle<Map> map = types->at(i);
LookupResult lookup(map->GetIsolate());
map->LookupInDescriptors(NULL, *name, &lookup);
- if (lookup.IsProperty()) {
+ if (lookup.IsFound()) {
switch (lookup.type()) {
case FIELD: {
int index = lookup.GetLocalFieldIndexFromMap(*map);
if (index < 0) {
- SetFlag(kDependsOnInobjectFields);
+ SetGVNFlag(kDependsOnInobjectFields);
} else {
- SetFlag(kDependsOnBackingStoreFields);
+ SetGVNFlag(kDependsOnBackingStoreFields);
}
types_.Add(types->at(i));
break;
@@ -1463,10 +1608,15 @@ void HLoadKeyedFastElement::PrintDataTo(StringStream* stream) {
bool HLoadKeyedFastElement::RequiresHoleCheck() {
+ if (hole_check_mode_ == OMIT_HOLE_CHECK) {
+ return false;
+ }
+
for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
HValue* use = it.value();
if (!use->IsChange()) return true;
}
+
return false;
}
@@ -1487,6 +1637,39 @@ void HLoadKeyedGeneric::PrintDataTo(StringStream* stream) {
}
+HValue* HLoadKeyedGeneric::Canonicalize() {
+ // Recognize generic keyed loads that use property name generated
+ // by for-in statement as a key and rewrite them into fast property load
+ // by index.
+ if (key()->IsLoadKeyedFastElement()) {
+ HLoadKeyedFastElement* key_load = HLoadKeyedFastElement::cast(key());
+ if (key_load->object()->IsForInCacheArray()) {
+ HForInCacheArray* names_cache =
+ HForInCacheArray::cast(key_load->object());
+
+ if (names_cache->enumerable() == object()) {
+ HForInCacheArray* index_cache =
+ names_cache->index_cache();
+ HCheckMapValue* map_check =
+ new(block()->zone()) HCheckMapValue(object(), names_cache->map());
+ HInstruction* index = new(block()->zone()) HLoadKeyedFastElement(
+ index_cache,
+ key_load->key(),
+ HLoadKeyedFastElement::OMIT_HOLE_CHECK);
+ HLoadFieldByIndex* load = new(block()->zone()) HLoadFieldByIndex(
+ object(), index);
+ map_check->InsertBefore(this);
+ index->InsertBefore(this);
+ load->InsertBefore(this);
+ return load;
+ }
+ }
+ }
+
+ return this;
+}
+
+
void HLoadKeyedSpecializedArrayElement::PrintDataTo(
StringStream* stream) {
external_pointer()->PrintNameTo(stream);
@@ -1693,7 +1876,7 @@ HType HValue::CalculateInferredType() {
}
-HType HCheckMap::CalculateInferredType() {
+HType HCheckMaps::CalculateInferredType() {
return value()->type();
}
@@ -1785,6 +1968,17 @@ HType HStringCharFromCode::CalculateInferredType() {
}
+HType HAllocateObject::CalculateInferredType() {
+ return HType::JSObject();
+}
+
+
+HType HFastLiteral::CalculateInferredType() {
+ // TODO(mstarzinger): Be smarter, could also be JSArray here.
+ return HType::JSObject();
+}
+
+
HType HArrayLiteral::CalculateInferredType() {
return HType::JSArray();
}
@@ -1892,6 +2086,167 @@ HValue* HAdd::EnsureAndPropagateNotMinusZero(BitVector* visited) {
}
+#define H_CONSTANT_INT32(val) \
+new(zone) HConstant(FACTORY->NewNumberFromInt(val, TENURED), \
+ Representation::Integer32())
+#define H_CONSTANT_DOUBLE(val) \
+new(zone) HConstant(FACTORY->NewNumber(val, TENURED), \
+ Representation::Double())
+
+#define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op) \
+HInstruction* HInstr::New##HInstr(Zone* zone, \
+ HValue* context, \
+ HValue* left, \
+ HValue* right) { \
+ if (left->IsConstant() && right->IsConstant()) { \
+ HConstant* c_left = HConstant::cast(left); \
+ HConstant* c_right = HConstant::cast(right); \
+ if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
+ double double_res = c_left->DoubleValue() op c_right->DoubleValue(); \
+ if (TypeInfo::IsInt32Double(double_res)) { \
+ return H_CONSTANT_INT32(static_cast<int32_t>(double_res)); \
+ } \
+ return H_CONSTANT_DOUBLE(double_res); \
+ } \
+ } \
+ return new(zone) HInstr(context, left, right); \
+}
+
+
+DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +)
+DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *)
+DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HSub, -)
+
+#undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR
+
+
+HInstruction* HMod::NewHMod(Zone* zone,
+ HValue* context,
+ HValue* left,
+ HValue* right) {
+ if (left->IsConstant() && right->IsConstant()) {
+ HConstant* c_left = HConstant::cast(left);
+ HConstant* c_right = HConstant::cast(right);
+ if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
+ int32_t dividend = c_left->Integer32Value();
+ int32_t divisor = c_right->Integer32Value();
+ if (divisor != 0) {
+ int32_t res = dividend % divisor;
+ if ((res == 0) && (dividend < 0)) {
+ return H_CONSTANT_DOUBLE(-0.0);
+ }
+ return H_CONSTANT_INT32(res);
+ }
+ }
+ }
+ return new(zone) HMod(context, left, right);
+}
+
+
+HInstruction* HDiv::NewHDiv(Zone* zone,
+ HValue* context,
+ HValue* left,
+ HValue* right) {
+ // If left and right are constant values, try to return a constant value.
+ if (left->IsConstant() && right->IsConstant()) {
+ HConstant* c_left = HConstant::cast(left);
+ HConstant* c_right = HConstant::cast(right);
+ if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
+ if (c_right->DoubleValue() != 0) {
+ double double_res = c_left->DoubleValue() / c_right->DoubleValue();
+ if (TypeInfo::IsInt32Double(double_res)) {
+ return H_CONSTANT_INT32(static_cast<int32_t>(double_res));
+ }
+ return H_CONSTANT_DOUBLE(double_res);
+ }
+ }
+ }
+ return new(zone) HDiv(context, left, right);
+}
+
+
+HInstruction* HBitwise::NewHBitwise(Zone* zone,
+ Token::Value op,
+ HValue* context,
+ HValue* left,
+ HValue* right) {
+ if (left->IsConstant() && right->IsConstant()) {
+ HConstant* c_left = HConstant::cast(left);
+ HConstant* c_right = HConstant::cast(right);
+ if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
+ int32_t result;
+ int32_t v_left = c_left->NumberValueAsInteger32();
+ int32_t v_right = c_right->NumberValueAsInteger32();
+ switch (op) {
+ case Token::BIT_XOR:
+ result = v_left ^ v_right;
+ break;
+ case Token::BIT_AND:
+ result = v_left & v_right;
+ break;
+ case Token::BIT_OR:
+ result = v_left | v_right;
+ break;
+ default:
+ result = 0; // Please the compiler.
+ UNREACHABLE();
+ }
+ return H_CONSTANT_INT32(result);
+ }
+ }
+ return new(zone) HBitwise(op, context, left, right);
+}
+
+
+#define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result) \
+HInstruction* HInstr::New##HInstr(Zone* zone, \
+ HValue* context, \
+ HValue* left, \
+ HValue* right) { \
+ if (left->IsConstant() && right->IsConstant()) { \
+ HConstant* c_left = HConstant::cast(left); \
+ HConstant* c_right = HConstant::cast(right); \
+ if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
+ return H_CONSTANT_INT32(result); \
+ } \
+ } \
+ return new(zone) HInstr(context, left, right); \
+}
+
+
+DEFINE_NEW_H_BITWISE_INSTR(HSar,
+c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
+DEFINE_NEW_H_BITWISE_INSTR(HShl,
+c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f))
+
+#undef DEFINE_NEW_H_BITWISE_INSTR
+
+
+HInstruction* HShr::NewHShr(Zone* zone,
+ HValue* context,
+ HValue* left,
+ HValue* right) {
+ if (left->IsConstant() && right->IsConstant()) {
+ HConstant* c_left = HConstant::cast(left);
+ HConstant* c_right = HConstant::cast(right);
+ if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
+ int32_t left_val = c_left->NumberValueAsInteger32();
+ int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
+ if ((right_val == 0) && (left_val < 0)) {
+ return H_CONSTANT_DOUBLE(
+ static_cast<double>(static_cast<uint32_t>(left_val)));
+ }
+ return H_CONSTANT_INT32(static_cast<uint32_t>(left_val) >> right_val);
+ }
+ }
+ return new(zone) HShr(context, left, right);
+}
+
+
+#undef H_CONSTANT_INT32
+#undef H_CONSTANT_DOUBLE
+
+
void HIn::PrintDataTo(StringStream* stream) {
key()->PrintNameTo(stream);
stream->Add(" ");
@@ -1899,6 +2254,46 @@ void HIn::PrintDataTo(StringStream* stream) {
}
+Representation HPhi::InferredRepresentation() {
+ bool double_occurred = false;
+ bool int32_occurred = false;
+ for (int i = 0; i < OperandCount(); ++i) {
+ HValue* value = OperandAt(i);
+ if (value->IsUnknownOSRValue()) {
+ HPhi* hint_value = HUnknownOSRValue::cast(value)->incoming_value();
+ if (hint_value != NULL) {
+ Representation hint = hint_value->representation();
+ if (hint.IsDouble()) double_occurred = true;
+ if (hint.IsInteger32()) int32_occurred = true;
+ }
+ continue;
+ }
+ if (value->representation().IsDouble()) double_occurred = true;
+ if (value->representation().IsInteger32()) int32_occurred = true;
+ if (value->representation().IsTagged()) {
+ if (value->IsConstant()) {
+ HConstant* constant = HConstant::cast(value);
+ if (constant->IsConvertibleToInteger()) {
+ int32_occurred = true;
+ } else if (constant->HasNumberValue()) {
+ double_occurred = true;
+ } else {
+ return Representation::Tagged();
+ }
+ } else {
+ return Representation::Tagged();
+ }
+ }
+ }
+
+ if (double_occurred) return Representation::Double();
+
+ if (int32_occurred) return Representation::Integer32();
+
+ return Representation::None();
+}
+
+
// Node-specific verification code is only included in debug mode.
#ifdef DEBUG
diff --git a/src/3rdparty/v8/src/hydrogen-instructions.h b/src/3rdparty/v8/src/hydrogen-instructions.h
index 2143533..d0dd568 100644
--- a/src/3rdparty/v8/src/hydrogen-instructions.h
+++ b/src/3rdparty/v8/src/hydrogen-instructions.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -62,6 +62,7 @@ class LChunkBuilder;
V(AbnormalExit) \
V(AccessArgumentsAt) \
V(Add) \
+ V(AllocateObject) \
V(ApplyArguments) \
V(ArgumentsElements) \
V(ArgumentsLength) \
@@ -84,7 +85,7 @@ class LChunkBuilder;
V(Change) \
V(CheckFunction) \
V(CheckInstanceType) \
- V(CheckMap) \
+ V(CheckMaps) \
V(CheckNonSmi) \
V(CheckPrototypeMaps) \
V(CheckSmi) \
@@ -97,11 +98,13 @@ class LChunkBuilder;
V(CompareConstantEqAndBranch) \
V(Constant) \
V(Context) \
+ V(DeclareGlobals) \
V(DeleteProperty) \
V(Deoptimize) \
V(Div) \
V(ElementsKind) \
V(EnterInlined) \
+ V(FastLiteral) \
V(FixedArrayBaseLength) \
V(ForceRepresentation) \
V(FunctionLiteral) \
@@ -118,8 +121,10 @@ class LChunkBuilder;
V(IsConstructCallAndBranch) \
V(IsNilAndBranch) \
V(IsObjectAndBranch) \
+ V(IsStringAndBranch) \
V(IsSmiAndBranch) \
V(IsUndetectableAndBranch) \
+ V(StringCompareAndBranch) \
V(JSArrayLength) \
V(LeaveInlined) \
V(LoadContextSlot) \
@@ -143,6 +148,7 @@ class LChunkBuilder;
V(Parameter) \
V(Power) \
V(PushArgument) \
+ V(Random) \
V(RegExpLiteral) \
V(Return) \
V(Sar) \
@@ -168,20 +174,26 @@ class LChunkBuilder;
V(ThisFunction) \
V(Throw) \
V(ToFastProperties) \
- V(ToInt32) \
V(TransitionElementsKind) \
V(Typeof) \
V(TypeofIsAndBranch) \
V(UnaryMathOperation) \
V(UnknownOSRValue) \
V(UseConst) \
- V(ValueOf)
+ V(ValueOf) \
+ V(ForInPrepareMap) \
+ V(ForInCacheArray) \
+ V(CheckMapValue) \
+ V(LoadFieldByIndex) \
+ V(DateField) \
+ V(WrapReceiver)
#define GVN_FLAG_LIST(V) \
V(Calls) \
V(InobjectFields) \
V(BackingStoreFields) \
V(ElementsKind) \
+ V(ElementsPointer) \
V(ArrayElements) \
V(DoubleArrayElements) \
V(SpecializedArrayElements) \
@@ -225,10 +237,14 @@ class Range: public ZoneObject {
int32_t upper() const { return upper_; }
int32_t lower() const { return lower_; }
Range* next() const { return next_; }
- Range* CopyClearLower() const { return new Range(kMinInt, upper_); }
- Range* CopyClearUpper() const { return new Range(lower_, kMaxInt); }
- Range* Copy() const {
- Range* result = new Range(lower_, upper_);
+ Range* CopyClearLower(Zone* zone) const {
+ return new(zone) Range(kMinInt, upper_);
+ }
+ Range* CopyClearUpper(Zone* zone) const {
+ return new(zone) Range(lower_, kMaxInt);
+ }
+ Range* Copy(Zone* zone) const {
+ Range* result = new(zone) Range(lower_, upper_);
result->set_can_be_minus_zero(CanBeMinusZero());
return result;
}
@@ -438,7 +454,7 @@ class HUseListNode: public ZoneObject {
: tail_(tail), value_(value), index_(index) {
}
- HUseListNode* tail() const { return tail_; }
+ HUseListNode* tail();
HValue* value() const { return value_; }
int index() const { return index_; }
@@ -488,18 +504,26 @@ class HUseIterator BASE_EMBEDDED {
};
+// There must be one corresponding kDepends flag for every kChanges flag and
+// the order of the kChanges flags must be exactly the same as of the kDepends
+// flags.
+enum GVNFlag {
+ // Declare global value numbering flags.
+#define DECLARE_FLAG(type) kChanges##type, kDependsOn##type,
+ GVN_FLAG_LIST(DECLARE_FLAG)
+#undef DECLARE_FLAG
+ kAfterLastFlag,
+ kLastFlag = kAfterLastFlag - 1
+};
+
+typedef EnumSet<GVNFlag> GVNFlagSet;
+
+
class HValue: public ZoneObject {
public:
static const int kNoNumber = -1;
- // There must be one corresponding kDepends flag for every kChanges flag and
- // the order of the kChanges flags must be exactly the same as of the kDepends
- // flags.
enum Flag {
- // Declare global value numbering flags.
- #define DECLARE_DO(type) kChanges##type, kDependsOn##type,
- GVN_FLAG_LIST(DECLARE_DO)
- #undef DECLARE_DO
kFlexibleRepresentation,
// Participate in Global Value Numbering, i.e. elimination of
// unnecessary recomputations. If an instruction sets this flag, it must
@@ -512,15 +536,16 @@ class HValue: public ZoneObject {
kDeoptimizeOnUndefined,
kIsArguments,
kTruncatingToInt32,
- kLastFlag = kTruncatingToInt32
+ kIsDead,
+ kLastFlag = kIsDead
};
STATIC_ASSERT(kLastFlag < kBitsPerInt);
static const int kChangesToDependsFlagsLeftShift = 1;
- static int ConvertChangesToDependsFlags(int flags) {
- return flags << kChangesToDependsFlagsLeftShift;
+ static GVNFlagSet ConvertChangesToDependsFlags(GVNFlagSet flags) {
+ return GVNFlagSet(flags.ToIntegral() << kChangesToDependsFlagsLeftShift);
}
static HValue* cast(HValue* value) { return value; }
@@ -557,6 +582,7 @@ class HValue: public ZoneObject {
HBasicBlock* block() const { return block_; }
void SetBlock(HBasicBlock* block);
+ int LoopWeight() const;
int id() const { return id_; }
void set_id(int id) { id_ = id; }
@@ -611,30 +637,63 @@ class HValue: public ZoneObject {
return use_list_ != NULL && use_list_->tail() != NULL;
}
int UseCount() const;
- void ClearOperands();
+
+ // Mark this HValue as dead and to be removed from other HValues' use lists.
+ void Kill();
int flags() const { return flags_; }
void SetFlag(Flag f) { flags_ |= (1 << f); }
void ClearFlag(Flag f) { flags_ &= ~(1 << f); }
bool CheckFlag(Flag f) const { return (flags_ & (1 << f)) != 0; }
- void SetAllSideEffects() { flags_ |= AllSideEffects(); }
- void ClearAllSideEffects() { flags_ &= ~AllSideEffects(); }
- bool HasSideEffects() const { return (flags_ & AllSideEffects()) != 0; }
+ // Returns true if the flag specified is set for all uses, false otherwise.
+ bool CheckUsesForFlag(Flag f);
+
+ GVNFlagSet gvn_flags() const { return gvn_flags_; }
+ void SetGVNFlag(GVNFlag f) { gvn_flags_.Add(f); }
+ void ClearGVNFlag(GVNFlag f) { gvn_flags_.Remove(f); }
+ bool CheckGVNFlag(GVNFlag f) const { return gvn_flags_.Contains(f); }
+ void SetAllSideEffects() { gvn_flags_.Add(AllSideEffectsFlagSet()); }
+ void ClearAllSideEffects() {
+ gvn_flags_.Remove(AllSideEffectsFlagSet());
+ }
+ bool HasSideEffects() const {
+ return gvn_flags_.ContainsAnyOf(AllSideEffectsFlagSet());
+ }
bool HasObservableSideEffects() const {
- return (flags_ & ObservableSideEffects()) != 0;
+ return gvn_flags_.ContainsAnyOf(AllObservableSideEffectsFlagSet());
+ }
+
+ GVNFlagSet DependsOnFlags() const {
+ GVNFlagSet result = gvn_flags_;
+ result.Intersect(AllDependsOnFlagSet());
+ return result;
+ }
+
+ GVNFlagSet SideEffectFlags() const {
+ GVNFlagSet result = gvn_flags_;
+ result.Intersect(AllSideEffectsFlagSet());
+ return result;
+ }
+
+ GVNFlagSet ChangesFlags() const {
+ GVNFlagSet result = gvn_flags_;
+ result.Intersect(AllChangesFlagSet());
+ return result;
}
- int ChangesFlags() const { return flags_ & ChangesFlagsMask(); }
- int ObservableChangesFlags() const {
- return flags_ & ChangesFlagsMask() & ObservableSideEffects();
+ GVNFlagSet ObservableChangesFlags() const {
+ GVNFlagSet result = gvn_flags_;
+ result.Intersect(AllChangesFlagSet());
+ result.Intersect(AllObservableSideEffectsFlagSet());
+ return result;
}
Range* range() const { return range_; }
bool HasRange() const { return range_ != NULL; }
- void AddNewRange(Range* r);
+ void AddNewRange(Range* r, Zone* zone);
void RemoveLastAddedRange();
- void ComputeInitialRange();
+ void ComputeInitialRange(Zone* zone);
// Representation helpers.
virtual Representation RequiredInputRepresentation(int index) = 0;
@@ -679,7 +738,7 @@ class HValue: public ZoneObject {
return false;
}
virtual void RepresentationChanged(Representation to) { }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
virtual void DeleteFromGraph() = 0;
virtual void InternalSetOperandAt(int index, HValue* value) = 0;
void clear_block() {
@@ -693,25 +752,39 @@ class HValue: public ZoneObject {
representation_ = r;
}
- private:
- static int ChangesFlagsMask() {
- int result = 0;
+ static GVNFlagSet AllDependsOnFlagSet() {
+ GVNFlagSet result;
+ // Create changes mask.
+#define ADD_FLAG(type) result.Add(kDependsOn##type);
+ GVN_FLAG_LIST(ADD_FLAG)
+#undef ADD_FLAG
+ return result;
+ }
+
+ static GVNFlagSet AllChangesFlagSet() {
+ GVNFlagSet result;
// Create changes mask.
-#define ADD_FLAG(type) result |= (1 << kChanges##type);
+#define ADD_FLAG(type) result.Add(kChanges##type);
GVN_FLAG_LIST(ADD_FLAG)
#undef ADD_FLAG
return result;
}
// A flag mask to mark an instruction as having arbitrary side effects.
- static int AllSideEffects() {
- return ChangesFlagsMask() & ~(1 << kChangesOsrEntries);
+ static GVNFlagSet AllSideEffectsFlagSet() {
+ GVNFlagSet result = AllChangesFlagSet();
+ result.Remove(kChangesOsrEntries);
+ return result;
}
// A flag mask of all side effects that can make observable changes in
// an executing program (i.e. are not safe to repeat, move or remove);
- static int ObservableSideEffects() {
- return ChangesFlagsMask() & ~(1 << kChangesElementsKind);
+ static GVNFlagSet AllObservableSideEffectsFlagSet() {
+ GVNFlagSet result = AllChangesFlagSet();
+ result.Remove(kChangesElementsKind);
+ result.Remove(kChangesElementsPointer);
+ result.Remove(kChangesMaps);
+ return result;
}
// Remove the matching use from the use list if present. Returns the
@@ -731,7 +804,9 @@ class HValue: public ZoneObject {
HUseListNode* use_list_;
Range* range_;
int flags_;
+ GVNFlagSet gvn_flags_;
+ private:
DISALLOW_COPY_AND_ASSIGN(HValue);
};
@@ -753,6 +828,8 @@ class HInstruction: public HValue {
bool has_position() const { return position_ != RelocInfo::kNoPosition; }
void set_position(int position) { position_ = position; }
+ bool CanTruncateToInt32() const { return CheckFlag(kTruncatingToInt32); }
+
virtual LInstruction* CompileToLithium(LChunkBuilder* builder) = 0;
#ifdef DEBUG
@@ -768,7 +845,7 @@ class HInstruction: public HValue {
: next_(NULL),
previous_(NULL),
position_(RelocInfo::kNoPosition) {
- SetFlag(kDependsOnOsrEntries);
+ SetGVNFlag(kDependsOnOsrEntries);
}
virtual void DeleteFromGraph() { Unlink(); }
@@ -1050,10 +1127,6 @@ class HUnaryOperation: public HTemplateInstruction<1> {
return reinterpret_cast<HUnaryOperation*>(value);
}
- virtual bool CanTruncateToInt32() const {
- return CheckFlag(kTruncatingToInt32);
- }
-
HValue* value() { return OperandAt(0); }
virtual void PrintDataTo(StringStream* stream);
};
@@ -1127,17 +1200,21 @@ class HChange: public HUnaryOperation {
virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited);
virtual HType CalculateInferredType();
+ virtual HValue* Canonicalize();
Representation from() { return value()->representation(); }
Representation to() { return representation(); }
bool deoptimize_on_undefined() const {
return CheckFlag(kDeoptimizeOnUndefined);
}
+ bool deoptimize_on_minus_zero() const {
+ return CheckFlag(kBailoutOnMinusZero);
+ }
virtual Representation RequiredInputRepresentation(int index) {
return from();
}
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
virtual void PrintDataTo(StringStream* stream);
@@ -1167,37 +1244,6 @@ class HClampToUint8: public HUnaryOperation {
};
-class HToInt32: public HUnaryOperation {
- public:
- explicit HToInt32(HValue* value)
- : HUnaryOperation(value) {
- set_representation(Representation::Integer32());
- SetFlag(kUseGVN);
- }
-
- virtual Representation RequiredInputRepresentation(int index) {
- return Representation::None();
- }
-
- virtual bool CanTruncateToInt32() const {
- return true;
- }
-
- virtual HValue* Canonicalize() {
- if (value()->representation().IsInteger32()) {
- return value();
- } else {
- return this;
- }
- }
-
- DECLARE_CONCRETE_INSTRUCTION(ToInt32)
-
- protected:
- virtual bool DataEquals(HValue* other) { return true; }
-};
-
-
class HSimulate: public HInstruction {
public:
HSimulate(int ast_id, int pop_count)
@@ -1304,29 +1350,42 @@ class HStackCheck: public HTemplateInstruction<1> {
class HEnterInlined: public HTemplateInstruction<0> {
public:
HEnterInlined(Handle<JSFunction> closure,
+ int arguments_count,
FunctionLiteral* function,
- CallKind call_kind)
+ CallKind call_kind,
+ bool is_construct,
+ Variable* arguments)
: closure_(closure),
+ arguments_count_(arguments_count),
function_(function),
- call_kind_(call_kind) {
+ call_kind_(call_kind),
+ is_construct_(is_construct),
+ arguments_(arguments) {
}
virtual void PrintDataTo(StringStream* stream);
Handle<JSFunction> closure() const { return closure_; }
+ int arguments_count() const { return arguments_count_; }
FunctionLiteral* function() const { return function_; }
CallKind call_kind() const { return call_kind_; }
+ bool is_construct() const { return is_construct_; }
virtual Representation RequiredInputRepresentation(int index) {
return Representation::None();
}
+ Variable* arguments() { return arguments_; }
+
DECLARE_CONCRETE_INSTRUCTION(EnterInlined)
private:
Handle<JSFunction> closure_;
+ int arguments_count_;
FunctionLiteral* function_;
CallKind call_kind_;
+ bool is_construct_;
+ Variable* arguments_;
};
@@ -1420,32 +1479,48 @@ class HOuterContext: public HUnaryOperation {
};
-class HGlobalObject: public HUnaryOperation {
+class HDeclareGlobals: public HUnaryOperation {
public:
- explicit HGlobalObject(HValue* context) : HUnaryOperation(context), qml_global_(false) {
+ HDeclareGlobals(HValue* context,
+ Handle<FixedArray> pairs,
+ int flags)
+ : HUnaryOperation(context),
+ pairs_(pairs),
+ flags_(flags) {
set_representation(Representation::Tagged());
- SetFlag(kUseGVN);
+ SetAllSideEffects();
}
- virtual void PrintDataTo(StringStream* stream);
+ HValue* context() { return OperandAt(0); }
+ Handle<FixedArray> pairs() const { return pairs_; }
+ int flags() const { return flags_; }
- DECLARE_CONCRETE_INSTRUCTION(GlobalObject)
+ DECLARE_CONCRETE_INSTRUCTION(DeclareGlobals)
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
}
+ private:
+ Handle<FixedArray> pairs_;
+ int flags_;
+};
- bool qml_global() { return qml_global_; }
- void set_qml_global(bool v) { qml_global_ = v; }
- protected:
- virtual bool DataEquals(HValue* other) {
- HGlobalObject* o = HGlobalObject::cast(other);
- return o->qml_global_ == qml_global_;
+class HGlobalObject: public HUnaryOperation {
+ public:
+ explicit HGlobalObject(HValue* context) : HUnaryOperation(context) {
+ set_representation(Representation::Tagged());
+ SetFlag(kUseGVN);
}
- private:
- bool qml_global_;
+ DECLARE_CONCRETE_INSTRUCTION(GlobalObject)
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Tagged();
+ }
+
+ protected:
+ virtual bool DataEquals(HValue* other) { return true; }
};
@@ -1605,13 +1680,14 @@ class HCallNamed: public HUnaryCall {
};
-class HCallFunction: public HUnaryCall {
+class HCallFunction: public HBinaryCall {
public:
- HCallFunction(HValue* context, int argument_count)
- : HUnaryCall(context, argument_count) {
+ HCallFunction(HValue* context, HValue* function, int argument_count)
+ : HBinaryCall(context, function, argument_count) {
}
- HValue* context() { return value(); }
+ HValue* context() { return first(); }
+ HValue* function() { return second(); }
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
@@ -1624,7 +1700,7 @@ class HCallFunction: public HUnaryCall {
class HCallGlobal: public HUnaryCall {
public:
HCallGlobal(HValue* context, Handle<String> name, int argument_count)
- : HUnaryCall(context, argument_count), name_(name), qml_global_(false) {
+ : HUnaryCall(context, argument_count), name_(name) {
}
virtual void PrintDataTo(StringStream* stream);
@@ -1636,14 +1712,10 @@ class HCallGlobal: public HUnaryCall {
return Representation::Tagged();
}
- bool qml_global() { return qml_global_; }
- void set_qml_global(bool v) { qml_global_ = v; }
-
DECLARE_CONCRETE_INSTRUCTION(CallGlobal)
private:
Handle<String> name_;
- bool qml_global_;
};
@@ -1722,8 +1794,8 @@ class HJSArrayLength: public HTemplateInstruction<2> {
SetOperandAt(1, typecheck);
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
- SetFlag(kDependsOnArrayLengths);
- SetFlag(kDependsOnMaps);
+ SetGVNFlag(kDependsOnArrayLengths);
+ SetGVNFlag(kDependsOnMaps);
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -1747,7 +1819,7 @@ class HFixedArrayBaseLength: public HUnaryOperation {
explicit HFixedArrayBaseLength(HValue* value) : HUnaryOperation(value) {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
- SetFlag(kDependsOnArrayLengths);
+ SetGVNFlag(kDependsOnArrayLengths);
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -1766,7 +1838,7 @@ class HElementsKind: public HUnaryOperation {
explicit HElementsKind(HValue* value) : HUnaryOperation(value) {
set_representation(Representation::Integer32());
SetFlag(kUseGVN);
- SetFlag(kDependsOnElementsKind);
+ SetGVNFlag(kDependsOnElementsKind);
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -1821,6 +1893,7 @@ class HUnaryMathOperation: public HTemplateInstruction<2> {
case kMathLog:
case kMathSin:
case kMathCos:
+ case kMathTan:
set_representation(Representation::Double());
break;
default:
@@ -1851,6 +1924,7 @@ class HUnaryMathOperation: public HTemplateInstruction<2> {
case kMathLog:
case kMathSin:
case kMathCos:
+ case kMathTan:
return Representation::Double();
case kMathAbs:
return representation();
@@ -1892,8 +1966,7 @@ class HLoadElements: public HUnaryOperation {
explicit HLoadElements(HValue* value) : HUnaryOperation(value) {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
- SetFlag(kDependsOnMaps);
- SetFlag(kDependsOnElementsKind);
+ SetGVNFlag(kDependsOnElementsPointer);
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -1930,17 +2003,59 @@ class HLoadExternalArrayPointer: public HUnaryOperation {
};
-class HCheckMap: public HTemplateInstruction<2> {
+class HCheckMaps: public HTemplateInstruction<2> {
public:
- HCheckMap(HValue* value, Handle<Map> map, HValue* typecheck = NULL)
- : map_(map) {
+ HCheckMaps(HValue* value, Handle<Map> map, HValue* typecheck = NULL) {
SetOperandAt(0, value);
// If callers don't depend on a typecheck, they can pass in NULL. In that
// case we use a copy of the |value| argument as a dummy value.
SetOperandAt(1, typecheck != NULL ? typecheck : value);
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
- SetFlag(kDependsOnMaps);
+ SetGVNFlag(kDependsOnMaps);
+ SetGVNFlag(kDependsOnElementsKind);
+ map_set()->Add(map);
+ }
+ HCheckMaps(HValue* value, SmallMapList* maps) {
+ SetOperandAt(0, value);
+ SetOperandAt(1, value);
+ set_representation(Representation::Tagged());
+ SetFlag(kUseGVN);
+ SetGVNFlag(kDependsOnMaps);
+ SetGVNFlag(kDependsOnElementsKind);
+ for (int i = 0; i < maps->length(); i++) {
+ map_set()->Add(maps->at(i));
+ }
+ map_set()->Sort();
+ }
+
+ static HCheckMaps* NewWithTransitions(HValue* object, Handle<Map> map) {
+ HCheckMaps* check_map = new HCheckMaps(object, map);
+ SmallMapList* map_set = check_map->map_set();
+
+ // If the map to check has the untransitioned elements, it can be hoisted
+ // above TransitionElements instructions.
+ if (map->has_fast_smi_only_elements()) {
+ check_map->ClearGVNFlag(kDependsOnElementsKind);
+ }
+
+ Map* transitioned_fast_element_map =
+ map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL);
+ ASSERT(transitioned_fast_element_map == NULL ||
+ map->elements_kind() != FAST_ELEMENTS);
+ if (transitioned_fast_element_map != NULL) {
+ map_set->Add(Handle<Map>(transitioned_fast_element_map));
+ }
+ Map* transitioned_double_map =
+ map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL);
+ ASSERT(transitioned_double_map == NULL ||
+ map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
+ if (transitioned_double_map != NULL) {
+ map_set->Add(Handle<Map>(transitioned_double_map));
+ }
+ map_set->Sort();
+
+ return check_map;
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -1950,18 +2065,23 @@ class HCheckMap: public HTemplateInstruction<2> {
virtual HType CalculateInferredType();
HValue* value() { return OperandAt(0); }
- Handle<Map> map() const { return map_; }
+ SmallMapList* map_set() { return &map_set_; }
- DECLARE_CONCRETE_INSTRUCTION(CheckMap)
+ DECLARE_CONCRETE_INSTRUCTION(CheckMaps)
protected:
virtual bool DataEquals(HValue* other) {
- HCheckMap* b = HCheckMap::cast(other);
- return map_.is_identical_to(b->map());
+ HCheckMaps* b = HCheckMaps::cast(other);
+ // Relies on the fact that map_set has been sorted before.
+ if (map_set()->length() != b->map_set()->length()) return false;
+ for (int i = 0; i < map_set()->length(); i++) {
+ if (!map_set()->at(i).is_identical_to(b->map_set()->at(i))) return false;
+ }
+ return true;
}
private:
- Handle<Map> map_;
+ SmallMapList map_set_;
};
@@ -2098,7 +2218,7 @@ class HCheckPrototypeMaps: public HTemplateInstruction<0> {
HCheckPrototypeMaps(Handle<JSObject> prototype, Handle<JSObject> holder)
: prototype_(prototype), holder_(holder) {
SetFlag(kUseGVN);
- SetFlag(kDependsOnMaps);
+ SetGVNFlag(kDependsOnMaps);
}
#ifdef DEBUG
@@ -2174,22 +2294,9 @@ class HPhi: public HValue {
SetFlag(kFlexibleRepresentation);
}
- virtual Representation InferredRepresentation() {
- bool double_occurred = false;
- bool int32_occurred = false;
- for (int i = 0; i < OperandCount(); ++i) {
- HValue* value = OperandAt(i);
- if (value->representation().IsDouble()) double_occurred = true;
- if (value->representation().IsInteger32()) int32_occurred = true;
- if (value->representation().IsTagged()) return Representation::Tagged();
- }
-
- if (double_occurred) return Representation::Double();
- if (int32_occurred) return Representation::Integer32();
- return Representation::None();
- }
+ virtual Representation InferredRepresentation();
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
virtual Representation RequiredInputRepresentation(int index) {
return representation();
}
@@ -2340,13 +2447,24 @@ class HConstant: public HTemplateInstruction<0> {
ASSERT(HasDoubleValue());
return double_value_;
}
+ bool HasNumberValue() const { return has_int32_value_ || has_double_value_; }
+ int32_t NumberValueAsInteger32() const {
+ ASSERT(HasNumberValue());
+ if (has_int32_value_) return int32_value_;
+ return DoubleToInt32(double_value_);
+ }
bool HasStringValue() const { return handle_->IsString(); }
bool ToBoolean() const;
virtual intptr_t Hashcode() {
ASSERT(!HEAP->allow_allocation(false));
- return reinterpret_cast<intptr_t>(*handle());
+ intptr_t hash = reinterpret_cast<intptr_t>(*handle());
+ // Prevent smis from having fewer hash values when truncated to
+ // the least significant bits.
+ const int kShiftSize = kSmiShiftSize + kSmiTagSize;
+ STATIC_ASSERT(kShiftSize != 0);
+ return hash ^ (hash >> kShiftSize);
}
#ifdef DEBUG
@@ -2356,7 +2474,7 @@ class HConstant: public HTemplateInstruction<0> {
DECLARE_CONCRETE_INSTRUCTION(Constant)
protected:
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
virtual bool DataEquals(HValue* other) {
HConstant* other_constant = HConstant::cast(other);
@@ -2406,6 +2524,27 @@ class HBinaryOperation: public HTemplateInstruction<3> {
};
+class HWrapReceiver: public HTemplateInstruction<2> {
+ public:
+ HWrapReceiver(HValue* receiver, HValue* function) {
+ set_representation(Representation::Tagged());
+ SetOperandAt(0, receiver);
+ SetOperandAt(1, function);
+ }
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Tagged();
+ }
+
+ HValue* receiver() { return OperandAt(0); }
+ HValue* function() { return OperandAt(1); }
+
+ virtual HValue* Canonicalize();
+
+ DECLARE_CONCRETE_INSTRUCTION(WrapReceiver)
+};
+
+
class HApplyArguments: public HTemplateInstruction<4> {
public:
HApplyArguments(HValue* function,
@@ -2730,6 +2869,18 @@ class HIsObjectAndBranch: public HUnaryControlInstruction {
DECLARE_CONCRETE_INSTRUCTION(IsObjectAndBranch)
};
+class HIsStringAndBranch: public HUnaryControlInstruction {
+ public:
+ explicit HIsStringAndBranch(HValue* value)
+ : HUnaryControlInstruction(value, NULL, NULL) { }
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Tagged();
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(IsStringAndBranch)
+};
+
class HIsSmiAndBranch: public HUnaryControlInstruction {
public:
@@ -2760,6 +2911,42 @@ class HIsUndetectableAndBranch: public HUnaryControlInstruction {
};
+class HStringCompareAndBranch: public HTemplateControlInstruction<2, 3> {
+ public:
+ HStringCompareAndBranch(HValue* context,
+ HValue* left,
+ HValue* right,
+ Token::Value token)
+ : token_(token) {
+ ASSERT(Token::IsCompareOp(token));
+ SetOperandAt(0, context);
+ SetOperandAt(1, left);
+ SetOperandAt(2, right);
+ set_representation(Representation::Tagged());
+ }
+
+ HValue* context() { return OperandAt(0); }
+ HValue* left() { return OperandAt(1); }
+ HValue* right() { return OperandAt(2); }
+ Token::Value token() const { return token_; }
+
+ virtual void PrintDataTo(StringStream* stream);
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Tagged();
+ }
+
+ Representation GetInputRepresentation() const {
+ return Representation::Tagged();
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(StringCompareAndBranch)
+
+ private:
+ Token::Value token_;
+};
+
+
class HIsConstructCallAndBranch: public HTemplateControlInstruction<2, 0> {
public:
virtual Representation RequiredInputRepresentation(int index) {
@@ -2942,6 +3129,23 @@ class HPower: public HTemplateInstruction<2> {
};
+class HRandom: public HTemplateInstruction<1> {
+ public:
+ explicit HRandom(HValue* global_object) {
+ SetOperandAt(0, global_object);
+ set_representation(Representation::Double());
+ }
+
+ HValue* global_object() { return OperandAt(0); }
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Tagged();
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(Random)
+};
+
+
class HAdd: public HArithmeticBinaryOperation {
public:
HAdd(HValue* context, HValue* left, HValue* right)
@@ -2957,14 +3161,21 @@ class HAdd: public HArithmeticBinaryOperation {
virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited);
+ static HInstruction* NewHAdd(Zone* zone,
+ HValue* context,
+ HValue* left,
+ HValue* right);
+
virtual HType CalculateInferredType();
+ virtual HValue* Canonicalize();
+
DECLARE_CONCRETE_INSTRUCTION(Add)
protected:
virtual bool DataEquals(HValue* other) { return true; }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
};
@@ -2977,12 +3188,19 @@ class HSub: public HArithmeticBinaryOperation {
virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited);
+ virtual HValue* Canonicalize();
+
+ static HInstruction* NewHSub(Zone* zone,
+ HValue* context,
+ HValue* left,
+ HValue* right);
+
DECLARE_CONCRETE_INSTRUCTION(Sub)
protected:
virtual bool DataEquals(HValue* other) { return true; }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
};
@@ -3000,12 +3218,17 @@ class HMul: public HArithmeticBinaryOperation {
return !representation().IsTagged();
}
+ static HInstruction* NewHMul(Zone* zone,
+ HValue* context,
+ HValue* left,
+ HValue* right);
+
DECLARE_CONCRETE_INSTRUCTION(Mul)
protected:
virtual bool DataEquals(HValue* other) { return true; }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
};
@@ -3028,12 +3251,17 @@ class HMod: public HArithmeticBinaryOperation {
virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited);
+ static HInstruction* NewHMod(Zone* zone,
+ HValue* context,
+ HValue* left,
+ HValue* right);
+
DECLARE_CONCRETE_INSTRUCTION(Mod)
protected:
virtual bool DataEquals(HValue* other) { return true; }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
};
@@ -3047,12 +3275,17 @@ class HDiv: public HArithmeticBinaryOperation {
virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited);
+ static HInstruction* NewHDiv(Zone* zone,
+ HValue* context,
+ HValue* left,
+ HValue* right);
+
DECLARE_CONCRETE_INSTRUCTION(Div)
protected:
virtual bool DataEquals(HValue* other) { return true; }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
};
@@ -3069,6 +3302,14 @@ class HBitwise: public HBitwiseBinaryOperation {
virtual bool IsCommutative() const { return true; }
+ virtual HValue* Canonicalize();
+
+ static HInstruction* NewHBitwise(Zone* zone,
+ Token::Value op,
+ HValue* context,
+ HValue* left,
+ HValue* right);
+
DECLARE_CONCRETE_INSTRUCTION(Bitwise)
protected:
@@ -3076,7 +3317,7 @@ class HBitwise: public HBitwiseBinaryOperation {
return op() == HBitwise::cast(other)->op();
}
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
private:
Token::Value op_;
@@ -3088,7 +3329,12 @@ class HShl: public HBitwiseBinaryOperation {
HShl(HValue* context, HValue* left, HValue* right)
: HBitwiseBinaryOperation(context, left, right) { }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
+
+ static HInstruction* NewHShl(Zone* zone,
+ HValue* context,
+ HValue* left,
+ HValue* right);
DECLARE_CONCRETE_INSTRUCTION(Shl)
@@ -3102,7 +3348,12 @@ class HShr: public HBitwiseBinaryOperation {
HShr(HValue* context, HValue* left, HValue* right)
: HBitwiseBinaryOperation(context, left, right) { }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
+
+ static HInstruction* NewHShr(Zone* zone,
+ HValue* context,
+ HValue* left,
+ HValue* right);
DECLARE_CONCRETE_INSTRUCTION(Shr)
@@ -3116,7 +3367,12 @@ class HSar: public HBitwiseBinaryOperation {
HSar(HValue* context, HValue* left, HValue* right)
: HBitwiseBinaryOperation(context, left, right) { }
- virtual Range* InferRange();
+ virtual Range* InferRange(Zone* zone);
+
+ static HInstruction* NewHSar(Zone* zone,
+ HValue* context,
+ HValue* left,
+ HValue* right);
DECLARE_CONCRETE_INSTRUCTION(Sar)
@@ -3128,7 +3384,7 @@ class HSar: public HBitwiseBinaryOperation {
class HOsrEntry: public HTemplateInstruction<0> {
public:
explicit HOsrEntry(int ast_id) : ast_id_(ast_id) {
- SetFlag(kChangesOsrEntries);
+ SetGVNFlag(kChangesOsrEntries);
}
int ast_id() const { return ast_id_; }
@@ -3200,13 +3456,27 @@ class HCallStub: public HUnaryCall {
class HUnknownOSRValue: public HTemplateInstruction<0> {
public:
- HUnknownOSRValue() { set_representation(Representation::Tagged()); }
+ HUnknownOSRValue()
+ : incoming_value_(NULL) {
+ set_representation(Representation::Tagged());
+ }
virtual Representation RequiredInputRepresentation(int index) {
return Representation::None();
}
+ void set_incoming_value(HPhi* value) {
+ incoming_value_ = value;
+ }
+
+ HPhi* incoming_value() {
+ return incoming_value_;
+ }
+
DECLARE_CONCRETE_INSTRUCTION(UnknownOSRValue)
+
+ private:
+ HPhi* incoming_value_;
};
@@ -3216,7 +3486,7 @@ class HLoadGlobalCell: public HTemplateInstruction<0> {
: cell_(cell), details_(details) {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
- SetFlag(kDependsOnGlobalVars);
+ SetGVNFlag(kDependsOnGlobalVars);
}
Handle<JSGlobalPropertyCell> cell() const { return cell_; }
@@ -3280,7 +3550,7 @@ class HLoadGlobalGeneric: public HTemplateInstruction<2> {
};
-static inline bool StoringValueNeedsWriteBarrier(HValue* value) {
+inline bool StoringValueNeedsWriteBarrier(HValue* value) {
return !value->type().IsBoolean()
&& !value->type().IsSmi()
&& !(value->IsConstant() && HConstant::cast(value)->ImmortalImmovable());
@@ -3295,7 +3565,7 @@ class HStoreGlobalCell: public HUnaryOperation {
: HUnaryOperation(value),
cell_(cell),
details_(details) {
- SetFlag(kChangesGlobalVars);
+ SetGVNFlag(kChangesGlobalVars);
}
Handle<JSGlobalPropertyCell> cell() const { return cell_; }
@@ -3325,9 +3595,9 @@ class HStoreGlobalGeneric: public HTemplateInstruction<3> {
HValue* global_object,
Handle<Object> name,
HValue* value,
- bool strict_mode)
+ StrictModeFlag strict_mode_flag)
: name_(name),
- strict_mode_(strict_mode) {
+ strict_mode_flag_(strict_mode_flag) {
SetOperandAt(0, context);
SetOperandAt(1, global_object);
SetOperandAt(2, value);
@@ -3339,7 +3609,7 @@ class HStoreGlobalGeneric: public HTemplateInstruction<3> {
HValue* global_object() { return OperandAt(1); }
Handle<Object> name() const { return name_; }
HValue* value() { return OperandAt(2); }
- bool strict_mode() { return strict_mode_; }
+ StrictModeFlag strict_mode_flag() { return strict_mode_flag_; }
virtual void PrintDataTo(StringStream* stream);
@@ -3351,20 +3621,54 @@ class HStoreGlobalGeneric: public HTemplateInstruction<3> {
private:
Handle<Object> name_;
- bool strict_mode_;
+ StrictModeFlag strict_mode_flag_;
};
class HLoadContextSlot: public HUnaryOperation {
public:
- HLoadContextSlot(HValue* context , int slot_index)
- : HUnaryOperation(context), slot_index_(slot_index) {
+ enum Mode {
+ // Perform a normal load of the context slot without checking its value.
+ kNoCheck,
+ // Load and check the value of the context slot. Deoptimize if it's the
+ // hole value. This is used for checking for loading of uninitialized
+ // harmony bindings where we deoptimize into full-codegen generated code
+ // which will subsequently throw a reference error.
+ kCheckDeoptimize,
+ // Load and check the value of the context slot. Return undefined if it's
+ // the hole value. This is used for non-harmony const assignments
+ kCheckReturnUndefined
+ };
+
+ HLoadContextSlot(HValue* context, Variable* var)
+ : HUnaryOperation(context), slot_index_(var->index()) {
+ ASSERT(var->IsContextSlot());
+ switch (var->mode()) {
+ case LET:
+ case CONST_HARMONY:
+ mode_ = kCheckDeoptimize;
+ break;
+ case CONST:
+ mode_ = kCheckReturnUndefined;
+ break;
+ default:
+ mode_ = kNoCheck;
+ }
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
- SetFlag(kDependsOnContextSlots);
+ SetGVNFlag(kDependsOnContextSlots);
}
int slot_index() const { return slot_index_; }
+ Mode mode() const { return mode_; }
+
+ bool DeoptimizesOnHole() {
+ return mode_ == kCheckDeoptimize;
+ }
+
+ bool RequiresHoleCheck() {
+ return mode_ != kNoCheck;
+ }
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
@@ -3382,26 +3686,49 @@ class HLoadContextSlot: public HUnaryOperation {
private:
int slot_index_;
+ Mode mode_;
};
class HStoreContextSlot: public HTemplateInstruction<2> {
public:
- HStoreContextSlot(HValue* context, int slot_index, HValue* value)
- : slot_index_(slot_index) {
+ enum Mode {
+ // Perform a normal store to the context slot without checking its previous
+ // value.
+ kNoCheck,
+ // Check the previous value of the context slot and deoptimize if it's the
+ // hole value. This is used for checking for assignments to uninitialized
+ // harmony bindings where we deoptimize into full-codegen generated code
+ // which will subsequently throw a reference error.
+ kCheckDeoptimize,
+ // Check the previous value and ignore assignment if it isn't a hole value
+ kCheckIgnoreAssignment
+ };
+
+ HStoreContextSlot(HValue* context, int slot_index, Mode mode, HValue* value)
+ : slot_index_(slot_index), mode_(mode) {
SetOperandAt(0, context);
SetOperandAt(1, value);
- SetFlag(kChangesContextSlots);
+ SetGVNFlag(kChangesContextSlots);
}
HValue* context() { return OperandAt(0); }
HValue* value() { return OperandAt(1); }
int slot_index() const { return slot_index_; }
+ Mode mode() const { return mode_; }
bool NeedsWriteBarrier() {
return StoringValueNeedsWriteBarrier(value());
}
+ bool DeoptimizesOnHole() {
+ return mode_ == kCheckDeoptimize;
+ }
+
+ bool RequiresHoleCheck() {
+ return mode_ != kNoCheck;
+ }
+
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
}
@@ -3412,6 +3739,7 @@ class HStoreContextSlot: public HTemplateInstruction<2> {
private:
int slot_index_;
+ Mode mode_;
};
@@ -3423,11 +3751,11 @@ class HLoadNamedField: public HUnaryOperation {
offset_(offset) {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
- SetFlag(kDependsOnMaps);
+ SetGVNFlag(kDependsOnMaps);
if (is_in_object) {
- SetFlag(kDependsOnInobjectFields);
+ SetGVNFlag(kDependsOnInobjectFields);
} else {
- SetFlag(kDependsOnBackingStoreFields);
+ SetGVNFlag(kDependsOnBackingStoreFields);
}
}
@@ -3521,7 +3849,7 @@ class HLoadFunctionPrototype: public HUnaryOperation {
: HUnaryOperation(function) {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
- SetFlag(kDependsOnCalls);
+ SetGVNFlag(kDependsOnCalls);
}
HValue* function() { return OperandAt(0); }
@@ -3539,11 +3867,16 @@ class HLoadFunctionPrototype: public HUnaryOperation {
class HLoadKeyedFastElement: public HTemplateInstruction<2> {
public:
- HLoadKeyedFastElement(HValue* obj, HValue* key) {
+ enum HoleCheckMode { PERFORM_HOLE_CHECK, OMIT_HOLE_CHECK };
+
+ HLoadKeyedFastElement(HValue* obj,
+ HValue* key,
+ HoleCheckMode hole_check_mode = PERFORM_HOLE_CHECK)
+ : hole_check_mode_(hole_check_mode) {
SetOperandAt(0, obj);
SetOperandAt(1, key);
set_representation(Representation::Tagged());
- SetFlag(kDependsOnArrayElements);
+ SetGVNFlag(kDependsOnArrayElements);
SetFlag(kUseGVN);
}
@@ -3564,7 +3897,14 @@ class HLoadKeyedFastElement: public HTemplateInstruction<2> {
DECLARE_CONCRETE_INSTRUCTION(LoadKeyedFastElement)
protected:
- virtual bool DataEquals(HValue* other) { return true; }
+ virtual bool DataEquals(HValue* other) {
+ if (!other->IsLoadKeyedFastElement()) return false;
+ HLoadKeyedFastElement* other_load = HLoadKeyedFastElement::cast(other);
+ return hole_check_mode_ == other_load->hole_check_mode_;
+ }
+
+ private:
+ HoleCheckMode hole_check_mode_;
};
@@ -3574,7 +3914,7 @@ class HLoadKeyedFastDoubleElement: public HTemplateInstruction<2> {
SetOperandAt(0, elements);
SetOperandAt(1, key);
set_representation(Representation::Double());
- SetFlag(kDependsOnDoubleArrayElements);
+ SetGVNFlag(kDependsOnDoubleArrayElements);
SetFlag(kUseGVN);
}
@@ -3611,9 +3951,9 @@ class HLoadKeyedSpecializedArrayElement: public HTemplateInstruction<2> {
} else {
set_representation(Representation::Integer32());
}
- SetFlag(kDependsOnSpecializedArrayElements);
+ SetGVNFlag(kDependsOnSpecializedArrayElements);
// Native code could change the specialized array.
- SetFlag(kDependsOnCalls);
+ SetGVNFlag(kDependsOnCalls);
SetFlag(kUseGVN);
}
@@ -3631,6 +3971,8 @@ class HLoadKeyedSpecializedArrayElement: public HTemplateInstruction<2> {
HValue* key() { return OperandAt(1); }
ElementsKind elements_kind() const { return elements_kind_; }
+ virtual Range* InferRange(Zone* zone);
+
DECLARE_CONCRETE_INSTRUCTION(LoadKeyedSpecializedArrayElement)
protected:
@@ -3666,6 +4008,8 @@ class HLoadKeyedGeneric: public HTemplateInstruction<3> {
return Representation::Tagged();
}
+ virtual HValue* Canonicalize();
+
DECLARE_CONCRETE_INSTRUCTION(LoadKeyedGeneric)
};
@@ -3683,9 +4027,9 @@ class HStoreNamedField: public HTemplateInstruction<2> {
SetOperandAt(0, obj);
SetOperandAt(1, val);
if (is_in_object_) {
- SetFlag(kChangesInobjectFields);
+ SetGVNFlag(kChangesInobjectFields);
} else {
- SetFlag(kChangesBackingStoreFields);
+ SetGVNFlag(kChangesBackingStoreFields);
}
}
@@ -3760,7 +4104,7 @@ class HStoreKeyedFastElement: public HTemplateInstruction<3> {
SetOperandAt(0, obj);
SetOperandAt(1, key);
SetOperandAt(2, val);
- SetFlag(kChangesArrayElements);
+ SetGVNFlag(kChangesArrayElements);
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -3785,10 +4129,6 @@ class HStoreKeyedFastElement: public HTemplateInstruction<3> {
}
}
- bool ValueNeedsSmiCheck() {
- return value_is_smi();
- }
-
virtual void PrintDataTo(StringStream* stream);
DECLARE_CONCRETE_INSTRUCTION(StoreKeyedFastElement)
@@ -3806,7 +4146,7 @@ class HStoreKeyedFastDoubleElement: public HTemplateInstruction<3> {
SetOperandAt(0, elements);
SetOperandAt(1, key);
SetOperandAt(2, val);
- SetFlag(kChangesDoubleArrayElements);
+ SetGVNFlag(kChangesDoubleArrayElements);
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -3840,7 +4180,7 @@ class HStoreKeyedSpecializedArrayElement: public HTemplateInstruction<3> {
HValue* val,
ElementsKind elements_kind)
: elements_kind_(elements_kind) {
- SetFlag(kChangesSpecializedArrayElements);
+ SetGVNFlag(kChangesSpecializedArrayElements);
SetOperandAt(0, external_elements);
SetOperandAt(1, key);
SetOperandAt(2, val);
@@ -3881,8 +4221,8 @@ class HStoreKeyedGeneric: public HTemplateInstruction<4> {
HValue* object,
HValue* key,
HValue* value,
- bool strict_mode)
- : strict_mode_(strict_mode) {
+ StrictModeFlag strict_mode_flag)
+ : strict_mode_flag_(strict_mode_flag) {
SetOperandAt(0, object);
SetOperandAt(1, key);
SetOperandAt(2, value);
@@ -3894,7 +4234,7 @@ class HStoreKeyedGeneric: public HTemplateInstruction<4> {
HValue* key() { return OperandAt(1); }
HValue* value() { return OperandAt(2); }
HValue* context() { return OperandAt(3); }
- bool strict_mode() { return strict_mode_; }
+ StrictModeFlag strict_mode_flag() { return strict_mode_flag_; }
virtual Representation RequiredInputRepresentation(int index) {
return Representation::Tagged();
@@ -3905,7 +4245,7 @@ class HStoreKeyedGeneric: public HTemplateInstruction<4> {
DECLARE_CONCRETE_INSTRUCTION(StoreKeyedGeneric)
private:
- bool strict_mode_;
+ StrictModeFlag strict_mode_flag_;
};
@@ -3918,7 +4258,8 @@ class HTransitionElementsKind: public HTemplateInstruction<1> {
transitioned_map_(transitioned_map) {
SetOperandAt(0, object);
SetFlag(kUseGVN);
- SetFlag(kChangesElementsKind);
+ SetGVNFlag(kChangesElementsKind);
+ SetGVNFlag(kChangesElementsPointer);
set_representation(Representation::Tagged());
}
@@ -3953,7 +4294,7 @@ class HStringAdd: public HBinaryOperation {
: HBinaryOperation(context, left, right) {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
- SetFlag(kDependsOnMaps);
+ SetGVNFlag(kDependsOnMaps);
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -3979,7 +4320,7 @@ class HStringCharCodeAt: public HTemplateInstruction<3> {
SetOperandAt(2, index);
set_representation(Representation::Integer32());
SetFlag(kUseGVN);
- SetFlag(kDependsOnMaps);
+ SetGVNFlag(kDependsOnMaps);
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -3998,8 +4339,8 @@ class HStringCharCodeAt: public HTemplateInstruction<3> {
protected:
virtual bool DataEquals(HValue* other) { return true; }
- virtual Range* InferRange() {
- return new Range(0, String::kMaxUC16CharCode);
+ virtual Range* InferRange(Zone* zone) {
+ return new(zone) Range(0, String::kMaxUtf16CodeUnit);
}
};
@@ -4034,7 +4375,7 @@ class HStringLength: public HUnaryOperation {
explicit HStringLength(HValue* string) : HUnaryOperation(string) {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
- SetFlag(kDependsOnMaps);
+ SetGVNFlag(kDependsOnMaps);
}
virtual Representation RequiredInputRepresentation(int index) {
@@ -4051,9 +4392,35 @@ class HStringLength: public HUnaryOperation {
protected:
virtual bool DataEquals(HValue* other) { return true; }
- virtual Range* InferRange() {
- return new Range(0, String::kMaxLength);
+ virtual Range* InferRange(Zone* zone) {
+ return new(zone) Range(0, String::kMaxLength);
+ }
+};
+
+
+class HAllocateObject: public HTemplateInstruction<1> {
+ public:
+ HAllocateObject(HValue* context, Handle<JSFunction> constructor)
+ : constructor_(constructor) {
+ SetOperandAt(0, context);
+ set_representation(Representation::Tagged());
+ }
+
+ // Maximum instance size for which allocations will be inlined.
+ static const int kMaxSize = 64 * kPointerSize;
+
+ HValue* context() { return OperandAt(0); }
+ Handle<JSFunction> constructor() { return constructor_; }
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Tagged();
}
+ virtual HType CalculateInferredType();
+
+ DECLARE_CONCRETE_INSTRUCTION(AllocateObject)
+
+ private:
+ Handle<JSFunction> constructor_;
};
@@ -4074,21 +4441,62 @@ class HMaterializedLiteral: public HTemplateInstruction<V> {
};
+class HFastLiteral: public HMaterializedLiteral<1> {
+ public:
+ HFastLiteral(HValue* context,
+ Handle<JSObject> boilerplate,
+ int total_size,
+ int literal_index,
+ int depth)
+ : HMaterializedLiteral<1>(literal_index, depth),
+ boilerplate_(boilerplate),
+ total_size_(total_size) {
+ SetOperandAt(0, context);
+ }
+
+ // Maximum depth and total number of elements and properties for literal
+ // graphs to be considered for fast deep-copying.
+ static const int kMaxLiteralDepth = 3;
+ static const int kMaxLiteralProperties = 8;
+
+ HValue* context() { return OperandAt(0); }
+ Handle<JSObject> boilerplate() const { return boilerplate_; }
+ int total_size() const { return total_size_; }
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Tagged();
+ }
+ virtual HType CalculateInferredType();
+
+ DECLARE_CONCRETE_INSTRUCTION(FastLiteral)
+
+ private:
+ Handle<JSObject> boilerplate_;
+ int total_size_;
+};
+
+
class HArrayLiteral: public HMaterializedLiteral<1> {
public:
HArrayLiteral(HValue* context,
- Handle<FixedArray> constant_elements,
+ Handle<HeapObject> boilerplate_object,
int length,
int literal_index,
int depth)
: HMaterializedLiteral<1>(literal_index, depth),
length_(length),
- constant_elements_(constant_elements) {
+ boilerplate_object_(boilerplate_object) {
SetOperandAt(0, context);
}
HValue* context() { return OperandAt(0); }
- Handle<FixedArray> constant_elements() const { return constant_elements_; }
+ ElementsKind boilerplate_elements_kind() const {
+ if (!boilerplate_object_->IsJSObject()) {
+ return FAST_ELEMENTS;
+ }
+ return Handle<JSObject>::cast(boilerplate_object_)->GetElementsKind();
+ }
+ Handle<HeapObject> boilerplate_object() const { return boilerplate_object_; }
int length() const { return length_; }
bool IsCopyOnWrite() const;
@@ -4102,7 +4510,7 @@ class HArrayLiteral: public HMaterializedLiteral<1> {
private:
int length_;
- Handle<FixedArray> constant_elements_;
+ Handle<HeapObject> boilerplate_object_;
};
@@ -4152,6 +4560,7 @@ class HRegExpLiteral: public HMaterializedLiteral<1> {
pattern_(pattern),
flags_(flags) {
SetOperandAt(0, context);
+ SetAllSideEffects();
}
HValue* context() { return OperandAt(0); }
@@ -4227,7 +4636,7 @@ class HToFastProperties: public HUnaryOperation {
// This instruction is not marked as having side effects, but
// changes the map of the input operand. Use it only when creating
// object literals.
- ASSERT(value->IsObjectLiteral());
+ ASSERT(value->IsObjectLiteral() || value->IsFastLiteral());
set_representation(Representation::Tagged());
}
@@ -4253,6 +4662,26 @@ class HValueOf: public HUnaryOperation {
};
+class HDateField: public HUnaryOperation {
+ public:
+ HDateField(HValue* date, Smi* index)
+ : HUnaryOperation(date), index_(index) {
+ set_representation(Representation::Tagged());
+ }
+
+ Smi* index() const { return index_; }
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Tagged();
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(DateField)
+
+ private:
+ Smi* index_;
+};
+
+
class HDeleteProperty: public HBinaryOperation {
public:
HDeleteProperty(HValue* context, HValue* obj, HValue* key)
@@ -4301,6 +4730,134 @@ class HIn: public HTemplateInstruction<3> {
DECLARE_CONCRETE_INSTRUCTION(In)
};
+
+class HCheckMapValue: public HTemplateInstruction<2> {
+ public:
+ HCheckMapValue(HValue* value,
+ HValue* map) {
+ SetOperandAt(0, value);
+ SetOperandAt(1, map);
+ set_representation(Representation::Tagged());
+ SetFlag(kUseGVN);
+ SetGVNFlag(kDependsOnMaps);
+ SetGVNFlag(kDependsOnElementsKind);
+ }
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Tagged();
+ }
+
+ virtual void PrintDataTo(StringStream* stream);
+
+ virtual HType CalculateInferredType() {
+ return HType::Tagged();
+ }
+
+ HValue* value() { return OperandAt(0); }
+ HValue* map() { return OperandAt(1); }
+
+ DECLARE_CONCRETE_INSTRUCTION(CheckMapValue)
+
+ protected:
+ virtual bool DataEquals(HValue* other) {
+ return true;
+ }
+};
+
+
+class HForInPrepareMap : public HTemplateInstruction<2> {
+ public:
+ HForInPrepareMap(HValue* context,
+ HValue* object) {
+ SetOperandAt(0, context);
+ SetOperandAt(1, object);
+ set_representation(Representation::Tagged());
+ SetAllSideEffects();
+ }
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Tagged();
+ }
+
+ HValue* context() { return OperandAt(0); }
+ HValue* enumerable() { return OperandAt(1); }
+
+ virtual void PrintDataTo(StringStream* stream);
+
+ virtual HType CalculateInferredType() {
+ return HType::Tagged();
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(ForInPrepareMap);
+};
+
+
+class HForInCacheArray : public HTemplateInstruction<2> {
+ public:
+ HForInCacheArray(HValue* enumerable,
+ HValue* keys,
+ int idx) : idx_(idx) {
+ SetOperandAt(0, enumerable);
+ SetOperandAt(1, keys);
+ set_representation(Representation::Tagged());
+ }
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Tagged();
+ }
+
+ HValue* enumerable() { return OperandAt(0); }
+ HValue* map() { return OperandAt(1); }
+ int idx() { return idx_; }
+
+ HForInCacheArray* index_cache() {
+ return index_cache_;
+ }
+
+ void set_index_cache(HForInCacheArray* index_cache) {
+ index_cache_ = index_cache;
+ }
+
+ virtual void PrintDataTo(StringStream* stream);
+
+ virtual HType CalculateInferredType() {
+ return HType::Tagged();
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(ForInCacheArray);
+
+ private:
+ int idx_;
+ HForInCacheArray* index_cache_;
+};
+
+
+class HLoadFieldByIndex : public HTemplateInstruction<2> {
+ public:
+ HLoadFieldByIndex(HValue* object,
+ HValue* index) {
+ SetOperandAt(0, object);
+ SetOperandAt(1, index);
+ set_representation(Representation::Tagged());
+ }
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return Representation::Tagged();
+ }
+
+ HValue* object() { return OperandAt(0); }
+ HValue* index() { return OperandAt(1); }
+
+ virtual void PrintDataTo(StringStream* stream);
+
+ virtual HType CalculateInferredType() {
+ return HType::Tagged();
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadFieldByIndex);
+};
+
+
#undef DECLARE_INSTRUCTION
#undef DECLARE_CONCRETE_INSTRUCTION
diff --git a/src/3rdparty/v8/src/hydrogen.cc b/src/3rdparty/v8/src/hydrogen.cc
index 4c716ef..f9d4191 100644
--- a/src/3rdparty/v8/src/hydrogen.cc
+++ b/src/3rdparty/v8/src/hydrogen.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -70,7 +70,8 @@ HBasicBlock::HBasicBlock(HGraph* graph)
deleted_phis_(4),
parent_loop_header_(NULL),
is_inline_return_target_(false),
- is_deoptimizing_(false) { }
+ is_deoptimizing_(false),
+ dominates_loop_successors_(false) { }
void HBasicBlock::AttachLoopInformation() {
@@ -96,7 +97,7 @@ void HBasicBlock::RemovePhi(HPhi* phi) {
ASSERT(phi->block() == this);
ASSERT(phis_.Contains(phi));
ASSERT(phi->HasNoUses() || !phi->is_live());
- phi->ClearOperands();
+ phi->Kill();
phis_.RemoveElement(phi);
phi->SetBlock(NULL);
}
@@ -167,8 +168,7 @@ void HBasicBlock::Finish(HControlInstruction* end) {
void HBasicBlock::Goto(HBasicBlock* block, bool drop_extra) {
if (block->IsInlineReturnTarget()) {
AddInstruction(new(zone()) HLeaveInlined);
- last_environment_ = last_environment()->outer();
- if (drop_extra) last_environment_->Drop(1);
+ last_environment_ = last_environment()->DiscardInlined(drop_extra);
}
AddSimulate(AstNode::kNoNumber);
HGoto* instr = new(zone()) HGoto(block);
@@ -182,8 +182,7 @@ void HBasicBlock::AddLeaveInlined(HValue* return_value,
ASSERT(target->IsInlineReturnTarget());
ASSERT(return_value != NULL);
AddInstruction(new(zone()) HLeaveInlined);
- last_environment_ = last_environment()->outer();
- if (drop_extra) last_environment_->Drop(1);
+ last_environment_ = last_environment()->DiscardInlined(drop_extra);
last_environment()->Push(return_value);
AddSimulate(AstNode::kNoNumber);
HGoto* instr = new(zone()) HGoto(target);
@@ -317,6 +316,62 @@ void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
}
+void HBasicBlock::AssignLoopSuccessorDominators() {
+ // Mark blocks that dominate all subsequent reachable blocks inside their
+ // loop. Exploit the fact that blocks are sorted in reverse post order. When
+ // the loop is visited in increasing block id order, if the number of
+ // non-loop-exiting successor edges at the dominator_candidate block doesn't
+ // exceed the number of previously encountered predecessor edges, there is no
+ // path from the loop header to any block with higher id that doesn't go
+ // through the dominator_candidate block. In this case, the
+ // dominator_candidate block is guaranteed to dominate all blocks reachable
+ // from it with higher ids.
+ HBasicBlock* last = loop_information()->GetLastBackEdge();
+ int outstanding_successors = 1; // one edge from the pre-header
+ // Header always dominates everything.
+ MarkAsLoopSuccessorDominator();
+ for (int j = block_id(); j <= last->block_id(); ++j) {
+ HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
+ for (HPredecessorIterator it(dominator_candidate); !it.Done();
+ it.Advance()) {
+ HBasicBlock* predecessor = it.Current();
+ // Don't count back edges.
+ if (predecessor->block_id() < dominator_candidate->block_id()) {
+ outstanding_successors--;
+ }
+ }
+
+ // If more successors than predecessors have been seen in the loop up to
+ // now, it's not possible to guarantee that the current block dominates
+ // all of the blocks with higher IDs. In this case, assume conservatively
+ // that those paths through loop that don't go through the current block
+ // contain all of the loop's dependencies. Also be careful to record
+ // dominator information about the current loop that's being processed,
+ // and not nested loops, which will be processed when
+ // AssignLoopSuccessorDominators gets called on their header.
+ ASSERT(outstanding_successors >= 0);
+ HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
+ if (outstanding_successors == 0 &&
+ (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
+ dominator_candidate->MarkAsLoopSuccessorDominator();
+ }
+ HControlInstruction* end = dominator_candidate->end();
+ for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
+ HBasicBlock* successor = it.Current();
+ // Only count successors that remain inside the loop and don't loop back
+ // to a loop header.
+ if (successor->block_id() > dominator_candidate->block_id() &&
+ successor->block_id() <= last->block_id()) {
+ // Backwards edges must land on loop headers.
+ ASSERT(successor->block_id() > dominator_candidate->block_id() ||
+ successor->IsLoopHeader());
+ outstanding_successors++;
+ }
+ }
+ }
+}
+
+
int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
for (int i = 0; i < predecessors_.length(); ++i) {
if (predecessors_[i] == predecessor) return i;
@@ -391,7 +446,7 @@ class ReachabilityAnalyzer BASE_EMBEDDED {
HBasicBlock* dont_visit)
: visited_count_(0),
stack_(16),
- reachable_(block_count),
+ reachable_(block_count, ZONE),
dont_visit_(dont_visit) {
PushBlock(entry_block);
Analyze();
@@ -545,7 +600,7 @@ HConstant* HGraph::GetConstantHole() {
HGraphBuilder::HGraphBuilder(CompilationInfo* info,
TypeFeedbackOracle* oracle)
: function_state_(NULL),
- initial_function_state_(this, info, oracle, false),
+ initial_function_state_(this, info, oracle, NORMAL_RETURN),
ast_context_(NULL),
break_scope_(NULL),
graph_(NULL),
@@ -627,28 +682,28 @@ HGraph::HGraph(CompilationInfo* info)
Handle<Code> HGraph::Compile(CompilationInfo* info) {
int values = GetMaximumValueID();
- if (values > LAllocator::max_initial_value_ids()) {
- if (FLAG_trace_bailout) PrintF("Function is too big\n");
+ if (values > LUnallocated::kMaxVirtualRegisters) {
+ if (FLAG_trace_bailout) {
+ PrintF("Not enough virtual registers for (values).\n");
+ }
return Handle<Code>::null();
}
-
LAllocator allocator(values, this);
LChunkBuilder builder(info, this, &allocator);
LChunk* chunk = builder.Build();
if (chunk == NULL) return Handle<Code>::null();
- if (!FLAG_alloc_lithium) return Handle<Code>::null();
-
- allocator.Allocate(chunk);
-
- if (!FLAG_use_lithium) return Handle<Code>::null();
+ if (!allocator.Allocate(chunk)) {
+ if (FLAG_trace_bailout) {
+ PrintF("Not enough virtual registers (regalloc).\n");
+ }
+ return Handle<Code>::null();
+ }
MacroAssembler assembler(info->isolate(), NULL, 0);
LCodeGen generator(chunk, &assembler, info);
- if (FLAG_eliminate_empty_blocks) {
- chunk->MarkEmptyBlocks();
- }
+ chunk->MarkEmptyBlocks();
if (generator.GenerateCode()) {
if (FLAG_trace_codegen) {
@@ -675,7 +730,7 @@ HBasicBlock* HGraph::CreateBasicBlock() {
void HGraph::Canonicalize() {
if (!FLAG_use_canonicalizing) return;
- HPhase phase("Canonicalize", this);
+ HPhase phase("H_Canonicalize", this);
for (int i = 0; i < blocks()->length(); ++i) {
HInstruction* instr = blocks()->at(i)->first();
while (instr != NULL) {
@@ -688,8 +743,8 @@ void HGraph::Canonicalize() {
void HGraph::OrderBlocks() {
- HPhase phase("Block ordering");
- BitVector visited(blocks_.length());
+ HPhase phase("H_Block ordering");
+ BitVector visited(blocks_.length(), zone());
ZoneList<HBasicBlock*> reverse_result(8);
HBasicBlock* start = blocks_[0];
@@ -734,6 +789,7 @@ void HGraph::Postorder(HBasicBlock* block,
Postorder(it.Current(), visited, order, block);
}
} else {
+ ASSERT(block->IsFinished());
for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
Postorder(it.Current(), visited, order, loop_header);
}
@@ -749,14 +805,16 @@ void HGraph::Postorder(HBasicBlock* block,
void HGraph::AssignDominators() {
- HPhase phase("Assign dominators", this);
+ HPhase phase("H_Assign dominators", this);
for (int i = 0; i < blocks_.length(); ++i) {
- if (blocks_[i]->IsLoopHeader()) {
+ HBasicBlock* block = blocks_[i];
+ if (block->IsLoopHeader()) {
// Only the first predecessor of a loop header is from outside the loop.
// All others are back edges, and thus cannot dominate the loop header.
- blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->first());
+ block->AssignCommonDominator(block->predecessors()->first());
+ block->AssignLoopSuccessorDominators();
} else {
- for (int j = 0; j < blocks_[i]->predecessors()->length(); ++j) {
+ for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
}
}
@@ -766,7 +824,7 @@ void HGraph::AssignDominators() {
// Mark all blocks that are dominated by an unconditional soft deoptimize to
// prevent code motion across those blocks.
void HGraph::PropagateDeoptimizingMark() {
- HPhase phase("Propagate deoptimizing mark", this);
+ HPhase phase("H_Propagate deoptimizing mark", this);
MarkAsDeoptimizingRecursively(entry_block());
}
@@ -779,7 +837,7 @@ void HGraph::MarkAsDeoptimizingRecursively(HBasicBlock* block) {
}
void HGraph::EliminateRedundantPhis() {
- HPhase phase("Redundant phi elimination", this);
+ HPhase phase("H_Redundant phi elimination", this);
// Worklist of phis that can potentially be eliminated. Initialized with
// all phi nodes. When elimination of a phi node modifies another phi node
@@ -813,7 +871,7 @@ void HGraph::EliminateRedundantPhis() {
void HGraph::EliminateUnreachablePhis() {
- HPhase phase("Unreachable phi elimination", this);
+ HPhase phase("H_Unreachable phi elimination", this);
// Initialize worklist.
ZoneList<HPhi*> phi_list(blocks_.length());
@@ -897,7 +955,7 @@ void HGraph::CollectPhis() {
void HGraph::InferTypes(ZoneList<HValue*>* worklist) {
- BitVector in_worklist(GetMaximumValueID());
+ BitVector in_worklist(GetMaximumValueID(), zone());
for (int i = 0; i < worklist->length(); ++i) {
ASSERT(!in_worklist.Contains(worklist->at(i)->id()));
in_worklist.Add(worklist->at(i)->id());
@@ -921,7 +979,8 @@ void HGraph::InferTypes(ZoneList<HValue*>* worklist) {
class HRangeAnalysis BASE_EMBEDDED {
public:
- explicit HRangeAnalysis(HGraph* graph) : graph_(graph), changed_ranges_(16) {}
+ explicit HRangeAnalysis(HGraph* graph) :
+ graph_(graph), zone_(graph->isolate()->zone()), changed_ranges_(16) { }
void Analyze();
@@ -935,6 +994,7 @@ class HRangeAnalysis BASE_EMBEDDED {
void AddRange(HValue* value, Range* range);
HGraph* graph_;
+ Zone* zone_;
ZoneList<HValue*> changed_ranges_;
};
@@ -950,7 +1010,7 @@ void HRangeAnalysis::TraceRange(const char* msg, ...) {
void HRangeAnalysis::Analyze() {
- HPhase phase("Range analysis", graph_);
+ HPhase phase("H_Range analysis", graph_);
Analyze(graph_->entry_block());
}
@@ -1021,14 +1081,14 @@ void HRangeAnalysis::UpdateControlFlowRange(Token::Value op,
if (op == Token::EQ || op == Token::EQ_STRICT) {
// The same range has to apply for value.
- new_range = range->Copy();
+ new_range = range->Copy(zone_);
} else if (op == Token::LT || op == Token::LTE) {
- new_range = range->CopyClearLower();
+ new_range = range->CopyClearLower(zone_);
if (op == Token::LT) {
new_range->AddConstant(-1);
}
} else if (op == Token::GT || op == Token::GTE) {
- new_range = range->CopyClearUpper();
+ new_range = range->CopyClearUpper(zone_);
if (op == Token::GT) {
new_range->AddConstant(1);
}
@@ -1043,7 +1103,7 @@ void HRangeAnalysis::UpdateControlFlowRange(Token::Value op,
void HRangeAnalysis::InferRange(HValue* value) {
ASSERT(!value->HasRange());
if (!value->representation().IsNone()) {
- value->ComputeInitialRange();
+ value->ComputeInitialRange(zone_);
Range* range = value->range();
TraceRange("Initial inferred range of %d (%s) set to [%d,%d]\n",
value->id(),
@@ -1064,7 +1124,7 @@ void HRangeAnalysis::RollBackTo(int index) {
void HRangeAnalysis::AddRange(HValue* value, Range* range) {
Range* original_range = value->range();
- value->AddNewRange(range);
+ value->AddNewRange(range, zone_);
changed_ranges_.Add(value);
Range* new_range = value->range();
TraceRange("Updated range of %d set to [%d,%d]\n",
@@ -1105,10 +1165,10 @@ HValueMap::HValueMap(Zone* zone, const HValueMap* other)
}
-void HValueMap::Kill(int flags) {
- int depends_flags = HValue::ConvertChangesToDependsFlags(flags);
- if ((present_flags_ & depends_flags) == 0) return;
- present_flags_ = 0;
+void HValueMap::Kill(GVNFlagSet flags) {
+ GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(flags);
+ if (!present_flags_.ContainsAnyOf(depends_flags)) return;
+ present_flags_.RemoveAll();
for (int i = 0; i < array_size_; ++i) {
HValue* value = array_[i].value;
if (value != NULL) {
@@ -1117,7 +1177,8 @@ void HValueMap::Kill(int flags) {
int next;
for (int current = array_[i].next; current != kNil; current = next) {
next = lists_[current].next;
- if ((lists_[current].value->flags() & depends_flags) != 0) {
+ HValue* value = lists_[current].value;
+ if (value->gvn_flags().ContainsAnyOf(depends_flags)) {
// Drop it.
count_--;
lists_[current].next = free_list_head_;
@@ -1126,13 +1187,14 @@ void HValueMap::Kill(int flags) {
// Keep it.
lists_[current].next = kept;
kept = current;
- present_flags_ |= lists_[current].value->flags();
+ present_flags_.Add(value->gvn_flags());
}
}
array_[i].next = kept;
// Now possibly drop directly indexed element.
- if ((array_[i].value->flags() & depends_flags) != 0) { // Drop it.
+ value = array_[i].value;
+ if (value->gvn_flags().ContainsAnyOf(depends_flags)) { // Drop it.
count_--;
int head = array_[i].next;
if (head == kNil) {
@@ -1144,7 +1206,7 @@ void HValueMap::Kill(int flags) {
free_list_head_ = head;
}
} else {
- present_flags_ |= array_[i].value->flags(); // Keep it.
+ present_flags_.Add(value->gvn_flags()); // Keep it.
}
}
}
@@ -1351,8 +1413,8 @@ class HGlobalValueNumberer BASE_EMBEDDED {
loop_side_effects_(graph->blocks()->length()),
visited_on_paths_(graph->zone(), graph->blocks()->length()) {
ASSERT(info->isolate()->heap()->allow_allocation(false));
- block_side_effects_.AddBlock(0, graph_->blocks()->length());
- loop_side_effects_.AddBlock(0, graph_->blocks()->length());
+ block_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length());
+ loop_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length());
}
~HGlobalValueNumberer() {
ASSERT(!info_->isolate()->heap()->allow_allocation(true));
@@ -1362,14 +1424,17 @@ class HGlobalValueNumberer BASE_EMBEDDED {
bool Analyze();
private:
- int CollectSideEffectsOnPathsToDominatedBlock(HBasicBlock* dominator,
- HBasicBlock* dominated);
+ GVNFlagSet CollectSideEffectsOnPathsToDominatedBlock(
+ HBasicBlock* dominator,
+ HBasicBlock* dominated);
void AnalyzeBlock(HBasicBlock* block, HValueMap* map);
void ComputeBlockSideEffects();
void LoopInvariantCodeMotion();
void ProcessLoopBlock(HBasicBlock* block,
HBasicBlock* before_loop,
- int loop_kills);
+ GVNFlagSet loop_kills,
+ GVNFlagSet* accumulated_first_time_depends,
+ GVNFlagSet* accumulated_first_time_changes);
bool AllowCodeMotion();
bool ShouldMove(HInstruction* instr, HBasicBlock* loop_header);
@@ -1382,10 +1447,10 @@ class HGlobalValueNumberer BASE_EMBEDDED {
bool removed_side_effects_;
// A map of block IDs to their side effects.
- ZoneList<int> block_side_effects_;
+ ZoneList<GVNFlagSet> block_side_effects_;
// A map of loop header block IDs to their loop's side effects.
- ZoneList<int> loop_side_effects_;
+ ZoneList<GVNFlagSet> loop_side_effects_;
// Used when collecting side effects on paths from dominator to
// dominated.
@@ -1394,6 +1459,7 @@ class HGlobalValueNumberer BASE_EMBEDDED {
bool HGlobalValueNumberer::Analyze() {
+ removed_side_effects_ = false;
ComputeBlockSideEffects();
if (FLAG_loop_invariant_code_motion) {
LoopInvariantCodeMotion();
@@ -1405,28 +1471,40 @@ bool HGlobalValueNumberer::Analyze() {
void HGlobalValueNumberer::ComputeBlockSideEffects() {
+ // The Analyze phase of GVN can be called multiple times. Clear loop side
+ // effects before computing them to erase the contents from previous Analyze
+ // passes.
+ for (int i = 0; i < loop_side_effects_.length(); ++i) {
+ loop_side_effects_[i].RemoveAll();
+ }
for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
// Compute side effects for the block.
HBasicBlock* block = graph_->blocks()->at(i);
HInstruction* instr = block->first();
int id = block->block_id();
- int side_effects = 0;
+ GVNFlagSet side_effects;
while (instr != NULL) {
- side_effects |= instr->ChangesFlags();
+ side_effects.Add(instr->ChangesFlags());
+ if (instr->IsSoftDeoptimize()) {
+ block_side_effects_[id].RemoveAll();
+ side_effects.RemoveAll();
+ break;
+ }
instr = instr->next();
}
- block_side_effects_[id] |= side_effects;
+ block_side_effects_[id].Add(side_effects);
// Loop headers are part of their loop.
if (block->IsLoopHeader()) {
- loop_side_effects_[id] |= side_effects;
+ loop_side_effects_[id].Add(side_effects);
}
// Propagate loop side effects upwards.
if (block->HasParentLoopHeader()) {
int header_id = block->parent_loop_header()->block_id();
- loop_side_effects_[header_id] |=
- block->IsLoopHeader() ? loop_side_effects_[id] : side_effects;
+ loop_side_effects_[header_id].Add(block->IsLoopHeader()
+ ? loop_side_effects_[id]
+ : side_effects);
}
}
}
@@ -1436,50 +1514,114 @@ void HGlobalValueNumberer::LoopInvariantCodeMotion() {
for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
HBasicBlock* block = graph_->blocks()->at(i);
if (block->IsLoopHeader()) {
- int side_effects = loop_side_effects_[block->block_id()];
+ GVNFlagSet side_effects = loop_side_effects_[block->block_id()];
TraceGVN("Try loop invariant motion for block B%d effects=0x%x\n",
block->block_id(),
- side_effects);
+ side_effects.ToIntegral());
+ GVNFlagSet accumulated_first_time_depends;
+ GVNFlagSet accumulated_first_time_changes;
HBasicBlock* last = block->loop_information()->GetLastBackEdge();
for (int j = block->block_id(); j <= last->block_id(); ++j) {
- ProcessLoopBlock(graph_->blocks()->at(j), block, side_effects);
+ ProcessLoopBlock(graph_->blocks()->at(j), block, side_effects,
+ &accumulated_first_time_depends,
+ &accumulated_first_time_changes);
}
}
}
}
-void HGlobalValueNumberer::ProcessLoopBlock(HBasicBlock* block,
- HBasicBlock* loop_header,
- int loop_kills) {
+void HGlobalValueNumberer::ProcessLoopBlock(
+ HBasicBlock* block,
+ HBasicBlock* loop_header,
+ GVNFlagSet loop_kills,
+ GVNFlagSet* first_time_depends,
+ GVNFlagSet* first_time_changes) {
HBasicBlock* pre_header = loop_header->predecessors()->at(0);
- int depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
+ GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
TraceGVN("Loop invariant motion for B%d depends_flags=0x%x\n",
block->block_id(),
- depends_flags);
+ depends_flags.ToIntegral());
HInstruction* instr = block->first();
while (instr != NULL) {
HInstruction* next = instr->next();
- if (instr->CheckFlag(HValue::kUseGVN) &&
- (instr->flags() & depends_flags) == 0) {
- TraceGVN("Checking instruction %d (%s)\n",
+ bool hoisted = false;
+ if (instr->CheckFlag(HValue::kUseGVN)) {
+ TraceGVN("Checking instruction %d (%s) instruction GVN flags 0x%X, "
+ "loop kills 0x%X\n",
instr->id(),
- instr->Mnemonic());
- bool inputs_loop_invariant = true;
- for (int i = 0; i < instr->OperandCount(); ++i) {
- if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
- inputs_loop_invariant = false;
+ instr->Mnemonic(),
+ instr->gvn_flags().ToIntegral(),
+ depends_flags.ToIntegral());
+ bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
+ if (instr->IsTransitionElementsKind()) {
+ // It's possible to hoist transitions out of a loop as long as the
+ // hoisting wouldn't move the transition past a DependsOn of one of it's
+ // changes or any instructions that might change an objects map or
+ // elements contents.
+ GVNFlagSet changes = instr->ChangesFlags();
+ GVNFlagSet hoist_depends_blockers =
+ HValue::ConvertChangesToDependsFlags(changes);
+ // In addition to not hoisting transitions above other instructions that
+ // change dependencies that the transition changes, it must not be
+ // hoisted above map changes and stores to an elements backing store
+ // that the transition might change.
+ GVNFlagSet hoist_change_blockers = changes;
+ hoist_change_blockers.Add(kChangesMaps);
+ HTransitionElementsKind* trans = HTransitionElementsKind::cast(instr);
+ if (trans->original_map()->has_fast_double_elements()) {
+ hoist_change_blockers.Add(kChangesDoubleArrayElements);
}
+ if (trans->transitioned_map()->has_fast_double_elements()) {
+ hoist_change_blockers.Add(kChangesArrayElements);
+ }
+ TraceGVN("Checking dependencies on HTransitionElementsKind %d (%s) "
+ "hoist depends blockers 0x%X, hoist change blockers 0x%X, "
+ "accumulated depends 0x%X, accumulated changes 0x%X\n",
+ instr->id(),
+ instr->Mnemonic(),
+ hoist_depends_blockers.ToIntegral(),
+ hoist_change_blockers.ToIntegral(),
+ first_time_depends->ToIntegral(),
+ first_time_changes->ToIntegral());
+ // It's possible to hoist transition from the current loop loop only if
+ // they dominate all of the successor blocks in the same loop and there
+ // are not any instructions that have Changes/DependsOn that intervene
+ // between it and the beginning of the loop header.
+ bool in_nested_loop = block != loop_header &&
+ ((block->parent_loop_header() != loop_header) ||
+ block->IsLoopHeader());
+ can_hoist = !in_nested_loop &&
+ block->IsLoopSuccessorDominator() &&
+ !first_time_depends->ContainsAnyOf(hoist_depends_blockers) &&
+ !first_time_changes->ContainsAnyOf(hoist_change_blockers);
}
- if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
- TraceGVN("Found loop invariant instruction %d\n", instr->id());
- // Move the instruction out of the loop.
- instr->Unlink();
- instr->InsertBefore(pre_header->end());
+ if (can_hoist) {
+ bool inputs_loop_invariant = true;
+ for (int i = 0; i < instr->OperandCount(); ++i) {
+ if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
+ inputs_loop_invariant = false;
+ }
+ }
+
+ if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
+ TraceGVN("Hoisting loop invariant instruction %d\n", instr->id());
+ // Move the instruction out of the loop.
+ instr->Unlink();
+ instr->InsertBefore(pre_header->end());
+ if (instr->HasSideEffects()) removed_side_effects_ = true;
+ hoisted = true;
+ }
}
}
+ if (!hoisted) {
+ // If an instruction is not hoisted, we have to account for its side
+ // effects when hoisting later HTransitionElementsKind instructions.
+ first_time_depends->Add(instr->DependsOnFlags());
+ first_time_changes->Add(instr->ChangesFlags());
+ }
instr = next;
}
}
@@ -1498,20 +1640,20 @@ bool HGlobalValueNumberer::ShouldMove(HInstruction* instr,
}
-int HGlobalValueNumberer::CollectSideEffectsOnPathsToDominatedBlock(
+GVNFlagSet HGlobalValueNumberer::CollectSideEffectsOnPathsToDominatedBlock(
HBasicBlock* dominator, HBasicBlock* dominated) {
- int side_effects = 0;
+ GVNFlagSet side_effects;
for (int i = 0; i < dominated->predecessors()->length(); ++i) {
HBasicBlock* block = dominated->predecessors()->at(i);
if (dominator->block_id() < block->block_id() &&
block->block_id() < dominated->block_id() &&
visited_on_paths_.Add(block->block_id())) {
- side_effects |= block_side_effects_[block->block_id()];
+ side_effects.Add(block_side_effects_[block->block_id()]);
if (block->IsLoopHeader()) {
- side_effects |= loop_side_effects_[block->block_id()];
+ side_effects.Add(loop_side_effects_[block->block_id()]);
}
- side_effects |= CollectSideEffectsOnPathsToDominatedBlock(
- dominator, block);
+ side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
+ dominator, block));
}
}
return side_effects;
@@ -1532,8 +1674,8 @@ void HGlobalValueNumberer::AnalyzeBlock(HBasicBlock* block, HValueMap* map) {
HInstruction* instr = block->first();
while (instr != NULL) {
HInstruction* next = instr->next();
- int flags = instr->ChangesFlags();
- if (flags != 0) {
+ GVNFlagSet flags = instr->ChangesFlags();
+ if (!flags.IsEmpty()) {
// Clear all instructions in the map that are affected by side effects.
map->Kill(flags);
TraceGVN("Instruction %d kills\n", instr->id());
@@ -1584,7 +1726,9 @@ void HGlobalValueNumberer::AnalyzeBlock(HBasicBlock* block, HValueMap* map) {
class HInferRepresentation BASE_EMBEDDED {
public:
explicit HInferRepresentation(HGraph* graph)
- : graph_(graph), worklist_(8), in_worklist_(graph->GetMaximumValueID()) {}
+ : graph_(graph),
+ worklist_(8),
+ in_worklist_(graph->GetMaximumValueID(), graph->zone()) { }
void Analyze();
@@ -1622,6 +1766,12 @@ void HInferRepresentation::InferBasedOnInputs(HValue* current) {
ASSERT(current->CheckFlag(HValue::kFlexibleRepresentation));
Representation inferred = current->InferredRepresentation();
if (inferred.IsSpecialization()) {
+ if (FLAG_trace_representation) {
+ PrintF("Changing #%d representation %s -> %s based on inputs\n",
+ current->id(),
+ r.Mnemonic(),
+ inferred.Mnemonic());
+ }
current->ChangeRepresentation(inferred);
AddDependantsToWorklist(current);
}
@@ -1649,6 +1799,12 @@ void HInferRepresentation::InferBasedOnUses(HValue* value) {
Representation new_rep = TryChange(value);
if (!new_rep.IsNone()) {
if (!value->representation().Equals(new_rep)) {
+ if (FLAG_trace_representation) {
+ PrintF("Changing #%d representation %s -> %s based on uses\n",
+ value->id(),
+ r.Mnemonic(),
+ new_rep.Mnemonic());
+ }
value->ChangeRepresentation(new_rep);
AddDependantsToWorklist(value);
}
@@ -1665,7 +1821,7 @@ Representation HInferRepresentation::TryChange(HValue* value) {
Representation rep = use->RequiredInputRepresentation(it.index());
if (rep.IsNone()) continue;
if (use->IsPhi()) HPhi::cast(use)->AddIndirectUsesTo(&use_count[0]);
- ++use_count[rep.kind()];
+ use_count[rep.kind()] += use->LoopWeight();
}
int tagged_count = use_count[Representation::kTagged];
int double_count = use_count[Representation::kDouble];
@@ -1678,7 +1834,7 @@ Representation HInferRepresentation::TryChange(HValue* value) {
}
// Prefer unboxing over boxing, the latter is more expensive.
- if (tagged_count > non_tagged_count) Representation::None();
+ if (tagged_count > non_tagged_count) return Representation::None();
// Prefer Integer32 over Double, if possible.
if (int32_count > 0 && value->IsConvertibleToInteger()) {
@@ -1692,7 +1848,7 @@ Representation HInferRepresentation::TryChange(HValue* value) {
void HInferRepresentation::Analyze() {
- HPhase phase("Infer representations", graph_);
+ HPhase phase("H_Infer representations", graph_);
// (1) Initialize bit vectors and count real uses. Each phi gets a
// bit-vector of length <number of phis>.
@@ -1701,7 +1857,7 @@ void HInferRepresentation::Analyze() {
ZoneList<BitVector*> connected_phis(phi_count);
for (int i = 0; i < phi_count; ++i) {
phi_list->at(i)->InitRealUses(i);
- BitVector* connected_set = new(zone()) BitVector(phi_count);
+ BitVector* connected_set = new(zone()) BitVector(phi_count, graph_->zone());
connected_set->Add(i);
connected_phis.Add(connected_set);
}
@@ -1771,7 +1927,7 @@ void HInferRepresentation::Analyze() {
void HGraph::InitializeInferredTypes() {
- HPhase phase("Inferring types", this);
+ HPhase phase("H_Inferring types", this);
InitializeInferredTypes(0, this->blocks_.length() - 1);
}
@@ -1908,8 +2064,7 @@ void HGraph::InsertRepresentationChangesForValue(HValue* value) {
void HGraph::InsertRepresentationChanges() {
- HPhase phase("Insert representation changes", this);
-
+ HPhase phase("H_Representation changes", this);
// Compute truncation flag for phis: Initially assume that all
// int32-phis allow truncation and iteratively remove the ones that
@@ -1928,13 +2083,9 @@ void HGraph::InsertRepresentationChanges() {
for (int i = 0; i < phi_list()->length(); i++) {
HPhi* phi = phi_list()->at(i);
if (!phi->CheckFlag(HValue::kTruncatingToInt32)) continue;
- for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
- HValue* use = it.value();
- if (!use->CheckFlag(HValue::kTruncatingToInt32)) {
- phi->ClearFlag(HValue::kTruncatingToInt32);
- change = true;
- break;
- }
+ if (!phi->CheckUsesForFlag(HValue::kTruncatingToInt32)) {
+ phi->ClearFlag(HValue::kTruncatingToInt32);
+ change = true;
}
}
}
@@ -1969,7 +2120,7 @@ void HGraph::RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi* phi) {
void HGraph::MarkDeoptimizeOnUndefined() {
- HPhase phase("MarkDeoptimizeOnUndefined", this);
+ HPhase phase("H_MarkDeoptimizeOnUndefined", this);
// Compute DeoptimizeOnUndefined flag for phis.
// Any phi that can reach a use with DeoptimizeOnUndefined set must
// have DeoptimizeOnUndefined set. Currently only HCompareIDAndBranch, with
@@ -1991,7 +2142,7 @@ void HGraph::MarkDeoptimizeOnUndefined() {
void HGraph::ComputeMinusZeroChecks() {
- BitVector visited(GetMaximumValueID());
+ BitVector visited(GetMaximumValueID(), zone());
for (int i = 0; i < blocks_.length(); ++i) {
for (HInstruction* current = blocks_[i]->first();
current != NULL;
@@ -2019,12 +2170,12 @@ void HGraph::ComputeMinusZeroChecks() {
FunctionState::FunctionState(HGraphBuilder* owner,
CompilationInfo* info,
TypeFeedbackOracle* oracle,
- bool drop_extra)
+ ReturnHandlingFlag return_handling)
: owner_(owner),
compilation_info_(info),
oracle_(oracle),
call_context_(NULL),
- drop_extra_(drop_extra),
+ return_handling_(return_handling),
function_return_(NULL),
test_context_(NULL),
outer_(owner->function_state()) {
@@ -2067,6 +2218,7 @@ AstContext::AstContext(HGraphBuilder* owner, Expression::Context kind)
for_typeof_(false) {
owner->set_ast_context(this); // Push.
#ifdef DEBUG
+ ASSERT(owner->environment()->frame_type() == JS_FUNCTION);
original_length_ = owner->environment()->length();
#endif
}
@@ -2080,14 +2232,16 @@ AstContext::~AstContext() {
EffectContext::~EffectContext() {
ASSERT(owner()->HasStackOverflow() ||
owner()->current_block() == NULL ||
- owner()->environment()->length() == original_length_);
+ (owner()->environment()->length() == original_length_ &&
+ owner()->environment()->frame_type() == JS_FUNCTION));
}
ValueContext::~ValueContext() {
ASSERT(owner()->HasStackOverflow() ||
owner()->current_block() == NULL ||
- owner()->environment()->length() == original_length_ + 1);
+ (owner()->environment()->length() == original_length_ + 1 &&
+ owner()->environment()->frame_type() == JS_FUNCTION));
}
@@ -2292,7 +2446,7 @@ HGraph* HGraphBuilder::CreateGraph() {
if (FLAG_hydrogen_stats) HStatistics::Instance()->Initialize(info());
{
- HPhase phase("Block building");
+ HPhase phase("H_Block building");
current_block_ = graph()->entry_block();
Scope* scope = info()->scope();
@@ -2300,7 +2454,7 @@ HGraph* HGraphBuilder::CreateGraph() {
Bailout("function with illegal redeclaration");
return NULL;
}
- SetupScope(scope);
+ SetUpScope(scope);
// Add an edge to the body entry. This is warty: the graph's start
// environment will be used by the Lithium translation as the initial
@@ -2326,7 +2480,7 @@ HGraph* HGraphBuilder::CreateGraph() {
// Handle implicit declaration of the function name in named function
// expressions before other declarations.
if (scope->is_function_scope() && scope->function() != NULL) {
- HandleDeclaration(scope->function(), CONST, NULL);
+ HandleDeclaration(scope->function(), CONST, NULL, NULL);
}
VisitDeclarations(scope->declarations());
AddSimulate(AstNode::kDeclarationsId);
@@ -2366,6 +2520,14 @@ HGraph* HGraphBuilder::CreateGraph() {
if (FLAG_eliminate_dead_phis) graph()->EliminateUnreachablePhis();
graph()->CollectPhis();
+ if (graph()->has_osr_loop_entry()) {
+ const ZoneList<HPhi*>* phis = graph()->osr_loop_entry()->phis();
+ for (int j = 0; j < phis->length(); j++) {
+ HPhi* phi = phis->at(j);
+ graph()->osr_values()->at(phi->merged_index())->set_incoming_value(phi);
+ }
+ }
+
HInferRepresentation rep(graph());
rep.Analyze();
@@ -2377,14 +2539,15 @@ HGraph* HGraphBuilder::CreateGraph() {
// Perform common subexpression elimination and loop-invariant code motion.
if (FLAG_use_gvn) {
- HPhase phase("Global value numbering", graph());
+ HPhase phase("H_Global value numbering", graph());
HGlobalValueNumberer gvn(graph(), info());
bool removed_side_effects = gvn.Analyze();
// Trigger a second analysis pass to further eliminate duplicate values that
// could only be discovered by removing side-effect-generating instructions
// during the first pass.
if (FLAG_smi_only_arrays && removed_side_effects) {
- gvn.Analyze();
+ removed_side_effects = gvn.Analyze();
+ ASSERT(!removed_side_effects);
}
}
@@ -2409,7 +2572,7 @@ HGraph* HGraphBuilder::CreateGraph() {
void HGraph::ReplaceCheckedValues() {
- HPhase phase("Replace checked values", this);
+ HPhase phase("H_Replace checked values", this);
for (int i = 0; i < blocks()->length(); ++i) {
HInstruction* instr = blocks()->at(i)->first();
while (instr != NULL) {
@@ -2449,8 +2612,8 @@ void HGraphBuilder::PushAndAdd(HInstruction* instr) {
}
-template <int V>
-HInstruction* HGraphBuilder::PreProcessCall(HCall<V>* call) {
+template <class Instruction>
+HInstruction* HGraphBuilder::PreProcessCall(Instruction* call) {
int count = call->argument_count();
ZoneList<HValue*> arguments(count);
for (int i = 0; i < count; ++i) {
@@ -2464,12 +2627,16 @@ HInstruction* HGraphBuilder::PreProcessCall(HCall<V>* call) {
}
-void HGraphBuilder::SetupScope(Scope* scope) {
+void HGraphBuilder::SetUpScope(Scope* scope) {
HConstant* undefined_constant = new(zone()) HConstant(
isolate()->factory()->undefined_value(), Representation::Tagged());
AddInstruction(undefined_constant);
graph_->set_undefined_constant(undefined_constant);
+ HArgumentsObject* object = new(zone()) HArgumentsObject;
+ AddInstruction(object);
+ graph()->SetArgumentsObject(object);
+
// Set the initial values of parameters including "this". "This" has
// parameter index 0.
ASSERT_EQ(scope->num_parameters() + 1, environment()->parameter_count());
@@ -2496,10 +2663,9 @@ void HGraphBuilder::SetupScope(Scope* scope) {
if (!scope->arguments()->IsStackAllocated()) {
return Bailout("context-allocated arguments");
}
- HArgumentsObject* object = new(zone()) HArgumentsObject;
- AddInstruction(object);
- graph()->SetArgumentsObject(object);
- environment()->Bind(scope->arguments(), object);
+
+ environment()->Bind(scope->arguments(),
+ graph()->GetArgumentsObject());
}
}
@@ -2603,12 +2769,20 @@ void HGraphBuilder::VisitIfStatement(IfStatement* stmt) {
HBasicBlock* HGraphBuilder::BreakAndContinueScope::Get(
BreakableStatement* stmt,
- BreakType type) {
+ BreakType type,
+ int* drop_extra) {
+ *drop_extra = 0;
BreakAndContinueScope* current = this;
while (current != NULL && current->info()->target() != stmt) {
+ *drop_extra += current->info()->drop_extra();
current = current->next();
}
ASSERT(current != NULL); // Always found (unless stack is malformed).
+
+ if (type == BREAK) {
+ *drop_extra += current->info()->drop_extra();
+ }
+
HBasicBlock* block = NULL;
switch (type) {
case BREAK:
@@ -2636,7 +2810,11 @@ void HGraphBuilder::VisitContinueStatement(ContinueStatement* stmt) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- HBasicBlock* continue_block = break_scope()->Get(stmt->target(), CONTINUE);
+ int drop_extra = 0;
+ HBasicBlock* continue_block = break_scope()->Get(stmt->target(),
+ CONTINUE,
+ &drop_extra);
+ Drop(drop_extra);
current_block()->Goto(continue_block);
set_current_block(NULL);
}
@@ -2646,7 +2824,11 @@ void HGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- HBasicBlock* break_block = break_scope()->Get(stmt->target(), BREAK);
+ int drop_extra = 0;
+ HBasicBlock* break_block = break_scope()->Get(stmt->target(),
+ BREAK,
+ &drop_extra);
+ Drop(drop_extra);
current_block()->Goto(break_block);
set_current_block(NULL);
}
@@ -2662,7 +2844,38 @@ void HGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
CHECK_ALIVE(VisitForValue(stmt->expression()));
HValue* result = environment()->Pop();
current_block()->FinishExit(new(zone()) HReturn(result));
- set_current_block(NULL);
+ } else if (function_state()->is_construct()) {
+ // Return from an inlined construct call. In a test context the return
+ // value will always evaluate to true, in a value context the return value
+ // needs to be a JSObject.
+ if (context->IsTest()) {
+ TestContext* test = TestContext::cast(context);
+ CHECK_ALIVE(VisitForEffect(stmt->expression()));
+ current_block()->Goto(test->if_true(), function_state()->drop_extra());
+ } else if (context->IsEffect()) {
+ CHECK_ALIVE(VisitForEffect(stmt->expression()));
+ current_block()->Goto(function_return(), function_state()->drop_extra());
+ } else {
+ ASSERT(context->IsValue());
+ CHECK_ALIVE(VisitForValue(stmt->expression()));
+ HValue* return_value = Pop();
+ HValue* receiver = environment()->Lookup(0);
+ HHasInstanceTypeAndBranch* typecheck =
+ new(zone()) HHasInstanceTypeAndBranch(return_value,
+ FIRST_SPEC_OBJECT_TYPE,
+ LAST_SPEC_OBJECT_TYPE);
+ HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
+ HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
+ typecheck->SetSuccessorAt(0, if_spec_object);
+ typecheck->SetSuccessorAt(1, not_spec_object);
+ current_block()->Finish(typecheck);
+ if_spec_object->AddLeaveInlined(return_value,
+ function_return(),
+ function_state()->drop_extra());
+ not_spec_object->AddLeaveInlined(receiver,
+ function_return(),
+ function_state()->drop_extra());
+ }
} else {
// Return from an inlined function, visit the subexpression in the
// expression context of the call.
@@ -2677,13 +2890,13 @@ void HGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
} else {
ASSERT(context->IsValue());
CHECK_ALIVE(VisitForValue(stmt->expression()));
- HValue* return_value = environment()->Pop();
+ HValue* return_value = Pop();
current_block()->AddLeaveInlined(return_value,
function_return(),
function_state()->drop_extra());
}
- set_current_block(NULL);
}
+ set_current_block(NULL);
}
@@ -2708,43 +2921,98 @@ void HGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
return Bailout("SwitchStatement: too many clauses");
}
+ HValue* context = environment()->LookupContext();
+
CHECK_ALIVE(VisitForValue(stmt->tag()));
AddSimulate(stmt->EntryId());
HValue* tag_value = Pop();
HBasicBlock* first_test_block = current_block();
- // 1. Build all the tests, with dangling true branches. Unconditionally
- // deoptimize if we encounter a non-smi comparison.
+ SwitchType switch_type = UNKNOWN_SWITCH;
+
+ // 1. Extract clause type
for (int i = 0; i < clause_count; ++i) {
CaseClause* clause = clauses->at(i);
if (clause->is_default()) continue;
- if (!clause->label()->IsSmiLiteral()) {
- return Bailout("SwitchStatement: non-literal switch label");
+
+ if (switch_type == UNKNOWN_SWITCH) {
+ if (clause->label()->IsSmiLiteral()) {
+ switch_type = SMI_SWITCH;
+ } else if (clause->label()->IsStringLiteral()) {
+ switch_type = STRING_SWITCH;
+ } else {
+ return Bailout("SwitchStatement: non-literal switch label");
+ }
+ } else if ((switch_type == STRING_SWITCH &&
+ !clause->label()->IsStringLiteral()) ||
+ (switch_type == SMI_SWITCH &&
+ !clause->label()->IsSmiLiteral())) {
+ return Bailout("SwitchStatemnt: mixed label types are not supported");
}
+ }
- // Unconditionally deoptimize on the first non-smi compare.
- clause->RecordTypeFeedback(oracle());
- if (!clause->IsSmiCompare()) {
- // Finish with deoptimize and add uses of enviroment values to
- // account for invisible uses.
- current_block()->FinishExitWithDeoptimization(HDeoptimize::kUseAll);
- set_current_block(NULL);
- break;
+ HUnaryControlInstruction* string_check = NULL;
+ HBasicBlock* not_string_block = NULL;
+
+ // Test switch's tag value if all clauses are string literals
+ if (switch_type == STRING_SWITCH) {
+ string_check = new(zone()) HIsStringAndBranch(tag_value);
+ first_test_block = graph()->CreateBasicBlock();
+ not_string_block = graph()->CreateBasicBlock();
+
+ string_check->SetSuccessorAt(0, first_test_block);
+ string_check->SetSuccessorAt(1, not_string_block);
+ current_block()->Finish(string_check);
+
+ set_current_block(first_test_block);
+ }
+
+ // 2. Build all the tests, with dangling true branches
+ int default_id = AstNode::kNoNumber;
+ for (int i = 0; i < clause_count; ++i) {
+ CaseClause* clause = clauses->at(i);
+ if (clause->is_default()) {
+ default_id = clause->EntryId();
+ continue;
+ }
+ if (switch_type == SMI_SWITCH) {
+ clause->RecordTypeFeedback(oracle());
}
- // Otherwise generate a compare and branch.
+ // Generate a compare and branch.
CHECK_ALIVE(VisitForValue(clause->label()));
HValue* label_value = Pop();
- HCompareIDAndBranch* compare =
- new(zone()) HCompareIDAndBranch(tag_value,
- label_value,
- Token::EQ_STRICT);
- compare->SetInputRepresentation(Representation::Integer32());
- HBasicBlock* body_block = graph()->CreateBasicBlock();
+
HBasicBlock* next_test_block = graph()->CreateBasicBlock();
+ HBasicBlock* body_block = graph()->CreateBasicBlock();
+
+ HControlInstruction* compare;
+
+ if (switch_type == SMI_SWITCH) {
+ if (!clause->IsSmiCompare()) {
+ // Finish with deoptimize and add uses of enviroment values to
+ // account for invisible uses.
+ current_block()->FinishExitWithDeoptimization(HDeoptimize::kUseAll);
+ set_current_block(NULL);
+ break;
+ }
+
+ HCompareIDAndBranch* compare_ =
+ new(zone()) HCompareIDAndBranch(tag_value,
+ label_value,
+ Token::EQ_STRICT);
+ compare_->SetInputRepresentation(Representation::Integer32());
+ compare = compare_;
+ } else {
+ compare = new(zone()) HStringCompareAndBranch(context, tag_value,
+ label_value,
+ Token::EQ_STRICT);
+ }
+
compare->SetSuccessorAt(0, body_block);
compare->SetSuccessorAt(1, next_test_block);
current_block()->Finish(compare);
+
set_current_block(next_test_block);
}
@@ -2752,10 +3020,18 @@ void HGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
// exit. This block is NULL if we deoptimized.
HBasicBlock* last_block = current_block();
- // 2. Loop over the clauses and the linked list of tests in lockstep,
+ if (not_string_block != NULL) {
+ int join_id = (default_id != AstNode::kNoNumber)
+ ? default_id
+ : stmt->ExitId();
+ last_block = CreateJoin(last_block, not_string_block, join_id);
+ }
+
+ // 3. Loop over the clauses and the linked list of tests in lockstep,
// translating the clause bodies.
HBasicBlock* curr_test_block = first_test_block;
HBasicBlock* fall_through_block = NULL;
+
BreakAndContinueInfo break_info(stmt);
{ BreakAndContinueScope push(&break_info, this);
for (int i = 0; i < clause_count; ++i) {
@@ -2824,8 +3100,8 @@ bool HGraphBuilder::HasOsrEntryAt(IterationStatement* statement) {
}
-void HGraphBuilder::PreProcessOsrEntry(IterationStatement* statement) {
- if (!HasOsrEntryAt(statement)) return;
+bool HGraphBuilder::PreProcessOsrEntry(IterationStatement* statement) {
+ if (!HasOsrEntryAt(statement)) return false;
HBasicBlock* non_osr_entry = graph()->CreateBasicBlock();
HBasicBlock* osr_entry = graph()->CreateBasicBlock();
@@ -2838,15 +3114,30 @@ void HGraphBuilder::PreProcessOsrEntry(IterationStatement* statement) {
set_current_block(osr_entry);
int osr_entry_id = statement->OsrEntryId();
- // We want the correct environment at the OsrEntry instruction. Build
- // it explicitly. The expression stack should be empty.
- ASSERT(environment()->ExpressionStackIsEmpty());
- for (int i = 0; i < environment()->length(); ++i) {
+ int first_expression_index = environment()->first_expression_index();
+ int length = environment()->length();
+ ZoneList<HUnknownOSRValue*>* osr_values =
+ new(zone()) ZoneList<HUnknownOSRValue*>(length);
+
+ for (int i = 0; i < first_expression_index; ++i) {
HUnknownOSRValue* osr_value = new(zone()) HUnknownOSRValue;
AddInstruction(osr_value);
environment()->Bind(i, osr_value);
+ osr_values->Add(osr_value);
+ }
+
+ if (first_expression_index != length) {
+ environment()->Drop(length - first_expression_index);
+ for (int i = first_expression_index; i < length; ++i) {
+ HUnknownOSRValue* osr_value = new(zone()) HUnknownOSRValue;
+ AddInstruction(osr_value);
+ environment()->Push(osr_value);
+ osr_values->Add(osr_value);
+ }
}
+ graph()->set_osr_values(osr_values);
+
AddSimulate(osr_entry_id);
AddInstruction(new(zone()) HOsrEntry(osr_entry_id));
HContext* context = new(zone()) HContext;
@@ -2855,6 +3146,7 @@ void HGraphBuilder::PreProcessOsrEntry(IterationStatement* statement) {
current_block()->Goto(loop_predecessor);
loop_predecessor->SetJoinId(statement->EntryId());
set_current_block(loop_predecessor);
+ return true;
}
@@ -2878,10 +3170,11 @@ void HGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
ASSERT(current_block() != NULL);
- PreProcessOsrEntry(stmt);
+ bool osr_entry = PreProcessOsrEntry(stmt);
HBasicBlock* loop_entry = CreateLoopHeaderBlock();
current_block()->Goto(loop_entry);
set_current_block(loop_entry);
+ if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
BreakAndContinueInfo break_info(stmt);
CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
@@ -2920,10 +3213,12 @@ void HGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
ASSERT(current_block() != NULL);
- PreProcessOsrEntry(stmt);
+ bool osr_entry = PreProcessOsrEntry(stmt);
HBasicBlock* loop_entry = CreateLoopHeaderBlock();
current_block()->Goto(loop_entry);
set_current_block(loop_entry);
+ if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
+
// If the condition is constant true, do not generate a branch.
HBasicBlock* loop_successor = NULL;
@@ -2944,7 +3239,6 @@ void HGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
BreakAndContinueInfo break_info(stmt);
if (current_block() != NULL) {
- BreakAndContinueScope push(&break_info, this);
CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
}
HBasicBlock* body_exit =
@@ -2966,10 +3260,11 @@ void HGraphBuilder::VisitForStatement(ForStatement* stmt) {
CHECK_ALIVE(Visit(stmt->init()));
}
ASSERT(current_block() != NULL);
- PreProcessOsrEntry(stmt);
+ bool osr_entry = PreProcessOsrEntry(stmt);
HBasicBlock* loop_entry = CreateLoopHeaderBlock();
current_block()->Goto(loop_entry);
set_current_block(loop_entry);
+ if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
HBasicBlock* loop_successor = NULL;
if (stmt->cond() != NULL) {
@@ -2989,7 +3284,6 @@ void HGraphBuilder::VisitForStatement(ForStatement* stmt) {
BreakAndContinueInfo break_info(stmt);
if (current_block() != NULL) {
- BreakAndContinueScope push(&break_info, this);
CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
}
HBasicBlock* body_exit =
@@ -3014,7 +3308,119 @@ void HGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- return Bailout("ForInStatement");
+
+ if (!FLAG_optimize_for_in) {
+ return Bailout("ForInStatement optimization is disabled");
+ }
+
+ if (!oracle()->IsForInFastCase(stmt)) {
+ return Bailout("ForInStatement is not fast case");
+ }
+
+ if (!stmt->each()->IsVariableProxy() ||
+ !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
+ return Bailout("ForInStatement with non-local each variable");
+ }
+
+ Variable* each_var = stmt->each()->AsVariableProxy()->var();
+
+ CHECK_ALIVE(VisitForValue(stmt->enumerable()));
+ HValue* enumerable = Top(); // Leave enumerable at the top.
+
+ HInstruction* map = AddInstruction(new(zone()) HForInPrepareMap(
+ environment()->LookupContext(), enumerable));
+ AddSimulate(stmt->PrepareId());
+
+ HInstruction* array = AddInstruction(
+ new(zone()) HForInCacheArray(
+ enumerable,
+ map,
+ DescriptorArray::kEnumCacheBridgeCacheIndex));
+
+ HInstruction* array_length = AddInstruction(
+ new(zone()) HFixedArrayBaseLength(array));
+
+ HInstruction* start_index = AddInstruction(new(zone()) HConstant(
+ Handle<Object>(Smi::FromInt(0)), Representation::Integer32()));
+
+ Push(map);
+ Push(array);
+ Push(array_length);
+ Push(start_index);
+
+ HInstruction* index_cache = AddInstruction(
+ new(zone()) HForInCacheArray(
+ enumerable,
+ map,
+ DescriptorArray::kEnumCacheBridgeIndicesCacheIndex));
+ HForInCacheArray::cast(array)->set_index_cache(
+ HForInCacheArray::cast(index_cache));
+
+ bool osr_entry = PreProcessOsrEntry(stmt);
+ HBasicBlock* loop_entry = CreateLoopHeaderBlock();
+ current_block()->Goto(loop_entry);
+ set_current_block(loop_entry);
+ if (osr_entry) graph()->set_osr_loop_entry(loop_entry);
+
+ HValue* index = environment()->ExpressionStackAt(0);
+ HValue* limit = environment()->ExpressionStackAt(1);
+
+ // Check that we still have more keys.
+ HCompareIDAndBranch* compare_index =
+ new(zone()) HCompareIDAndBranch(index, limit, Token::LT);
+ compare_index->SetInputRepresentation(Representation::Integer32());
+
+ HBasicBlock* loop_body = graph()->CreateBasicBlock();
+ HBasicBlock* loop_successor = graph()->CreateBasicBlock();
+
+ compare_index->SetSuccessorAt(0, loop_body);
+ compare_index->SetSuccessorAt(1, loop_successor);
+ current_block()->Finish(compare_index);
+
+ set_current_block(loop_successor);
+ Drop(5);
+
+ set_current_block(loop_body);
+
+ HValue* key = AddInstruction(
+ new(zone()) HLoadKeyedFastElement(
+ environment()->ExpressionStackAt(2), // Enum cache.
+ environment()->ExpressionStackAt(0), // Iteration index.
+ HLoadKeyedFastElement::OMIT_HOLE_CHECK));
+
+ // Check if the expected map still matches that of the enumerable.
+ // If not just deoptimize.
+ AddInstruction(new(zone()) HCheckMapValue(
+ environment()->ExpressionStackAt(4),
+ environment()->ExpressionStackAt(3)));
+
+ Bind(each_var, key);
+
+ BreakAndContinueInfo break_info(stmt, 5);
+ CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
+
+ HBasicBlock* body_exit =
+ JoinContinue(stmt, current_block(), break_info.continue_block());
+
+ if (body_exit != NULL) {
+ set_current_block(body_exit);
+
+ HValue* current_index = Pop();
+ HInstruction* new_index = new(zone()) HAdd(environment()->LookupContext(),
+ current_index,
+ graph()->GetConstant1());
+ new_index->AssumeRepresentation(Representation::Integer32());
+ PushAndAdd(new_index);
+ body_exit = current_block();
+ }
+
+ HBasicBlock* loop_exit = CreateLoop(stmt,
+ loop_entry,
+ body_exit,
+ loop_successor,
+ break_info.break_block());
+
+ set_current_block(loop_exit);
}
@@ -3135,7 +3541,7 @@ HGraphBuilder::GlobalPropertyAccess HGraphBuilder::LookupGlobalProperty(
}
Handle<GlobalObject> global(info()->global_object());
global->Lookup(*var->name(), lookup);
- if (!lookup->IsProperty() ||
+ if (!lookup->IsFound() ||
lookup->type() != NORMAL ||
(is_store && lookup->IsReadOnly()) ||
lookup->holder() != *global) {
@@ -3164,11 +3570,11 @@ void HGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
Variable* variable = expr->var();
- if (variable->mode() == LET) {
- return Bailout("reference to let variable");
- }
switch (variable->location()) {
case Variable::UNALLOCATED: {
+ if (variable->mode() == LET || variable->mode() == CONST_HARMONY) {
+ return Bailout("reference to global harmony declared variable");
+ }
// Handle known global constants like 'undefined' specially to avoid a
// load from a global cell for them.
Handle<Object> constant_value =
@@ -3197,7 +3603,6 @@ void HGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
} else {
HValue* context = environment()->LookupContext();
HGlobalObject* global_object = new(zone()) HGlobalObject(context);
- if (variable->is_qml_global()) global_object->set_qml_global(true);
AddInstruction(global_object);
HLoadGlobalGeneric* instr =
new(zone()) HLoadGlobalGeneric(context,
@@ -3212,20 +3617,18 @@ void HGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
case Variable::PARAMETER:
case Variable::LOCAL: {
HValue* value = environment()->Lookup(variable);
- if (variable->mode() == CONST &&
- value == graph()->GetConstantHole()) {
- return Bailout("reference to uninitialized const variable");
+ if (value == graph()->GetConstantHole()) {
+ ASSERT(variable->mode() == CONST ||
+ variable->mode() == CONST_HARMONY ||
+ variable->mode() == LET);
+ return Bailout("reference to uninitialized variable");
}
return ast_context()->ReturnValue(value);
}
case Variable::CONTEXT: {
- if (variable->mode() == CONST) {
- return Bailout("reference to const context slot");
- }
HValue* context = BuildContextChainWalk(variable);
- HLoadContextSlot* instr =
- new(zone()) HLoadContextSlot(context, variable->index());
+ HLoadContextSlot* instr = new(zone()) HLoadContextSlot(context, variable);
return ast_context()->ReturnInstruction(instr, expr->id());
}
@@ -3259,18 +3662,99 @@ void HGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
}
+// Determines whether the given array or object literal boilerplate satisfies
+// all limits to be considered for fast deep-copying and computes the total
+// size of all objects that are part of the graph.
+static bool IsFastLiteral(Handle<JSObject> boilerplate,
+ int max_depth,
+ int* max_properties,
+ int* total_size) {
+ ASSERT(max_depth >= 0 && *max_properties >= 0);
+ if (max_depth == 0) return false;
+
+ Handle<FixedArrayBase> elements(boilerplate->elements());
+ if (elements->length() > 0 &&
+ elements->map() != boilerplate->GetHeap()->fixed_cow_array_map()) {
+ if (boilerplate->HasFastDoubleElements()) {
+ *total_size += FixedDoubleArray::SizeFor(elements->length());
+ } else if (boilerplate->HasFastElements()) {
+ int length = elements->length();
+ for (int i = 0; i < length; i++) {
+ if ((*max_properties)-- == 0) return false;
+ Handle<Object> value = JSObject::GetElement(boilerplate, i);
+ if (value->IsJSObject()) {
+ Handle<JSObject> value_object = Handle<JSObject>::cast(value);
+ if (!IsFastLiteral(value_object,
+ max_depth - 1,
+ max_properties,
+ total_size)) {
+ return false;
+ }
+ }
+ }
+ *total_size += FixedArray::SizeFor(length);
+ } else {
+ return false;
+ }
+ }
+
+ Handle<FixedArray> properties(boilerplate->properties());
+ if (properties->length() > 0) {
+ return false;
+ } else {
+ int nof = boilerplate->map()->inobject_properties();
+ for (int i = 0; i < nof; i++) {
+ if ((*max_properties)-- == 0) return false;
+ Handle<Object> value(boilerplate->InObjectPropertyAt(i));
+ if (value->IsJSObject()) {
+ Handle<JSObject> value_object = Handle<JSObject>::cast(value);
+ if (!IsFastLiteral(value_object,
+ max_depth - 1,
+ max_properties,
+ total_size)) {
+ return false;
+ }
+ }
+ }
+ }
+
+ *total_size += boilerplate->map()->instance_size();
+ return true;
+}
+
+
void HGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
+ Handle<JSFunction> closure = function_state()->compilation_info()->closure();
HValue* context = environment()->LookupContext();
- HObjectLiteral* literal =
- new(zone()) HObjectLiteral(context,
- expr->constant_properties(),
- expr->fast_elements(),
- expr->literal_index(),
- expr->depth(),
- expr->has_function());
+ HInstruction* literal;
+
+ // Check whether to use fast or slow deep-copying for boilerplate.
+ int total_size = 0;
+ int max_properties = HFastLiteral::kMaxLiteralProperties;
+ Handle<Object> boilerplate(closure->literals()->get(expr->literal_index()));
+ if (boilerplate->IsJSObject() &&
+ IsFastLiteral(Handle<JSObject>::cast(boilerplate),
+ HFastLiteral::kMaxLiteralDepth,
+ &max_properties,
+ &total_size)) {
+ Handle<JSObject> boilerplate_object = Handle<JSObject>::cast(boilerplate);
+ literal = new(zone()) HFastLiteral(context,
+ boilerplate_object,
+ total_size,
+ expr->literal_index(),
+ expr->depth());
+ } else {
+ literal = new(zone()) HObjectLiteral(context,
+ expr->constant_properties(),
+ expr->fast_elements(),
+ expr->literal_index(),
+ expr->depth(),
+ expr->has_function());
+ }
+
// The object is expected in the bailout environment during computation
// of the property values and is the value of the entire expression.
PushAndAdd(literal);
@@ -3291,18 +3775,12 @@ void HGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
case ObjectLiteral::Property::COMPUTED:
if (key->handle()->IsSymbol()) {
if (property->emit_store()) {
+ property->RecordTypeFeedback(oracle());
CHECK_ALIVE(VisitForValue(value));
HValue* value = Pop();
- Handle<String> name = Handle<String>::cast(key->handle());
- HStoreNamedGeneric* store =
- new(zone()) HStoreNamedGeneric(
- context,
- literal,
- name,
- value,
- function_strict_mode_flag());
+ HInstruction* store = BuildStoreNamed(literal, value, property);
AddInstruction(store);
- AddSimulate(key->id());
+ if (store->HasObservableSideEffects()) AddSimulate(key->id());
} else {
CHECK_ALIVE(VisitForEffect(value));
}
@@ -3339,12 +3817,48 @@ void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
ZoneList<Expression*>* subexprs = expr->values();
int length = subexprs->length();
HValue* context = environment()->LookupContext();
+ HInstruction* literal;
+
+ Handle<FixedArray> literals(environment()->closure()->literals());
+ Handle<Object> raw_boilerplate(literals->get(expr->literal_index()));
+
+ if (raw_boilerplate->IsUndefined()) {
+ raw_boilerplate = Runtime::CreateArrayLiteralBoilerplate(
+ isolate(), literals, expr->constant_elements());
+ if (raw_boilerplate.is_null()) {
+ return Bailout("array boilerplate creation failed");
+ }
+ literals->set(expr->literal_index(), *raw_boilerplate);
+ if (JSObject::cast(*raw_boilerplate)->elements()->map() ==
+ isolate()->heap()->fixed_cow_array_map()) {
+ isolate()->counters()->cow_arrays_created_runtime()->Increment();
+ }
+ }
+
+ Handle<JSObject> boilerplate = Handle<JSObject>::cast(raw_boilerplate);
+ ElementsKind boilerplate_elements_kind =
+ Handle<JSObject>::cast(boilerplate)->GetElementsKind();
+
+ // Check whether to use fast or slow deep-copying for boilerplate.
+ int total_size = 0;
+ int max_properties = HFastLiteral::kMaxLiteralProperties;
+ if (IsFastLiteral(boilerplate,
+ HFastLiteral::kMaxLiteralDepth,
+ &max_properties,
+ &total_size)) {
+ literal = new(zone()) HFastLiteral(context,
+ boilerplate,
+ total_size,
+ expr->literal_index(),
+ expr->depth());
+ } else {
+ literal = new(zone()) HArrayLiteral(context,
+ boilerplate,
+ length,
+ expr->literal_index(),
+ expr->depth());
+ }
- HArrayLiteral* literal = new(zone()) HArrayLiteral(context,
- expr->constant_elements(),
- length,
- expr->literal_index(),
- expr->depth());
// The array is expected in the bailout environment during computation
// of the property values and is the value of the entire expression.
PushAndAdd(literal);
@@ -3367,42 +3881,28 @@ void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
HValue* key = AddInstruction(
new(zone()) HConstant(Handle<Object>(Smi::FromInt(i)),
Representation::Integer32()));
- HInstruction* elements_kind =
- AddInstruction(new(zone()) HElementsKind(literal));
- HBasicBlock* store_fast = graph()->CreateBasicBlock();
- // Two empty blocks to satisfy edge split form.
- HBasicBlock* store_fast_edgesplit1 = graph()->CreateBasicBlock();
- HBasicBlock* store_fast_edgesplit2 = graph()->CreateBasicBlock();
- HBasicBlock* store_generic = graph()->CreateBasicBlock();
- HBasicBlock* check_smi_only_elements = graph()->CreateBasicBlock();
- HBasicBlock* join = graph()->CreateBasicBlock();
-
- HIsSmiAndBranch* smicheck = new(zone()) HIsSmiAndBranch(value);
- smicheck->SetSuccessorAt(0, store_fast_edgesplit1);
- smicheck->SetSuccessorAt(1, check_smi_only_elements);
- current_block()->Finish(smicheck);
- store_fast_edgesplit1->Finish(new(zone()) HGoto(store_fast));
-
- set_current_block(check_smi_only_elements);
- HCompareConstantEqAndBranch* smi_elements_check =
- new(zone()) HCompareConstantEqAndBranch(elements_kind,
- FAST_ELEMENTS,
- Token::EQ_STRICT);
- smi_elements_check->SetSuccessorAt(0, store_fast_edgesplit2);
- smi_elements_check->SetSuccessorAt(1, store_generic);
- current_block()->Finish(smi_elements_check);
- store_fast_edgesplit2->Finish(new(zone()) HGoto(store_fast));
-
- set_current_block(store_fast);
- AddInstruction(new(zone()) HStoreKeyedFastElement(elements, key, value));
- store_fast->Goto(join);
-
- set_current_block(store_generic);
- AddInstruction(BuildStoreKeyedGeneric(literal, key, value));
- store_generic->Goto(join);
- join->SetJoinId(expr->id());
- set_current_block(join);
+ switch (boilerplate_elements_kind) {
+ case FAST_SMI_ONLY_ELEMENTS:
+ // Smi-only arrays need a smi check.
+ AddInstruction(new(zone()) HCheckSmi(value));
+ // Fall through.
+ case FAST_ELEMENTS:
+ AddInstruction(new(zone()) HStoreKeyedFastElement(
+ elements,
+ key,
+ value,
+ boilerplate_elements_kind));
+ break;
+ case FAST_DOUBLE_ELEMENTS:
+ AddInstruction(new(zone()) HStoreKeyedFastDoubleElement(elements,
+ key,
+ value));
+ break;
+ default:
+ UNREACHABLE();
+ break;
+ }
AddSimulate(expr->GetIdForElement(i));
}
@@ -3410,21 +3910,22 @@ void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
}
-// Sets the lookup result and returns true if the store can be inlined.
-static bool ComputeStoredField(Handle<Map> type,
- Handle<String> name,
- LookupResult* lookup) {
+// Sets the lookup result and returns true if the load/store can be inlined.
+static bool ComputeLoadStoreField(Handle<Map> type,
+ Handle<String> name,
+ LookupResult* lookup,
+ bool is_store) {
type->LookupInDescriptors(NULL, *name, lookup);
- if (!lookup->IsPropertyOrTransition()) return false;
+ if (!lookup->IsFound()) return false;
if (lookup->type() == FIELD) return true;
- return (lookup->type() == MAP_TRANSITION) &&
+ return is_store && (lookup->type() == MAP_TRANSITION) &&
(type->unused_property_fields() > 0);
}
-static int ComputeStoredFieldIndex(Handle<Map> type,
- Handle<String> name,
- LookupResult* lookup) {
+static int ComputeLoadStoreFieldIndex(Handle<Map> type,
+ Handle<String> name,
+ LookupResult* lookup) {
ASSERT(lookup->type() == FIELD || lookup->type() == MAP_TRANSITION);
if (lookup->type() == FIELD) {
return lookup->GetLocalFieldIndexFromMap(*type);
@@ -3443,10 +3944,10 @@ HInstruction* HGraphBuilder::BuildStoreNamedField(HValue* object,
bool smi_and_map_check) {
if (smi_and_map_check) {
AddInstruction(new(zone()) HCheckNonSmi(object));
- AddInstruction(new(zone()) HCheckMap(object, type));
+ AddInstruction(HCheckMaps::NewWithTransitions(object, type));
}
- int index = ComputeStoredFieldIndex(type, name, lookup);
+ int index = ComputeLoadStoreFieldIndex(type, name, lookup);
bool is_in_object = index < 0;
int offset = index * kPointerSize;
if (index < 0) {
@@ -3463,7 +3964,7 @@ HInstruction* HGraphBuilder::BuildStoreNamedField(HValue* object,
instr->set_transition(transition);
// TODO(fschneider): Record the new map type of the object in the IR to
// enable elimination of redundant checks after the transition store.
- instr->SetFlag(HValue::kChangesMaps);
+ instr->SetGVNFlag(kChangesMaps);
}
return instr;
}
@@ -3484,6 +3985,25 @@ HInstruction* HGraphBuilder::BuildStoreNamedGeneric(HValue* object,
HInstruction* HGraphBuilder::BuildStoreNamed(HValue* object,
HValue* value,
+ ObjectLiteral::Property* prop) {
+ Literal* key = prop->key()->AsLiteral();
+ Handle<String> name = Handle<String>::cast(key->handle());
+ ASSERT(!name.is_null());
+
+ LookupResult lookup(isolate());
+ Handle<Map> type = prop->GetReceiverType();
+ bool is_monomorphic = prop->IsMonomorphic() &&
+ ComputeLoadStoreField(type, name, &lookup, true);
+
+ return is_monomorphic
+ ? BuildStoreNamedField(object, name, value, type, &lookup,
+ true) // Needs smi and map check.
+ : BuildStoreNamedGeneric(object, name, value);
+}
+
+
+HInstruction* HGraphBuilder::BuildStoreNamed(HValue* object,
+ HValue* value,
Expression* expr) {
Property* prop = (expr->AsProperty() != NULL)
? expr->AsProperty()
@@ -3495,7 +4015,7 @@ HInstruction* HGraphBuilder::BuildStoreNamed(HValue* object,
LookupResult lookup(isolate());
SmallMapList* types = expr->GetReceiverTypes();
bool is_monomorphic = expr->IsMonomorphic() &&
- ComputeStoredField(types->first(), name, &lookup);
+ ComputeLoadStoreField(types->first(), name, &lookup, true);
return is_monomorphic
? BuildStoreNamedField(object, name, value, types->first(), &lookup,
@@ -3504,6 +4024,59 @@ HInstruction* HGraphBuilder::BuildStoreNamed(HValue* object,
}
+void HGraphBuilder::HandlePolymorphicLoadNamedField(Property* expr,
+ HValue* object,
+ SmallMapList* types,
+ Handle<String> name) {
+ int count = 0;
+ int previous_field_offset = 0;
+ bool previous_field_is_in_object = false;
+ bool is_monomorphic_field = true;
+ Handle<Map> map;
+ LookupResult lookup(isolate());
+ for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
+ map = types->at(i);
+ if (ComputeLoadStoreField(map, name, &lookup, false)) {
+ int index = ComputeLoadStoreFieldIndex(map, name, &lookup);
+ bool is_in_object = index < 0;
+ int offset = index * kPointerSize;
+ if (index < 0) {
+ // Negative property indices are in-object properties, indexed
+ // from the end of the fixed part of the object.
+ offset += map->instance_size();
+ } else {
+ offset += FixedArray::kHeaderSize;
+ }
+ if (count == 0) {
+ previous_field_offset = offset;
+ previous_field_is_in_object = is_in_object;
+ } else if (is_monomorphic_field) {
+ is_monomorphic_field = (offset == previous_field_offset) &&
+ (is_in_object == previous_field_is_in_object);
+ }
+ ++count;
+ }
+ }
+
+ // Use monomorphic load if property lookup results in the same field index
+ // for all maps. Requires special map check on the set of all handled maps.
+ HInstruction* instr;
+ if (count == types->length() && is_monomorphic_field) {
+ AddInstruction(new(zone()) HCheckMaps(object, types));
+ instr = BuildLoadNamedField(object, expr, map, &lookup, false);
+ } else {
+ HValue* context = environment()->LookupContext();
+ instr = new(zone()) HLoadNamedFieldPolymorphic(context,
+ object,
+ types,
+ name);
+ }
+
+ instr->set_position(expr->position());
+ return ast_context()->ReturnInstruction(instr, expr->id());
+}
+
+
void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
HValue* object,
HValue* value,
@@ -3517,7 +4090,7 @@ void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
for (int i = 0; i < types->length() && count < kMaxStorePolymorphism; ++i) {
Handle<Map> map = types->at(i);
LookupResult lookup(isolate());
- if (ComputeStoredField(map, name, &lookup)) {
+ if (ComputeLoadStoreField(map, name, &lookup, true)) {
if (count == 0) {
AddInstruction(new(zone()) HCheckNonSmi(object)); // Only needed once.
join = graph()->CreateBasicBlock();
@@ -3657,7 +4230,6 @@ void HGraphBuilder::HandleGlobalVariableAssignment(Variable* var,
} else {
HValue* context = environment()->LookupContext();
HGlobalObject* global_object = new(zone()) HGlobalObject(context);
- if (var->is_qml_global()) global_object->set_qml_global(true);
AddInstruction(global_object);
HStoreGlobalGeneric* instr =
new(zone()) HStoreGlobalGeneric(context,
@@ -3685,8 +4257,8 @@ void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
if (proxy != NULL) {
Variable* var = proxy->var();
- if (var->mode() == CONST || var->mode() == LET) {
- return Bailout("unsupported let or const compound assignment");
+ if (var->mode() == LET) {
+ return Bailout("unsupported let compound assignment");
}
CHECK_ALIVE(VisitForValue(operation));
@@ -3701,6 +4273,9 @@ void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
case Variable::PARAMETER:
case Variable::LOCAL:
+ if (var->mode() == CONST) {
+ return Bailout("unsupported const compound assignment");
+ }
Bind(var, Top());
break;
@@ -3721,9 +4296,25 @@ void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
}
}
+ HStoreContextSlot::Mode mode;
+
+ switch (var->mode()) {
+ case LET:
+ mode = HStoreContextSlot::kCheckDeoptimize;
+ break;
+ case CONST:
+ return ast_context()->ReturnValue(Pop());
+ case CONST_HARMONY:
+ // This case is checked statically so no need to
+ // perform checks here
+ UNREACHABLE();
+ default:
+ mode = HStoreContextSlot::kNoCheck;
+ }
+
HValue* context = BuildContextChainWalk(var);
HStoreContextSlot* instr =
- new(zone()) HStoreContextSlot(context, var->index(), Top());
+ new(zone()) HStoreContextSlot(context, var->index(), mode, Top());
AddInstruction(instr);
if (instr->HasObservableSideEffects()) {
AddSimulate(expr->AssignmentId());
@@ -3832,19 +4423,23 @@ void HGraphBuilder::VisitAssignment(Assignment* expr) {
HandlePropertyAssignment(expr);
} else if (proxy != NULL) {
Variable* var = proxy->var();
+
if (var->mode() == CONST) {
if (expr->op() != Token::INIT_CONST) {
- return Bailout("non-initializer assignment to const");
+ CHECK_ALIVE(VisitForValue(expr->value()));
+ return ast_context()->ReturnValue(Pop());
}
- if (!var->IsStackAllocated()) {
- return Bailout("assignment to const context slot");
+
+ if (var->IsStackAllocated()) {
+ // We insert a use of the old value to detect unsupported uses of const
+ // variables (e.g. initialization inside a loop).
+ HValue* old_value = environment()->Lookup(var);
+ AddInstruction(new HUseConst(old_value));
+ }
+ } else if (var->mode() == CONST_HARMONY) {
+ if (expr->op() != Token::INIT_CONST_HARMONY) {
+ return Bailout("non-initializer assignment to const");
}
- // We insert a use of the old value to detect unsupported uses of const
- // variables (e.g. initialization inside a loop).
- HValue* old_value = environment()->Lookup(var);
- AddInstruction(new HUseConst(old_value));
- } else if (var->mode() == LET) {
- return Bailout("unsupported assignment to let");
}
if (proxy->IsArguments()) return Bailout("assignment to arguments");
@@ -3861,6 +4456,14 @@ void HGraphBuilder::VisitAssignment(Assignment* expr) {
case Variable::PARAMETER:
case Variable::LOCAL: {
+ // Perform an initialization check for let declared variables
+ // or parameters.
+ if (var->mode() == LET && expr->op() == Token::ASSIGN) {
+ HValue* env_value = environment()->Lookup(var);
+ if (env_value == graph()->GetConstantHole()) {
+ return Bailout("assignment to let variable before initialization");
+ }
+ }
// We do not allow the arguments object to occur in a context where it
// may escape, but assignments to stack-allocated locals are
// permitted.
@@ -3871,7 +4474,6 @@ void HGraphBuilder::VisitAssignment(Assignment* expr) {
}
case Variable::CONTEXT: {
- ASSERT(var->mode() != CONST);
// Bail out if we try to mutate a parameter value in a function using
// the arguments object. We do not (yet) correctly handle the
// arguments property of the function.
@@ -3887,9 +4489,34 @@ void HGraphBuilder::VisitAssignment(Assignment* expr) {
}
CHECK_ALIVE(VisitForValue(expr->value()));
+ HStoreContextSlot::Mode mode;
+ if (expr->op() == Token::ASSIGN) {
+ switch (var->mode()) {
+ case LET:
+ mode = HStoreContextSlot::kCheckDeoptimize;
+ break;
+ case CONST:
+ return ast_context()->ReturnValue(Pop());
+ case CONST_HARMONY:
+ // This case is checked statically so no need to
+ // perform checks here
+ UNREACHABLE();
+ default:
+ mode = HStoreContextSlot::kNoCheck;
+ }
+ } else if (expr->op() == Token::INIT_VAR ||
+ expr->op() == Token::INIT_LET ||
+ expr->op() == Token::INIT_CONST_HARMONY) {
+ mode = HStoreContextSlot::kNoCheck;
+ } else {
+ ASSERT(expr->op() == Token::INIT_CONST);
+
+ mode = HStoreContextSlot::kCheckIgnoreAssignment;
+ }
+
HValue* context = BuildContextChainWalk(var);
- HStoreContextSlot* instr =
- new(zone()) HStoreContextSlot(context, var->index(), Top());
+ HStoreContextSlot* instr = new(zone()) HStoreContextSlot(
+ context, var->index(), mode, Top());
AddInstruction(instr);
if (instr->HasObservableSideEffects()) {
AddSimulate(expr->AssignmentId());
@@ -3934,7 +4561,7 @@ HLoadNamedField* HGraphBuilder::BuildLoadNamedField(HValue* object,
bool smi_and_map_check) {
if (smi_and_map_check) {
AddInstruction(new(zone()) HCheckNonSmi(object));
- AddInstruction(new(zone()) HCheckMap(object, type));
+ AddInstruction(HCheckMaps::NewWithTransitions(object, type));
}
int index = lookup->GetLocalFieldIndexFromMap(*type);
@@ -3953,6 +4580,10 @@ HLoadNamedField* HGraphBuilder::BuildLoadNamedField(HValue* object,
HInstruction* HGraphBuilder::BuildLoadNamedGeneric(HValue* obj,
Property* expr) {
+ if (expr->IsUninitialized() && !FLAG_always_opt) {
+ AddInstruction(new(zone()) HSoftDeoptimize);
+ current_block()->MarkAsDeoptimizing();
+ }
ASSERT(expr->key()->IsPropertyName());
Handle<Object> name = expr->key()->AsLiteral()->handle();
HValue* context = environment()->LookupContext();
@@ -3966,15 +4597,15 @@ HInstruction* HGraphBuilder::BuildLoadNamed(HValue* obj,
Handle<String> name) {
LookupResult lookup(isolate());
map->LookupInDescriptors(NULL, *name, &lookup);
- if (lookup.IsProperty() && lookup.type() == FIELD) {
+ if (lookup.IsFound() && lookup.type() == FIELD) {
return BuildLoadNamedField(obj,
expr,
map,
&lookup,
true);
- } else if (lookup.IsProperty() && lookup.type() == CONSTANT_FUNCTION) {
+ } else if (lookup.IsFound() && lookup.type() == CONSTANT_FUNCTION) {
AddInstruction(new(zone()) HCheckNonSmi(obj));
- AddInstruction(new(zone()) HCheckMap(obj, map));
+ AddInstruction(HCheckMaps::NewWithTransitions(obj, map));
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*map));
return new(zone()) HConstant(function, Representation::Tagged());
} else {
@@ -4000,9 +4631,7 @@ HInstruction* HGraphBuilder::BuildExternalArrayElementAccess(
ASSERT(val != NULL);
switch (elements_kind) {
case EXTERNAL_PIXEL_ELEMENTS: {
- HClampToUint8* clamp = new(zone()) HClampToUint8(val);
- AddInstruction(clamp);
- val = clamp;
+ val = AddInstruction(new(zone()) HClampToUint8(val));
break;
}
case EXTERNAL_BYTE_ELEMENTS:
@@ -4011,9 +4640,13 @@ HInstruction* HGraphBuilder::BuildExternalArrayElementAccess(
case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
case EXTERNAL_INT_ELEMENTS:
case EXTERNAL_UNSIGNED_INT_ELEMENTS: {
- HToInt32* floor_val = new(zone()) HToInt32(val);
- AddInstruction(floor_val);
- val = floor_val;
+ if (!val->representation().IsInteger32()) {
+ val = AddInstruction(new(zone()) HChange(
+ val,
+ Representation::Integer32(),
+ true, // Truncate to int32.
+ false)); // Don't deoptimize undefined (irrelevant here).
+ }
break;
}
case EXTERNAL_FLOAT_ELEMENTS:
@@ -4030,6 +4663,7 @@ HInstruction* HGraphBuilder::BuildExternalArrayElementAccess(
return new(zone()) HStoreKeyedSpecializedArrayElement(
external_elements, checked_key, val, elements_kind);
} else {
+ ASSERT(val == NULL);
return new(zone()) HLoadKeyedSpecializedArrayElement(
external_elements, checked_key, elements_kind);
}
@@ -4043,12 +4677,20 @@ HInstruction* HGraphBuilder::BuildFastElementAccess(HValue* elements,
bool is_store) {
if (is_store) {
ASSERT(val != NULL);
- if (elements_kind == FAST_DOUBLE_ELEMENTS) {
- return new(zone()) HStoreKeyedFastDoubleElement(
- elements, checked_key, val);
- } else { // FAST_ELEMENTS or FAST_SMI_ONLY_ELEMENTS.
- return new(zone()) HStoreKeyedFastElement(
- elements, checked_key, val, elements_kind);
+ switch (elements_kind) {
+ case FAST_DOUBLE_ELEMENTS:
+ return new(zone()) HStoreKeyedFastDoubleElement(
+ elements, checked_key, val);
+ case FAST_SMI_ONLY_ELEMENTS:
+ // Smi-only arrays need a smi check.
+ AddInstruction(new(zone()) HCheckSmi(val));
+ // Fall through.
+ case FAST_ELEMENTS:
+ return new(zone()) HStoreKeyedFastElement(
+ elements, checked_key, val, elements_kind);
+ default:
+ UNREACHABLE();
+ return NULL;
}
}
// It's an element load (!is_store).
@@ -4065,20 +4707,12 @@ HInstruction* HGraphBuilder::BuildMonomorphicElementAccess(HValue* object,
HValue* val,
Handle<Map> map,
bool is_store) {
- HInstruction* mapcheck = AddInstruction(new(zone()) HCheckMap(object, map));
+ HInstruction* mapcheck = AddInstruction(new(zone()) HCheckMaps(object, map));
bool fast_smi_only_elements = map->has_fast_smi_only_elements();
bool fast_elements = map->has_fast_elements();
- bool fast_double_elements = map->has_fast_double_elements();
- if (!fast_smi_only_elements &&
- !fast_elements &&
- !fast_double_elements &&
- !map->has_external_array_elements()) {
- return is_store ? BuildStoreKeyedGeneric(object, key, val)
- : BuildLoadKeyedGeneric(object, key);
- }
HInstruction* elements = AddInstruction(new(zone()) HLoadElements(object));
if (is_store && (fast_elements || fast_smi_only_elements)) {
- AddInstruction(new(zone()) HCheckMap(
+ AddInstruction(new(zone()) HCheckMaps(
elements, isolate()->factory()->fixed_array_map()));
}
HInstruction* length = NULL;
@@ -4092,7 +4726,9 @@ HInstruction* HGraphBuilder::BuildMonomorphicElementAccess(HValue* object,
return BuildExternalArrayElementAccess(external_elements, checked_key,
val, map->elements_kind(), is_store);
}
- ASSERT(fast_smi_only_elements || fast_elements || fast_double_elements);
+ ASSERT(fast_smi_only_elements ||
+ fast_elements ||
+ map->has_fast_double_elements());
if (map->instance_type() == JS_ARRAY_TYPE) {
length = AddInstruction(new(zone()) HJSArrayLength(object, mapcheck));
} else {
@@ -4149,7 +4785,7 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
Handle<Map> map = maps->at(i);
ASSERT(map->IsMap());
if (!transition_target.at(i).is_null()) {
- object = AddInstruction(new(zone()) HTransitionElementsKind(
+ AddInstruction(new(zone()) HTransitionElementsKind(
object, map, transition_target.at(i)));
} else {
type_todo[map->elements_kind()] = true;
@@ -4164,8 +4800,14 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
// If only one map is left after transitioning, handle this case
// monomorphically.
if (num_untransitionable_maps == 1) {
- HInstruction* instr = AddInstruction(BuildMonomorphicElementAccess(
- object, key, val, untransitionable_map, is_store));
+ HInstruction* instr = NULL;
+ if (untransitionable_map->has_slow_elements_kind()) {
+ instr = AddInstruction(is_store ? BuildStoreKeyedGeneric(object, key, val)
+ : BuildLoadKeyedGeneric(object, key));
+ } else {
+ instr = AddInstruction(BuildMonomorphicElementAccess(
+ object, key, val, untransitionable_map, is_store));
+ }
*has_side_effects |= instr->HasObservableSideEffects();
instr->set_position(position);
return is_store ? NULL : instr;
@@ -4219,11 +4861,8 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
if (elements_kind == FAST_SMI_ONLY_ELEMENTS ||
elements_kind == FAST_ELEMENTS ||
elements_kind == FAST_DOUBLE_ELEMENTS) {
- if (is_store && elements_kind == FAST_SMI_ONLY_ELEMENTS) {
- AddInstruction(new(zone()) HCheckSmi(val));
- }
if (is_store && elements_kind != FAST_DOUBLE_ELEMENTS) {
- AddInstruction(new(zone()) HCheckMap(
+ AddInstruction(new(zone()) HCheckMaps(
elements, isolate()->factory()->fixed_array_map(),
elements_kind_branch));
}
@@ -4304,8 +4943,13 @@ HValue* HGraphBuilder::HandleKeyedElementAccess(HValue* obj,
HInstruction* instr = NULL;
if (expr->IsMonomorphic()) {
Handle<Map> map = expr->GetMonomorphicReceiverType();
- AddInstruction(new(zone()) HCheckNonSmi(obj));
- instr = BuildMonomorphicElementAccess(obj, key, val, map, is_store);
+ if (map->has_slow_elements_kind()) {
+ instr = is_store ? BuildStoreKeyedGeneric(obj, key, val)
+ : BuildLoadKeyedGeneric(obj, key);
+ } else {
+ AddInstruction(new(zone()) HCheckNonSmi(obj));
+ instr = BuildMonomorphicElementAccess(obj, key, val, map, is_store);
+ }
} else if (expr->GetReceiverTypes() != NULL &&
!expr->GetReceiverTypes()->is_empty()) {
return HandlePolymorphicElementAccess(
@@ -4422,8 +5066,8 @@ void HGraphBuilder::VisitProperty(Property* expr) {
instr = BuildLoadNamed(obj, expr, types->first(), name);
} else if (types != NULL && types->length() > 1) {
AddInstruction(new(zone()) HCheckNonSmi(obj));
- HValue* context = environment()->LookupContext();
- instr = new(zone()) HLoadNamedFieldPolymorphic(context, obj, types, name);
+ HandlePolymorphicLoadNamedField(expr, obj, types, name);
+ return;
} else {
instr = BuildLoadNamedGeneric(obj, expr);
}
@@ -4464,7 +5108,7 @@ void HGraphBuilder::AddCheckConstantFunction(Call* expr,
// its prototypes.
if (smi_and_map_check) {
AddInstruction(new(zone()) HCheckNonSmi(receiver));
- AddInstruction(new(zone()) HCheckMap(receiver, receiver_map));
+ AddInstruction(HCheckMaps::NewWithTransitions(receiver, receiver_map));
}
if (!expr->holder().is_null()) {
AddInstruction(new(zone()) HCheckPrototypeMaps(
@@ -4505,7 +5149,7 @@ void HGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
PrintF("Trying to inline the polymorphic call to %s\n",
*name->ToCString());
}
- if (FLAG_polymorphic_inlining && TryInline(expr)) {
+ if (FLAG_polymorphic_inlining && TryInlineCall(expr)) {
// Trying to inline will signal that we should bailout from the
// entire compilation by setting stack overflow on the visitor.
if (HasStackOverflow()) return;
@@ -4575,24 +5219,24 @@ void HGraphBuilder::TraceInline(Handle<JSFunction> target,
}
-bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
+bool HGraphBuilder::TryInline(CallKind call_kind,
+ Handle<JSFunction> target,
+ ZoneList<Expression*>* arguments,
+ HValue* receiver,
+ int ast_id,
+ int return_id,
+ ReturnHandlingFlag return_handling) {
if (!FLAG_use_inlining) return false;
- // The function call we are inlining is a method call if the call
- // is a property call.
- CallKind call_kind = (expr->expression()->AsProperty() == NULL)
- ? CALL_AS_FUNCTION
- : CALL_AS_METHOD;
-
// Precondition: call is monomorphic and we have found a target with the
// appropriate arity.
Handle<JSFunction> caller = info()->closure();
- Handle<JSFunction> target = expr->target();
Handle<SharedFunctionInfo> target_shared(target->shared());
// Do a quick check on source code length to avoid parsing large
// inlining candidates.
- if (FLAG_limit_inlining && target->shared()->SourceSize() > kMaxSourceSize) {
+ if ((FLAG_limit_inlining && target_shared->SourceSize() > kMaxSourceSize)
+ || target_shared->SourceSize() > kUnlimitedMaxSourceSize) {
TraceInline(target, caller, "target text too big");
return false;
}
@@ -4602,6 +5246,17 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
TraceInline(target, caller, "target not inlineable");
return false;
}
+ if (target_shared->dont_inline() || target_shared->dont_optimize()) {
+ TraceInline(target, caller, "target contains unsupported syntax [early]");
+ return false;
+ }
+
+ int nodes_added = target_shared->ast_node_count();
+ if ((FLAG_limit_inlining && nodes_added > kMaxInlinedSize) ||
+ nodes_added > kUnlimitedMaxInlinedSize) {
+ TraceInline(target, caller, "target AST is too large [early]");
+ return false;
+ }
#if !defined(V8_TARGET_ARCH_IA32)
// Target must be able to use caller's context.
@@ -4623,7 +5278,9 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
TraceInline(target, caller, "inline depth limit reached");
return false;
}
- current_level++;
+ if (env->outer()->frame_type() == JS_FUNCTION) {
+ current_level++;
+ }
env = env->outer();
}
@@ -4638,21 +5295,20 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
}
// We don't want to add more than a certain number of nodes from inlining.
- if (FLAG_limit_inlining && inlined_count_ > kMaxInlinedNodes) {
+ if ((FLAG_limit_inlining && inlined_count_ > kMaxInlinedNodes) ||
+ inlined_count_ > kUnlimitedMaxInlinedNodes) {
TraceInline(target, caller, "cumulative AST node limit reached");
return false;
}
- int count_before = AstNode::Count();
-
// Parse and allocate variables.
CompilationInfo target_info(target);
- if (!ParserApi::Parse(&target_info) ||
+ if (!ParserApi::Parse(&target_info, kNoParsingFlags) ||
!Scope::Analyze(&target_info)) {
if (target_info.isolate()->has_pending_exception()) {
// Parse or scope error, never optimize this function.
SetStackOverflow();
- target_shared->DisableOptimization(*target);
+ target_shared->DisableOptimization();
}
TraceInline(target, caller, "parse failure");
return false;
@@ -4664,22 +5320,37 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
}
FunctionLiteral* function = target_info.function();
- // Count the number of AST nodes added by inlining this call.
- int nodes_added = AstNode::Count() - count_before;
- if (FLAG_limit_inlining && nodes_added > kMaxInlinedSize) {
- TraceInline(target, caller, "target AST is too large");
+ // The following conditions must be checked again after re-parsing, because
+ // earlier the information might not have been complete due to lazy parsing.
+ nodes_added = function->ast_node_count();
+ if ((FLAG_limit_inlining && nodes_added > kMaxInlinedSize) ||
+ nodes_added > kUnlimitedMaxInlinedSize) {
+ TraceInline(target, caller, "target AST is too large [late]");
return false;
}
-
- // Don't inline functions that uses the arguments object or that
- // have a mismatching number of parameters.
- int arity = expr->arguments()->length();
- if (function->scope()->arguments() != NULL ||
- arity != target_shared->formal_parameter_count()) {
- TraceInline(target, caller, "target requires special argument handling");
+ AstProperties::Flags* flags(function->flags());
+ if (flags->Contains(kDontInline) || flags->Contains(kDontOptimize)) {
+ TraceInline(target, caller, "target contains unsupported syntax [late]");
return false;
}
+ // If the function uses the arguments object check that inlining of functions
+ // with arguments object is enabled and the arguments-variable is
+ // stack allocated.
+ if (function->scope()->arguments() != NULL) {
+ if (!FLAG_inline_arguments) {
+ TraceInline(target, caller, "target uses arguments object");
+ return false;
+ }
+
+ if (!function->scope()->arguments()->IsStackAllocated()) {
+ TraceInline(target,
+ caller,
+ "target uses non-stackallocated arguments object");
+ return false;
+ }
+ }
+
// All declarations must be inlineable.
ZoneList<Declaration*>* decls = target_info.scope()->declarations();
int decl_count = decls->length();
@@ -4689,13 +5360,6 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
return false;
}
}
- // All statements in the body must be inlineable.
- for (int i = 0, count = function->body()->length(); i < count; ++i) {
- if (!function->body()->at(i)->IsInlineable()) {
- TraceInline(target, caller, "target contains unsupported syntax");
- return false;
- }
- }
// Generate the deoptimization data for the unoptimized version of
// the target function if we don't already have it.
@@ -4707,11 +5371,11 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
TraceInline(target, caller, "could not generate deoptimization info");
return false;
}
- if (target_shared->scope_info() == SerializedScopeInfo::Empty()) {
+ if (target_shared->scope_info() == ScopeInfo::Empty()) {
// The scope info might not have been set if a lazily compiled
// function is inlined before being called for the first time.
- Handle<SerializedScopeInfo> target_scope_info =
- SerializedScopeInfo::Create(target_info.scope());
+ Handle<ScopeInfo> target_scope_info =
+ ScopeInfo::Create(target_info.scope());
target_shared->set_scope_info(*target_scope_info);
}
target_shared->EnableDeoptimizationSupport(*target_info.code());
@@ -4733,15 +5397,17 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
isolate());
// The function state is new-allocated because we need to delete it
// in two different places.
- FunctionState* target_state =
- new FunctionState(this, &target_info, &target_oracle, drop_extra);
+ FunctionState* target_state = new FunctionState(
+ this, &target_info, &target_oracle, return_handling);
HConstant* undefined = graph()->GetConstantUndefined();
HEnvironment* inner_env =
environment()->CopyForInlining(target,
+ arguments->length(),
function,
undefined,
- call_kind);
+ call_kind,
+ function_state()->is_construct());
#ifdef V8_TARGET_ARCH_IA32
// IA32 only, overwrite the caller's context in the deoptimization
// environment with the correct one.
@@ -4753,20 +5419,27 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
AddInstruction(context);
inner_env->BindContext(context);
#endif
- HBasicBlock* body_entry = CreateBasicBlock(inner_env);
- current_block()->Goto(body_entry);
- body_entry->SetJoinId(expr->ReturnId());
- set_current_block(body_entry);
+ AddSimulate(return_id);
+ current_block()->UpdateEnvironment(inner_env);
AddInstruction(new(zone()) HEnterInlined(target,
+ arguments->length(),
function,
- call_kind));
+ call_kind,
+ function_state()->is_construct(),
+ function->scope()->arguments()));
+ // If the function uses arguments object create and bind one.
+ if (function->scope()->arguments() != NULL) {
+ ASSERT(function->scope()->arguments()->IsStackAllocated());
+ environment()->Bind(function->scope()->arguments(),
+ graph()->GetArgumentsObject());
+ }
VisitDeclarations(target_info.scope()->declarations());
VisitStatements(function->body());
if (HasStackOverflow()) {
// Bail out if the inline function did, as we cannot residualize a call
// instead.
TraceInline(target, caller, "inline graph construction failed");
- target_shared->DisableOptimization(*target);
+ target_shared->DisableOptimization();
inline_bailout_ = true;
delete target_state;
return true;
@@ -4778,32 +5451,27 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
TraceInline(target, caller, NULL);
if (current_block() != NULL) {
- // Add a return of undefined if control can fall off the body. In a
- // test context, undefined is false.
- if (inlined_test_context() == NULL) {
+ // Add default return value (i.e. undefined for normals calls or the newly
+ // allocated receiver for construct calls) if control can fall off the
+ // body. In a test context, undefined is false and any JSObject is true.
+ if (call_context()->IsValue()) {
ASSERT(function_return() != NULL);
- ASSERT(call_context()->IsEffect() || call_context()->IsValue());
- if (call_context()->IsEffect()) {
- current_block()->Goto(function_return(), drop_extra);
- } else {
- current_block()->AddLeaveInlined(undefined,
- function_return(),
- drop_extra);
- }
+ HValue* return_value = function_state()->is_construct()
+ ? receiver
+ : undefined;
+ current_block()->AddLeaveInlined(return_value,
+ function_return(),
+ function_state()->drop_extra());
+ } else if (call_context()->IsEffect()) {
+ ASSERT(function_return() != NULL);
+ current_block()->Goto(function_return(), function_state()->drop_extra());
} else {
- // The graph builder assumes control can reach both branches of a
- // test, so we materialize the undefined value and test it rather than
- // simply jumping to the false target.
- //
- // TODO(3168478): refactor to avoid this.
ASSERT(call_context()->IsTest());
- HBasicBlock* empty_true = graph()->CreateBasicBlock();
- HBasicBlock* empty_false = graph()->CreateBasicBlock();
- HBranch* test = new(zone()) HBranch(undefined, empty_true, empty_false);
- current_block()->Finish(test);
-
- empty_true->Goto(inlined_test_context()->if_true(), drop_extra);
- empty_false->Goto(inlined_test_context()->if_false(), drop_extra);
+ ASSERT(inlined_test_context() != NULL);
+ HBasicBlock* target = function_state()->is_construct()
+ ? inlined_test_context()->if_true()
+ : inlined_test_context()->if_false();
+ current_block()->Goto(target, function_state()->drop_extra());
}
}
@@ -4819,12 +5487,12 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
// Forward to the real test context.
if (if_true->HasPredecessor()) {
- if_true->SetJoinId(expr->id());
+ if_true->SetJoinId(ast_id);
HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
if_true->Goto(true_target, function_state()->drop_extra());
}
if (if_false->HasPredecessor()) {
- if_false->SetJoinId(expr->id());
+ if_false->SetJoinId(ast_id);
HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
if_false->Goto(false_target, function_state()->drop_extra());
}
@@ -4832,7 +5500,7 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
return true;
} else if (function_return()->HasPredecessor()) {
- function_return()->SetJoinId(expr->id());
+ function_return()->SetJoinId(ast_id);
set_current_block(function_return());
} else {
set_current_block(NULL);
@@ -4842,10 +5510,69 @@ bool HGraphBuilder::TryInline(Call* expr, bool drop_extra) {
}
-bool HGraphBuilder::TryInlineBuiltinFunction(Call* expr,
- HValue* receiver,
- Handle<Map> receiver_map,
- CheckType check_type) {
+bool HGraphBuilder::TryInlineCall(Call* expr, bool drop_extra) {
+ // The function call we are inlining is a method call if the call
+ // is a property call.
+ CallKind call_kind = (expr->expression()->AsProperty() == NULL)
+ ? CALL_AS_FUNCTION
+ : CALL_AS_METHOD;
+
+ return TryInline(call_kind,
+ expr->target(),
+ expr->arguments(),
+ NULL,
+ expr->id(),
+ expr->ReturnId(),
+ drop_extra ? DROP_EXTRA_ON_RETURN : NORMAL_RETURN);
+}
+
+
+bool HGraphBuilder::TryInlineConstruct(CallNew* expr, HValue* receiver) {
+ return TryInline(CALL_AS_FUNCTION,
+ expr->target(),
+ expr->arguments(),
+ receiver,
+ expr->id(),
+ expr->ReturnId(),
+ CONSTRUCT_CALL_RETURN);
+}
+
+
+bool HGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra) {
+ if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
+ BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
+ switch (id) {
+ case kMathRound:
+ case kMathAbs:
+ case kMathSqrt:
+ case kMathLog:
+ case kMathSin:
+ case kMathCos:
+ case kMathTan:
+ if (expr->arguments()->length() == 1) {
+ HValue* argument = Pop();
+ HValue* context = environment()->LookupContext();
+ Drop(1); // Receiver.
+ HUnaryMathOperation* op =
+ new(zone()) HUnaryMathOperation(context, argument, id);
+ op->set_position(expr->position());
+ if (drop_extra) Drop(1); // Optionally drop the function.
+ ast_context()->ReturnInstruction(op, expr->id());
+ return true;
+ }
+ break;
+ default:
+ // Not supported for inlining yet.
+ break;
+ }
+ return false;
+}
+
+
+bool HGraphBuilder::TryInlineBuiltinMethodCall(Call* expr,
+ HValue* receiver,
+ Handle<Map> receiver_map,
+ CheckType check_type) {
ASSERT(check_type != RECEIVER_MAP_CHECK || !receiver_map.is_null());
// Try to inline calls like Math.* as operations in the calling function.
if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
@@ -4882,6 +5609,7 @@ bool HGraphBuilder::TryInlineBuiltinFunction(Call* expr,
case kMathLog:
case kMathSin:
case kMathCos:
+ case kMathTan:
if (argument_count == 2 && check_type == RECEIVER_MAP_CHECK) {
AddCheckConstantFunction(expr, receiver, receiver_map, true);
HValue* argument = Pop();
@@ -4936,6 +5664,100 @@ bool HGraphBuilder::TryInlineBuiltinFunction(Call* expr,
return true;
}
break;
+ case kMathRandom:
+ if (argument_count == 1 && check_type == RECEIVER_MAP_CHECK) {
+ AddCheckConstantFunction(expr, receiver, receiver_map, true);
+ Drop(1); // Receiver.
+ HValue* context = environment()->LookupContext();
+ HGlobalObject* global_object = new(zone()) HGlobalObject(context);
+ AddInstruction(global_object);
+ HRandom* result = new(zone()) HRandom(global_object);
+ ast_context()->ReturnInstruction(result, expr->id());
+ return true;
+ }
+ break;
+ case kMathMax:
+ case kMathMin:
+ if (argument_count == 3 && check_type == RECEIVER_MAP_CHECK) {
+ AddCheckConstantFunction(expr, receiver, receiver_map, true);
+ HValue* right = Pop();
+ HValue* left = Pop();
+ Pop(); // Pop receiver.
+
+ HValue* left_operand = left;
+ HValue* right_operand = right;
+
+ // If we do not have two integers, we convert to double for comparison.
+ if (!left->representation().IsInteger32() ||
+ !right->representation().IsInteger32()) {
+ if (!left->representation().IsDouble()) {
+ HChange* left_convert = new(zone()) HChange(
+ left,
+ Representation::Double(),
+ false, // Do not truncate when converting to double.
+ true); // Deoptimize for undefined.
+ left_convert->SetFlag(HValue::kBailoutOnMinusZero);
+ left_operand = AddInstruction(left_convert);
+ }
+ if (!right->representation().IsDouble()) {
+ HChange* right_convert = new(zone()) HChange(
+ right,
+ Representation::Double(),
+ false, // Do not truncate when converting to double.
+ true); // Deoptimize for undefined.
+ right_convert->SetFlag(HValue::kBailoutOnMinusZero);
+ right_operand = AddInstruction(right_convert);
+ }
+ }
+
+ ASSERT(left_operand->representation().Equals(
+ right_operand->representation()));
+ ASSERT(!left_operand->representation().IsTagged());
+
+ Token::Value op = (id == kMathMin) ? Token::LT : Token::GT;
+
+ HCompareIDAndBranch* compare =
+ new(zone()) HCompareIDAndBranch(left_operand, right_operand, op);
+ compare->SetInputRepresentation(left_operand->representation());
+
+ HBasicBlock* return_left = graph()->CreateBasicBlock();
+ HBasicBlock* return_right = graph()->CreateBasicBlock();
+
+ compare->SetSuccessorAt(0, return_left);
+ compare->SetSuccessorAt(1, return_right);
+ current_block()->Finish(compare);
+
+ set_current_block(return_left);
+ Push(left);
+ set_current_block(return_right);
+ // The branch above always returns the right operand if either of
+ // them is NaN, but the spec requires that max/min(NaN, X) = NaN.
+ // We add another branch that checks if the left operand is NaN or not.
+ if (left_operand->representation().IsDouble()) {
+ // If left_operand != left_operand then it is NaN.
+ HCompareIDAndBranch* compare_nan = new(zone()) HCompareIDAndBranch(
+ left_operand, left_operand, Token::EQ);
+ compare_nan->SetInputRepresentation(left_operand->representation());
+ HBasicBlock* left_is_number = graph()->CreateBasicBlock();
+ HBasicBlock* left_is_nan = graph()->CreateBasicBlock();
+ compare_nan->SetSuccessorAt(0, left_is_number);
+ compare_nan->SetSuccessorAt(1, left_is_nan);
+ current_block()->Finish(compare_nan);
+ set_current_block(left_is_nan);
+ Push(left);
+ set_current_block(left_is_number);
+ Push(right);
+ return_right = CreateJoin(left_is_number, left_is_nan, expr->id());
+ } else {
+ Push(right);
+ }
+
+ HBasicBlock* join = CreateJoin(return_left, return_right, expr->id());
+ set_current_block(join);
+ ast_context()->ReturnValue(Pop());
+ return true;
+ }
+ break;
default:
// Not yet supported for inlining.
break;
@@ -4969,13 +5791,6 @@ bool HGraphBuilder::TryCallApply(Call* expr) {
HValue* arg_two_value = environment()->Lookup(arg_two->var());
if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;
- // Our implementation of arguments (based on this stack frame or an
- // adapter below it) does not work for inlined functions.
- if (function_state()->outer() != NULL) {
- Bailout("Function.prototype.apply optimization in inlined function");
- return true;
- }
-
// Found pattern f.apply(receiver, arguments).
VisitForValue(prop->obj());
if (HasStackOverflow() || current_block() == NULL) return true;
@@ -4986,13 +5801,46 @@ bool HGraphBuilder::TryCallApply(Call* expr) {
VisitForValue(args->at(0));
if (HasStackOverflow() || current_block() == NULL) return true;
HValue* receiver = Pop();
- HInstruction* elements = AddInstruction(new(zone()) HArgumentsElements);
- HInstruction* length = AddInstruction(new(zone()) HArgumentsLength(elements));
- HInstruction* result =
- new(zone()) HApplyArguments(function, receiver, length, elements);
- result->set_position(expr->position());
- ast_context()->ReturnInstruction(result, expr->id());
- return true;
+
+ if (function_state()->outer() == NULL) {
+ HInstruction* elements = AddInstruction(new(zone()) HArgumentsElements);
+ HInstruction* length =
+ AddInstruction(new(zone()) HArgumentsLength(elements));
+ HValue* wrapped_receiver =
+ AddInstruction(new(zone()) HWrapReceiver(receiver, function));
+ HInstruction* result =
+ new(zone()) HApplyArguments(function,
+ wrapped_receiver,
+ length,
+ elements);
+ result->set_position(expr->position());
+ ast_context()->ReturnInstruction(result, expr->id());
+ return true;
+ } else {
+ // We are inside inlined function and we know exactly what is inside
+ // arguments object.
+ HValue* context = environment()->LookupContext();
+
+ HValue* wrapped_receiver =
+ AddInstruction(new(zone()) HWrapReceiver(receiver, function));
+ PushAndAdd(new(zone()) HPushArgument(wrapped_receiver));
+
+ HEnvironment* arguments_env = environment()->arguments_environment();
+
+ int parameter_count = arguments_env->parameter_count();
+ for (int i = 1; i < arguments_env->parameter_count(); i++) {
+ PushAndAdd(new(zone()) HPushArgument(arguments_env->Lookup(i)));
+ }
+
+ HInvokeFunction* call = new(zone()) HInvokeFunction(
+ context,
+ function,
+ parameter_count);
+ Drop(parameter_count);
+ call->set_position(expr->position());
+ ast_context()->ReturnInstruction(call, expr->id());
+ return true;
+ }
}
@@ -5044,10 +5892,15 @@ void HGraphBuilder::VisitCall(Call* expr) {
Handle<Map> receiver_map = (types == NULL || types->is_empty())
? Handle<Map>::null()
: types->first();
- if (TryInlineBuiltinFunction(expr,
- receiver,
- receiver_map,
- expr->check_type())) {
+ if (TryInlineBuiltinMethodCall(expr,
+ receiver,
+ receiver_map,
+ expr->check_type())) {
+ if (FLAG_trace_inlining) {
+ PrintF("Inlining builtin ");
+ expr->target()->ShortPrint();
+ PrintF("\n");
+ }
return;
}
@@ -5062,7 +5915,7 @@ void HGraphBuilder::VisitCall(Call* expr) {
} else {
AddCheckConstantFunction(expr, receiver, receiver_map, true);
- if (TryInline(expr)) return;
+ if (TryInlineCall(expr)) return;
call = PreProcessCall(
new(zone()) HCallConstantFunction(expr->target(),
argument_count));
@@ -5118,19 +5971,25 @@ void HGraphBuilder::VisitCall(Call* expr) {
IsGlobalObject());
environment()->SetExpressionStackAt(receiver_index, global_receiver);
- if (TryInline(expr)) return;
+ if (TryInlineBuiltinFunctionCall(expr, false)) { // Nothing to drop.
+ if (FLAG_trace_inlining) {
+ PrintF("Inlining builtin ");
+ expr->target()->ShortPrint();
+ PrintF("\n");
+ }
+ return;
+ }
+ if (TryInlineCall(expr)) return;
call = PreProcessCall(new(zone()) HCallKnownGlobal(expr->target(),
argument_count));
} else {
HValue* context = environment()->LookupContext();
HGlobalObject* receiver = new(zone()) HGlobalObject(context);
- if (var->is_qml_global()) receiver->set_qml_global(true);
AddInstruction(receiver);
PushAndAdd(new(zone()) HPushArgument(receiver));
CHECK_ALIVE(VisitArgumentList(expr->arguments()));
call = new(zone()) HCallGlobal(context, var->name(), argument_count);
- if (var->is_qml_global()) static_cast<HCallGlobal*>(call)->set_qml_global(true);
Drop(argument_count);
}
@@ -5146,7 +6005,17 @@ void HGraphBuilder::VisitCall(Call* expr) {
PushAndAdd(receiver);
CHECK_ALIVE(VisitExpressions(expr->arguments()));
AddInstruction(new(zone()) HCheckFunction(function, expr->target()));
- if (TryInline(expr, true)) { // Drop function from environment.
+
+ if (TryInlineBuiltinFunctionCall(expr, true)) { // Drop the function.
+ if (FLAG_trace_inlining) {
+ PrintF("Inlining builtin ");
+ expr->target()->ShortPrint();
+ PrintF("\n");
+ }
+ return;
+ }
+
+ if (TryInlineCall(expr, true)) { // Drop function from environment.
return;
} else {
call = PreProcessCall(new(zone()) HInvokeFunction(context,
@@ -5156,7 +6025,8 @@ void HGraphBuilder::VisitCall(Call* expr) {
}
} else {
- CHECK_ALIVE(VisitArgument(expr->expression()));
+ CHECK_ALIVE(VisitForValue(expr->expression()));
+ HValue* function = Top();
HValue* context = environment()->LookupContext();
HGlobalObject* global_object = new(zone()) HGlobalObject(context);
HGlobalReceiver* receiver = new(zone()) HGlobalReceiver(global_object);
@@ -5165,9 +6035,7 @@ void HGraphBuilder::VisitCall(Call* expr) {
PushAndAdd(new(zone()) HPushArgument(receiver));
CHECK_ALIVE(VisitArgumentList(expr->arguments()));
- // The function to call is treated as an argument to the call function
- // stub.
- call = new(zone()) HCallFunction(context, argument_count + 1);
+ call = new(zone()) HCallFunction(context, function, argument_count);
Drop(argument_count + 1);
}
}
@@ -5177,25 +6045,73 @@ void HGraphBuilder::VisitCall(Call* expr) {
}
+// Checks whether allocation using the given constructor can be inlined.
+static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
+ return constructor->has_initial_map() &&
+ constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
+ constructor->initial_map()->instance_size() < HAllocateObject::kMaxSize;
+}
+
+
void HGraphBuilder::VisitCallNew(CallNew* expr) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- // The constructor function is also used as the receiver argument to the
- // JS construct call builtin.
- HValue* constructor = NULL;
- CHECK_ALIVE(constructor = VisitArgument(expr->expression()));
- CHECK_ALIVE(VisitArgumentList(expr->arguments()));
-
+ expr->RecordTypeFeedback(oracle());
+ int argument_count = expr->arguments()->length() + 1; // Plus constructor.
HValue* context = environment()->LookupContext();
- // The constructor is both an operand to the instruction and an argument
- // to the construct call.
- int arg_count = expr->arguments()->length() + 1; // Plus constructor.
- HCallNew* call = new(zone()) HCallNew(context, constructor, arg_count);
- call->set_position(expr->position());
- Drop(arg_count);
- return ast_context()->ReturnInstruction(call, expr->id());
+ if (FLAG_inline_construct &&
+ expr->IsMonomorphic() &&
+ IsAllocationInlineable(expr->target())) {
+ // The constructor function is on the stack in the unoptimized code
+ // during evaluation of the arguments.
+ CHECK_ALIVE(VisitForValue(expr->expression()));
+ HValue* function = Top();
+ CHECK_ALIVE(VisitExpressions(expr->arguments()));
+ Handle<JSFunction> constructor = expr->target();
+ HValue* check = AddInstruction(
+ new(zone()) HCheckFunction(function, constructor));
+
+ // Force completion of inobject slack tracking before generating
+ // allocation code to finalize instance size.
+ if (constructor->shared()->IsInobjectSlackTrackingInProgress()) {
+ constructor->shared()->CompleteInobjectSlackTracking();
+ }
+
+ // Replace the constructor function with a newly allocated receiver.
+ HInstruction* receiver = new(zone()) HAllocateObject(context, constructor);
+ // Index of the receiver from the top of the expression stack.
+ const int receiver_index = argument_count - 1;
+ AddInstruction(receiver);
+ ASSERT(environment()->ExpressionStackAt(receiver_index) == function);
+ environment()->SetExpressionStackAt(receiver_index, receiver);
+
+ if (TryInlineConstruct(expr, receiver)) return;
+
+ // TODO(mstarzinger): For now we remove the previous HAllocateObject and
+ // add HPushArgument for the arguments in case inlining failed. What we
+ // actually should do is emit HInvokeFunction on the constructor instead
+ // of using HCallNew as a fallback.
+ receiver->DeleteAndReplaceWith(NULL);
+ check->DeleteAndReplaceWith(NULL);
+ environment()->SetExpressionStackAt(receiver_index, function);
+ HInstruction* call = PreProcessCall(
+ new(zone()) HCallNew(context, function, argument_count));
+ call->set_position(expr->position());
+ return ast_context()->ReturnInstruction(call, expr->id());
+ } else {
+ // The constructor function is both an operand to the instruction and an
+ // argument to the construct call.
+ HValue* constructor = NULL;
+ CHECK_ALIVE(constructor = VisitArgument(expr->expression()));
+ CHECK_ALIVE(VisitArgumentList(expr->arguments()));
+ HInstruction* call =
+ new(zone()) HCallNew(context, constructor, argument_count);
+ Drop(argument_count);
+ call->set_position(expr->position());
+ return ast_context()->ReturnInstruction(call, expr->id());
+ }
}
@@ -5501,8 +6417,11 @@ void HGraphBuilder::VisitCountOperation(CountOperation* expr) {
}
HValue* context = BuildContextChainWalk(var);
+ HStoreContextSlot::Mode mode =
+ (var->mode() == LET || var->mode() == CONST_HARMONY)
+ ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck;
HStoreContextSlot* instr =
- new(zone()) HStoreContextSlot(context, var->index(), after);
+ new(zone()) HStoreContextSlot(context, var->index(), mode, after);
AddInstruction(instr);
if (instr->HasObservableSideEffects()) {
AddSimulate(expr->AssignmentId());
@@ -5625,34 +6544,34 @@ HInstruction* HGraphBuilder::BuildBinaryOperation(BinaryOperation* expr,
AddInstruction(HCheckInstanceType::NewIsString(right));
instr = new(zone()) HStringAdd(context, left, right);
} else {
- instr = new(zone()) HAdd(context, left, right);
+ instr = HAdd::NewHAdd(zone(), context, left, right);
}
break;
case Token::SUB:
- instr = new(zone()) HSub(context, left, right);
+ instr = HSub::NewHSub(zone(), context, left, right);
break;
case Token::MUL:
- instr = new(zone()) HMul(context, left, right);
+ instr = HMul::NewHMul(zone(), context, left, right);
break;
case Token::MOD:
- instr = new(zone()) HMod(context, left, right);
+ instr = HMod::NewHMod(zone(), context, left, right);
break;
case Token::DIV:
- instr = new(zone()) HDiv(context, left, right);
+ instr = HDiv::NewHDiv(zone(), context, left, right);
break;
case Token::BIT_XOR:
case Token::BIT_AND:
case Token::BIT_OR:
- instr = new(zone()) HBitwise(expr->op(), context, left, right);
+ instr = HBitwise::NewHBitwise(zone(), expr->op(), context, left, right);
break;
case Token::SAR:
- instr = new(zone()) HSar(context, left, right);
+ instr = HSar::NewHSar(zone(), context, left, right);
break;
case Token::SHR:
- instr = new(zone()) HShr(context, left, right);
+ instr = HShr::NewHShr(zone(), context, left, right);
break;
case Token::SHL:
- instr = new(zone()) HShl(context, left, right);
+ instr = HShl::NewHShl(zone(), context, left, right);
break;
default:
UNREACHABLE();
@@ -5908,6 +6827,15 @@ static bool IsLiteralCompareNil(HValue* left,
}
+static bool IsLiteralCompareBool(HValue* left,
+ Token::Value op,
+ HValue* right) {
+ return op == Token::EQ_STRICT &&
+ ((left->IsConstant() && HConstant::cast(left)->handle()->IsBoolean()) ||
+ (right->IsConstant() && HConstant::cast(right)->handle()->IsBoolean()));
+}
+
+
void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
@@ -5955,6 +6883,12 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
if (IsLiteralCompareNil(left, op, right, f->null_value(), &sub_expr)) {
return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
}
+ if (IsLiteralCompareBool(left, op, right)) {
+ HCompareObjectEqAndBranch* result =
+ new(zone()) HCompareObjectEqAndBranch(left, right);
+ result->set_position(expr->position());
+ return ast_context()->ReturnControl(result, expr->id());
+ }
if (op == Token::INSTANCEOF) {
// Check to see if the rhs of the instanceof is a global function not
@@ -5970,7 +6904,7 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
Handle<GlobalObject> global(info()->global_object());
LookupResult lookup(isolate());
global->Lookup(*name, &lookup);
- if (lookup.IsProperty() &&
+ if (lookup.IsFound() &&
lookup.type() == NORMAL &&
lookup.GetValue()->IsJSFunction()) {
Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue()));
@@ -6003,14 +6937,27 @@ void HGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
switch (op) {
case Token::EQ:
case Token::EQ_STRICT: {
- AddInstruction(new(zone()) HCheckNonSmi(left));
- AddInstruction(HCheckInstanceType::NewIsSpecObject(left));
- AddInstruction(new(zone()) HCheckNonSmi(right));
- AddInstruction(HCheckInstanceType::NewIsSpecObject(right));
- HCompareObjectEqAndBranch* result =
- new(zone()) HCompareObjectEqAndBranch(left, right);
- result->set_position(expr->position());
- return ast_context()->ReturnControl(result, expr->id());
+ // Can we get away with map check and not instance type check?
+ Handle<Map> map = oracle()->GetCompareMap(expr);
+ if (!map.is_null()) {
+ AddInstruction(new(zone()) HCheckNonSmi(left));
+ AddInstruction(HCheckMaps::NewWithTransitions(left, map));
+ AddInstruction(new(zone()) HCheckNonSmi(right));
+ AddInstruction(HCheckMaps::NewWithTransitions(right, map));
+ HCompareObjectEqAndBranch* result =
+ new(zone()) HCompareObjectEqAndBranch(left, right);
+ result->set_position(expr->position());
+ return ast_context()->ReturnControl(result, expr->id());
+ } else {
+ AddInstruction(new(zone()) HCheckNonSmi(left));
+ AddInstruction(HCheckInstanceType::NewIsSpecObject(left));
+ AddInstruction(new(zone()) HCheckNonSmi(right));
+ AddInstruction(HCheckInstanceType::NewIsSpecObject(right));
+ HCompareObjectEqAndBranch* result =
+ new(zone()) HCompareObjectEqAndBranch(left, right);
+ result->set_position(expr->position());
+ return ast_context()->ReturnControl(result, expr->id());
+ }
}
default:
return Bailout("Unsupported non-primitive compare");
@@ -6067,36 +7014,86 @@ void HGraphBuilder::VisitThisFunction(ThisFunction* expr) {
}
-void HGraphBuilder::VisitDeclaration(Declaration* decl) {
- HandleDeclaration(decl->proxy(), decl->mode(), decl->fun());
+void HGraphBuilder::VisitDeclarations(ZoneList<Declaration*>* declarations) {
+ int length = declarations->length();
+ int global_count = 0;
+ for (int i = 0; i < declarations->length(); i++) {
+ Declaration* decl = declarations->at(i);
+ FunctionDeclaration* fun_decl = decl->AsFunctionDeclaration();
+ HandleDeclaration(decl->proxy(),
+ decl->mode(),
+ fun_decl != NULL ? fun_decl->fun() : NULL,
+ &global_count);
+ }
+
+ // Batch declare global functions and variables.
+ if (global_count > 0) {
+ Handle<FixedArray> array =
+ isolate()->factory()->NewFixedArray(2 * global_count, TENURED);
+ for (int j = 0, i = 0; i < length; i++) {
+ Declaration* decl = declarations->at(i);
+ Variable* var = decl->proxy()->var();
+
+ if (var->IsUnallocated()) {
+ array->set(j++, *(var->name()));
+ FunctionDeclaration* fun_decl = decl->AsFunctionDeclaration();
+ if (fun_decl == NULL) {
+ if (var->binding_needs_init()) {
+ // In case this binding needs initialization use the hole.
+ array->set_the_hole(j++);
+ } else {
+ array->set_undefined(j++);
+ }
+ } else {
+ Handle<SharedFunctionInfo> function =
+ Compiler::BuildFunctionInfo(fun_decl->fun(), info()->script());
+ // Check for stack-overflow exception.
+ if (function.is_null()) {
+ SetStackOverflow();
+ return;
+ }
+ array->set(j++, *function);
+ }
+ }
+ }
+ int flags = DeclareGlobalsEvalFlag::encode(info()->is_eval()) |
+ DeclareGlobalsNativeFlag::encode(info()->is_native()) |
+ DeclareGlobalsLanguageMode::encode(info()->language_mode());
+ HInstruction* result =
+ new(zone()) HDeclareGlobals(environment()->LookupContext(),
+ array,
+ flags);
+ AddInstruction(result);
+ }
}
void HGraphBuilder::HandleDeclaration(VariableProxy* proxy,
VariableMode mode,
- FunctionLiteral* function) {
- if (mode == LET || mode == CONST_HARMONY) {
- return Bailout("unsupported harmony declaration");
- }
+ FunctionLiteral* function,
+ int* global_count) {
Variable* var = proxy->var();
+ bool binding_needs_init =
+ (mode == CONST || mode == CONST_HARMONY || mode == LET);
switch (var->location()) {
case Variable::UNALLOCATED:
- return Bailout("unsupported global declaration");
+ ++(*global_count);
+ return;
case Variable::PARAMETER:
case Variable::LOCAL:
case Variable::CONTEXT:
- if (mode == CONST || function != NULL) {
+ if (binding_needs_init || function != NULL) {
HValue* value = NULL;
- if (mode == CONST) {
- value = graph()->GetConstantHole();
- } else {
- VisitForValue(function);
+ if (function != NULL) {
+ CHECK_ALIVE(VisitForValue(function));
value = Pop();
+ } else {
+ value = graph()->GetConstantHole();
}
if (var->IsContextSlot()) {
HValue* context = environment()->LookupContext();
- HStoreContextSlot* store =
- new HStoreContextSlot(context, var->index(), value);
+ HStoreContextSlot* store = new HStoreContextSlot(
+ context, var->index(), HStoreContextSlot::kNoCheck, value);
AddInstruction(store);
if (store->HasObservableSideEffects()) AddSimulate(proxy->id());
} else {
@@ -6110,6 +7107,51 @@ void HGraphBuilder::HandleDeclaration(VariableProxy* proxy,
}
+void HGraphBuilder::VisitVariableDeclaration(VariableDeclaration* decl) {
+ UNREACHABLE();
+}
+
+
+void HGraphBuilder::VisitFunctionDeclaration(FunctionDeclaration* decl) {
+ UNREACHABLE();
+}
+
+
+void HGraphBuilder::VisitModuleDeclaration(ModuleDeclaration* decl) {
+ UNREACHABLE();
+}
+
+
+void HGraphBuilder::VisitImportDeclaration(ImportDeclaration* decl) {
+ UNREACHABLE();
+}
+
+
+void HGraphBuilder::VisitExportDeclaration(ExportDeclaration* decl) {
+ UNREACHABLE();
+}
+
+
+void HGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) {
+ // TODO(rossberg)
+}
+
+
+void HGraphBuilder::VisitModuleVariable(ModuleVariable* module) {
+ // TODO(rossberg)
+}
+
+
+void HGraphBuilder::VisitModulePath(ModulePath* module) {
+ // TODO(rossberg)
+}
+
+
+void HGraphBuilder::VisitModuleUrl(ModuleUrl* module) {
+ // TODO(rossberg)
+}
+
+
// Generators for inline runtime functions.
// Support for types.
void HGraphBuilder::GenerateIsSmi(CallRuntime* call) {
@@ -6208,10 +7250,11 @@ void HGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
void HGraphBuilder::GenerateIsConstructCall(CallRuntime* call) {
ASSERT(call->arguments()->length() == 0);
if (function_state()->outer() != NULL) {
- // We are generating graph for inlined function. Currently
- // constructor inlining is not supported and we can just return
- // false from %_IsConstructCall().
- return ast_context()->ReturnValue(graph()->GetConstantFalse());
+ // We are generating graph for inlined function.
+ HValue* value = function_state()->is_construct()
+ ? graph()->GetConstantTrue()
+ : graph()->GetConstantFalse();
+ return ast_context()->ReturnValue(value);
} else {
return ast_context()->ReturnControl(new(zone()) HIsConstructCallAndBranch,
call->id());
@@ -6265,8 +7308,56 @@ void HGraphBuilder::GenerateValueOf(CallRuntime* call) {
}
+void HGraphBuilder::GenerateDateField(CallRuntime* call) {
+ ASSERT(call->arguments()->length() == 2);
+ ASSERT_NE(NULL, call->arguments()->at(1)->AsLiteral());
+ Smi* index = Smi::cast(*(call->arguments()->at(1)->AsLiteral()->handle()));
+ CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+ HValue* date = Pop();
+ HDateField* result = new(zone()) HDateField(date, index);
+ return ast_context()->ReturnInstruction(result, call->id());
+}
+
+
void HGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
- return Bailout("inlined runtime function: SetValueOf");
+ ASSERT(call->arguments()->length() == 2);
+ CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
+ CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
+ HValue* value = Pop();
+ HValue* object = Pop();
+ // Check if object is a not a smi.
+ HIsSmiAndBranch* smicheck = new(zone()) HIsSmiAndBranch(object);
+ HBasicBlock* if_smi = graph()->CreateBasicBlock();
+ HBasicBlock* if_heap_object = graph()->CreateBasicBlock();
+ HBasicBlock* join = graph()->CreateBasicBlock();
+ smicheck->SetSuccessorAt(0, if_smi);
+ smicheck->SetSuccessorAt(1, if_heap_object);
+ current_block()->Finish(smicheck);
+ if_smi->Goto(join);
+
+ // Check if object is a JSValue.
+ set_current_block(if_heap_object);
+ HHasInstanceTypeAndBranch* typecheck =
+ new(zone()) HHasInstanceTypeAndBranch(object, JS_VALUE_TYPE);
+ HBasicBlock* if_js_value = graph()->CreateBasicBlock();
+ HBasicBlock* not_js_value = graph()->CreateBasicBlock();
+ typecheck->SetSuccessorAt(0, if_js_value);
+ typecheck->SetSuccessorAt(1, not_js_value);
+ current_block()->Finish(typecheck);
+ not_js_value->Goto(join);
+
+ // Create in-object property store to kValueOffset.
+ set_current_block(if_js_value);
+ Handle<String> name = isolate()->factory()->undefined_symbol();
+ AddInstruction(new HStoreNamedField(object,
+ name,
+ value,
+ true, // in-object store.
+ JSValue::kValueOffset));
+ if_js_value->Goto(join);
+ join->SetJoinId(call->id());
+ set_current_block(join);
+ return ast_context()->ReturnValue(value);
}
@@ -6332,7 +7423,11 @@ void HGraphBuilder::GenerateLog(CallRuntime* call) {
// Fast support for Math.random().
void HGraphBuilder::GenerateRandomHeapNumber(CallRuntime* call) {
- return Bailout("inlined runtime function: RandomHeapNumber");
+ HValue* context = environment()->LookupContext();
+ HGlobalObject* global_object = new(zone()) HGlobalObject(context);
+ AddInstruction(global_object);
+ HRandom* result = new(zone()) HRandom(global_object);
+ return ast_context()->ReturnInstruction(result, call->id());
}
@@ -6429,12 +7524,37 @@ void HGraphBuilder::GenerateCallFunction(CallRuntime* call) {
CHECK_ALIVE(VisitArgument(call->arguments()->at(i)));
}
CHECK_ALIVE(VisitForValue(call->arguments()->last()));
+
HValue* function = Pop();
HValue* context = environment()->LookupContext();
- HInvokeFunction* result =
- new(zone()) HInvokeFunction(context, function, arg_count);
+
+ // Branch for function proxies, or other non-functions.
+ HHasInstanceTypeAndBranch* typecheck =
+ new(zone()) HHasInstanceTypeAndBranch(function, JS_FUNCTION_TYPE);
+ HBasicBlock* if_jsfunction = graph()->CreateBasicBlock();
+ HBasicBlock* if_nonfunction = graph()->CreateBasicBlock();
+ HBasicBlock* join = graph()->CreateBasicBlock();
+ typecheck->SetSuccessorAt(0, if_jsfunction);
+ typecheck->SetSuccessorAt(1, if_nonfunction);
+ current_block()->Finish(typecheck);
+
+ set_current_block(if_jsfunction);
+ HInstruction* invoke_result = AddInstruction(
+ new(zone()) HInvokeFunction(context, function, arg_count));
Drop(arg_count);
- return ast_context()->ReturnInstruction(result, call->id());
+ Push(invoke_result);
+ if_jsfunction->Goto(join);
+
+ set_current_block(if_nonfunction);
+ HInstruction* call_result = AddInstruction(
+ new(zone()) HCallFunction(context, function, arg_count));
+ Drop(arg_count);
+ Push(call_result);
+ if_nonfunction->Goto(join);
+
+ set_current_block(join);
+ join->SetJoinId(call->id());
+ return ast_context()->ReturnValue(Pop());
}
@@ -6474,6 +7594,18 @@ void HGraphBuilder::GenerateMathCos(CallRuntime* call) {
}
+void HGraphBuilder::GenerateMathTan(CallRuntime* call) {
+ ASSERT_EQ(1, call->arguments()->length());
+ CHECK_ALIVE(VisitArgumentList(call->arguments()));
+ HValue* context = environment()->LookupContext();
+ HCallStub* result =
+ new(zone()) HCallStub(context, CodeStub::TranscendentalCache, 1);
+ result->set_transcendental_type(TranscendentalCache::TAN);
+ Drop(1);
+ return ast_context()->ReturnInstruction(result, call->id());
+}
+
+
void HGraphBuilder::GenerateMathLog(CallRuntime* call) {
ASSERT_EQ(1, call->arguments()->length());
CHECK_ALIVE(VisitArgumentList(call->arguments()));
@@ -6521,6 +7653,7 @@ HEnvironment::HEnvironment(HEnvironment* outer,
: closure_(closure),
values_(0),
assigned_variables_(4),
+ frame_type_(JS_FUNCTION),
parameter_count_(0),
specials_count_(1),
local_count_(0),
@@ -6535,6 +7668,7 @@ HEnvironment::HEnvironment(HEnvironment* outer,
HEnvironment::HEnvironment(const HEnvironment* other)
: values_(0),
assigned_variables_(0),
+ frame_type_(JS_FUNCTION),
parameter_count_(0),
specials_count_(1),
local_count_(0),
@@ -6546,6 +7680,23 @@ HEnvironment::HEnvironment(const HEnvironment* other)
}
+HEnvironment::HEnvironment(HEnvironment* outer,
+ Handle<JSFunction> closure,
+ FrameType frame_type,
+ int arguments)
+ : closure_(closure),
+ values_(arguments),
+ assigned_variables_(0),
+ frame_type_(frame_type),
+ parameter_count_(arguments),
+ local_count_(0),
+ outer_(outer),
+ pop_count_(0),
+ push_count_(0),
+ ast_id_(AstNode::kNoNumber) {
+}
+
+
void HEnvironment::Initialize(int parameter_count,
int local_count,
int stack_height) {
@@ -6563,6 +7714,7 @@ void HEnvironment::Initialize(const HEnvironment* other) {
closure_ = other->closure();
values_.AddAll(other->values_);
assigned_variables_.AddAll(other->assigned_variables_);
+ frame_type_ = other->frame_type_;
parameter_count_ = other->parameter_count_;
local_count_ = other->local_count_;
if (other->outer_ != NULL) outer_ = other->outer_->Copy(); // Deep copy.
@@ -6617,9 +7769,8 @@ bool HEnvironment::HasExpressionAt(int index) const {
bool HEnvironment::ExpressionStackIsEmpty() const {
- int first_expression = parameter_count() + specials_count() + local_count();
- ASSERT(length() >= first_expression);
- return length() == first_expression;
+ ASSERT(length() >= first_expression_index());
+ return length() == first_expression_index();
}
@@ -6670,32 +7821,66 @@ HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
}
+HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
+ Handle<JSFunction> target,
+ FrameType frame_type,
+ int arguments) const {
+ HEnvironment* new_env = new(closure()->GetIsolate()->zone())
+ HEnvironment(outer, target, frame_type, arguments + 1);
+ for (int i = 0; i <= arguments; ++i) { // Include receiver.
+ new_env->Push(ExpressionStackAt(arguments - i));
+ }
+ new_env->ClearHistory();
+ return new_env;
+}
+
+
HEnvironment* HEnvironment::CopyForInlining(
Handle<JSFunction> target,
+ int arguments,
FunctionLiteral* function,
HConstant* undefined,
- CallKind call_kind) const {
+ CallKind call_kind,
+ bool is_construct) const {
+ ASSERT(frame_type() == JS_FUNCTION);
+
+ Zone* zone = closure()->GetIsolate()->zone();
+
// Outer environment is a copy of this one without the arguments.
int arity = function->scope()->num_parameters();
+
HEnvironment* outer = Copy();
- outer->Drop(arity + 1); // Including receiver.
+ outer->Drop(arguments + 1); // Including receiver.
outer->ClearHistory();
- Zone* zone = closure()->GetIsolate()->zone();
+
+ if (is_construct) {
+ // Create artificial constructor stub environment. The receiver should
+ // actually be the constructor function, but we pass the newly allocated
+ // object instead, DoComputeConstructStubFrame() relies on that.
+ outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
+ }
+
+ if (arity != arguments) {
+ // Create artificial arguments adaptation environment.
+ outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
+ }
+
HEnvironment* inner =
new(zone) HEnvironment(outer, function->scope(), target);
// Get the argument values from the original environment.
for (int i = 0; i <= arity; ++i) { // Include receiver.
- HValue* push = ExpressionStackAt(arity - i);
+ HValue* push = (i <= arguments) ?
+ ExpressionStackAt(arguments - i) : undefined;
inner->SetValueAt(i, push);
}
// If the function we are inlining is a strict mode function or a
// builtin function, pass undefined as the receiver for function
// calls (instead of the global receiver).
- if ((target->shared()->native() || function->strict_mode()) &&
- call_kind == CALL_AS_FUNCTION) {
+ if ((target->shared()->native() || !function->is_classic_mode()) &&
+ call_kind == CALL_AS_FUNCTION && !is_construct) {
inner->SetValueAt(0, undefined);
}
- inner->SetValueAt(arity + 1, outer->LookupContext());
+ inner->SetValueAt(arity + 1, LookupContext());
for (int i = arity + 2; i < inner->length(); ++i) {
inner->SetValueAt(i, undefined);
}
@@ -6711,7 +7896,7 @@ void HEnvironment::PrintTo(StringStream* stream) {
if (i == parameter_count()) stream->Add("specials\n");
if (i == parameter_count() + specials_count()) stream->Add("locals\n");
if (i == parameter_count() + specials_count() + local_count()) {
- stream->Add("expressions");
+ stream->Add("expressions\n");
}
HValue* val = values_.at(i);
stream->Add("%d: ", i);
@@ -6722,6 +7907,7 @@ void HEnvironment::PrintTo(StringStream* stream) {
}
stream->Add("\n");
}
+ PrintF("\n");
}
@@ -6786,7 +7972,10 @@ void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
}
PrintEmptyProperty("xhandlers");
- PrintEmptyProperty("flags");
+ const char* flags = current->IsLoopSuccessorDominator()
+ ? "dom-loop-succ"
+ : "";
+ PrintStringProperty("flags", flags);
if (current->dominator() != NULL) {
PrintBlockProperty("dominator", current->dominator()->block_id());
@@ -6887,7 +8076,7 @@ void HTracer::TraceLiveRange(LiveRange* range, const char* type) {
PrintIndent();
trace_.Add("%d %s", range->id(), type);
if (range->HasRegisterAssigned()) {
- LOperand* op = range->CreateAssignedOperand();
+ LOperand* op = range->CreateAssignedOperand(ZONE);
int assigned_reg = op->index();
if (op->IsDoubleRegister()) {
trace_.Add(" \"%s\"",
@@ -6913,7 +8102,9 @@ void HTracer::TraceLiveRange(LiveRange* range, const char* type) {
}
LOperand* op = range->FirstHint();
int hint_index = -1;
- if (op != NULL && op->IsUnallocated()) hint_index = op->VirtualRegister();
+ if (op != NULL && op->IsUnallocated()) {
+ hint_index = LUnallocated::cast(op)->virtual_register();
+ }
trace_.Add(" %d %d", parent_index, hint_index);
UseInterval* cur_interval = range->first_interval();
while (cur_interval != NULL && range->Covers(cur_interval->start())) {
@@ -7029,7 +8220,10 @@ void HPhase::End() const {
HStatistics::Instance()->SaveTiming(name_, end - start_, size);
}
- if (FLAG_trace_hydrogen) {
+ // Produce trace output if flag is set so that the first letter of the
+ // phase name matches the command line parameter FLAG_trace_phase.
+ if (FLAG_trace_hydrogen &&
+ OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL) {
if (graph_ != NULL) HTracer::Instance()->TraceHydrogen(name_, graph_);
if (chunk_ != NULL) HTracer::Instance()->TraceLithium(name_, chunk_);
if (allocator_ != NULL) {
diff --git a/src/3rdparty/v8/src/hydrogen.h b/src/3rdparty/v8/src/hydrogen.h
index 2d08dc8..bc9bc9d 100644
--- a/src/3rdparty/v8/src/hydrogen.h
+++ b/src/3rdparty/v8/src/hydrogen.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -126,6 +126,7 @@ class HBasicBlock: public ZoneObject {
int PredecessorIndexOf(HBasicBlock* predecessor) const;
void AddSimulate(int ast_id) { AddInstruction(CreateSimulate(ast_id)); }
void AssignCommonDominator(HBasicBlock* other);
+ void AssignLoopSuccessorDominators();
void FinishExitWithDeoptimization(HDeoptimize::UseEnvironment has_uses) {
FinishExit(CreateDeoptimize(has_uses));
@@ -149,6 +150,13 @@ class HBasicBlock: public ZoneObject {
bool IsDeoptimizing() const { return is_deoptimizing_; }
void MarkAsDeoptimizing() { is_deoptimizing_ = true; }
+ bool IsLoopSuccessorDominator() const {
+ return dominates_loop_successors_;
+ }
+ void MarkAsLoopSuccessorDominator() {
+ dominates_loop_successors_ = true;
+ }
+
inline Zone* zone();
#ifdef DEBUG
@@ -182,6 +190,22 @@ class HBasicBlock: public ZoneObject {
HBasicBlock* parent_loop_header_;
bool is_inline_return_target_;
bool is_deoptimizing_;
+ bool dominates_loop_successors_;
+};
+
+
+class HPredecessorIterator BASE_EMBEDDED {
+ public:
+ explicit HPredecessorIterator(HBasicBlock* block)
+ : predecessor_list_(block->predecessors()), current_(0) { }
+
+ bool Done() { return current_ >= predecessor_list_->length(); }
+ HBasicBlock* Current() { return predecessor_list_->at(current_); }
+ void Advance() { current_++; }
+
+ private:
+ const ZoneList<HBasicBlock*>* predecessor_list_;
+ int current_;
};
@@ -269,7 +293,6 @@ class HGraph: public ZoneObject {
HArgumentsObject* GetArgumentsObject() const {
return arguments_object_.get();
}
- bool HasArgumentsObject() const { return arguments_object_.is_set(); }
void SetArgumentsObject(HArgumentsObject* object) {
arguments_object_.set(object);
@@ -290,6 +313,26 @@ class HGraph: public ZoneObject {
void Verify(bool do_full_verify) const;
#endif
+ bool has_osr_loop_entry() {
+ return osr_loop_entry_.is_set();
+ }
+
+ HBasicBlock* osr_loop_entry() {
+ return osr_loop_entry_.get();
+ }
+
+ void set_osr_loop_entry(HBasicBlock* entry) {
+ osr_loop_entry_.set(entry);
+ }
+
+ ZoneList<HUnknownOSRValue*>* osr_values() {
+ return osr_values_.get();
+ }
+
+ void set_osr_values(ZoneList<HUnknownOSRValue*>* values) {
+ osr_values_.set(values);
+ }
+
private:
void Postorder(HBasicBlock* block,
BitVector* visited,
@@ -330,6 +373,9 @@ class HGraph: public ZoneObject {
SetOncePointer<HConstant> constant_hole_;
SetOncePointer<HArgumentsObject> arguments_object_;
+ SetOncePointer<HBasicBlock> osr_loop_entry_;
+ SetOncePointer<ZoneList<HUnknownOSRValue*> > osr_values_;
+
DISALLOW_COPY_AND_ASSIGN(HGraph);
};
@@ -337,18 +383,34 @@ class HGraph: public ZoneObject {
Zone* HBasicBlock::zone() { return graph_->zone(); }
+// Type of stack frame an environment might refer to.
+enum FrameType { JS_FUNCTION, JS_CONSTRUCT, ARGUMENTS_ADAPTOR };
+
+
class HEnvironment: public ZoneObject {
public:
HEnvironment(HEnvironment* outer,
Scope* scope,
Handle<JSFunction> closure);
+ HEnvironment* DiscardInlined(bool drop_extra) {
+ HEnvironment* outer = outer_;
+ while (outer->frame_type() != JS_FUNCTION) outer = outer->outer_;
+ if (drop_extra) outer->Drop(1);
+ return outer;
+ }
+
+ HEnvironment* arguments_environment() {
+ return outer()->frame_type() == ARGUMENTS_ADAPTOR ? outer() : this;
+ }
+
// Simple accessors.
Handle<JSFunction> closure() const { return closure_; }
const ZoneList<HValue*>* values() const { return &values_; }
const ZoneList<int>* assigned_variables() const {
return &assigned_variables_;
}
+ FrameType frame_type() const { return frame_type_; }
int parameter_count() const { return parameter_count_; }
int specials_count() const { return specials_count_; }
int local_count() const { return local_count_; }
@@ -364,6 +426,10 @@ class HEnvironment: public ZoneObject {
return i >= parameter_count() && i < parameter_count() + specials_count();
}
+ int first_expression_index() const {
+ return parameter_count() + specials_count() + local_count();
+ }
+
void Bind(Variable* variable, HValue* value) {
Bind(IndexFor(variable), value);
}
@@ -427,9 +493,11 @@ class HEnvironment: public ZoneObject {
// environment is the outer environment but the top expression stack
// elements are moved to an inner environment as parameters.
HEnvironment* CopyForInlining(Handle<JSFunction> target,
+ int arguments,
FunctionLiteral* function,
HConstant* undefined,
- CallKind call_kind) const;
+ CallKind call_kind,
+ bool is_construct) const;
void AddIncomingEdge(HBasicBlock* block, HEnvironment* other);
@@ -450,6 +518,18 @@ class HEnvironment: public ZoneObject {
private:
explicit HEnvironment(const HEnvironment* other);
+ HEnvironment(HEnvironment* outer,
+ Handle<JSFunction> closure,
+ FrameType frame_type,
+ int arguments);
+
+ // Create an artificial stub environment (e.g. for argument adaptor or
+ // constructor stub).
+ HEnvironment* CreateStubEnvironment(HEnvironment* outer,
+ Handle<JSFunction> target,
+ FrameType frame_type,
+ int arguments) const;
+
// True if index is included in the expression stack part of the environment.
bool HasExpressionAt(int index) const;
@@ -471,6 +551,7 @@ class HEnvironment: public ZoneObject {
// Value array [parameters] [specials] [locals] [temporaries].
ZoneList<HValue*> values_;
ZoneList<int> assigned_variables_;
+ FrameType frame_type_;
int parameter_count_;
int specials_count_;
int local_count_;
@@ -605,18 +686,26 @@ class TestContext: public AstContext {
};
+enum ReturnHandlingFlag {
+ NORMAL_RETURN,
+ DROP_EXTRA_ON_RETURN,
+ CONSTRUCT_CALL_RETURN
+};
+
+
class FunctionState {
public:
FunctionState(HGraphBuilder* owner,
CompilationInfo* info,
TypeFeedbackOracle* oracle,
- bool drop_extra);
+ ReturnHandlingFlag return_handling);
~FunctionState();
CompilationInfo* compilation_info() { return compilation_info_; }
TypeFeedbackOracle* oracle() { return oracle_; }
AstContext* call_context() { return call_context_; }
- bool drop_extra() { return drop_extra_; }
+ bool drop_extra() { return return_handling_ == DROP_EXTRA_ON_RETURN; }
+ bool is_construct() { return return_handling_ == CONSTRUCT_CALL_RETURN; }
HBasicBlock* function_return() { return function_return_; }
TestContext* test_context() { return test_context_; }
void ClearInlinedTestContext() {
@@ -636,11 +725,13 @@ class FunctionState {
// inlined. NULL when not inlining.
AstContext* call_context_;
- // Indicate if we have to drop an extra value from the environment on
- // return from inlined functions.
- bool drop_extra_;
+ // Indicate whether we have to perform special handling on return from
+ // inlined functions.
+ // - DROP_EXTRA_ON_RETURN: Drop an extra value from the environment.
+ // - CONSTRUCT_CALL_RETURN: Either use allocated receiver or return value.
+ ReturnHandlingFlag return_handling_;
- // When inlining in an effect of value context, this is the return block.
+ // When inlining in an effect or value context, this is the return block.
// It is NULL otherwise. When inlining in a test context, there are a
// pair of return blocks in the context. When not inlining, there is no
// local return point.
@@ -657,14 +748,19 @@ class FunctionState {
class HGraphBuilder: public AstVisitor {
public:
enum BreakType { BREAK, CONTINUE };
+ enum SwitchType { UNKNOWN_SWITCH, SMI_SWITCH, STRING_SWITCH };
// A class encapsulating (lazily-allocated) break and continue blocks for
// a breakable statement. Separated from BreakAndContinueScope so that it
// can have a separate lifetime.
class BreakAndContinueInfo BASE_EMBEDDED {
public:
- explicit BreakAndContinueInfo(BreakableStatement* target)
- : target_(target), break_block_(NULL), continue_block_(NULL) {
+ explicit BreakAndContinueInfo(BreakableStatement* target,
+ int drop_extra = 0)
+ : target_(target),
+ break_block_(NULL),
+ continue_block_(NULL),
+ drop_extra_(drop_extra) {
}
BreakableStatement* target() { return target_; }
@@ -672,11 +768,13 @@ class HGraphBuilder: public AstVisitor {
void set_break_block(HBasicBlock* block) { break_block_ = block; }
HBasicBlock* continue_block() { return continue_block_; }
void set_continue_block(HBasicBlock* block) { continue_block_ = block; }
+ int drop_extra() { return drop_extra_; }
private:
BreakableStatement* target_;
HBasicBlock* break_block_;
HBasicBlock* continue_block_;
+ int drop_extra_;
};
// A helper class to maintain a stack of current BreakAndContinueInfo
@@ -695,7 +793,7 @@ class HGraphBuilder: public AstVisitor {
BreakAndContinueScope* next() { return next_; }
// Search the break stack for a break or continue target.
- HBasicBlock* Get(BreakableStatement* stmt, BreakType type);
+ HBasicBlock* Get(BreakableStatement* stmt, BreakType type, int* drop_extra);
private:
BreakAndContinueInfo* info_;
@@ -738,6 +836,8 @@ class HGraphBuilder: public AstVisitor {
FunctionState* function_state() const { return function_state_; }
+ void VisitDeclarations(ZoneList<Declaration*>* declarations);
+
private:
// Type of a member function that generates inline code for a native function.
typedef void (HGraphBuilder::*InlineFunctionGenerator)(CallRuntime* call);
@@ -755,6 +855,12 @@ class HGraphBuilder: public AstVisitor {
static const int kMaxInlinedSize = 196;
static const int kMaxSourceSize = 600;
+ // Even in the 'unlimited' case we have to have some limit in order not to
+ // overflow the stack.
+ static const int kUnlimitedMaxInlinedNodes = 1000;
+ static const int kUnlimitedMaxInlinedSize = 1000;
+ static const int kUnlimitedMaxSourceSize = 600;
+
// Simple accessors.
void set_function_state(FunctionState* state) { function_state_ = state; }
@@ -765,7 +871,6 @@ class HGraphBuilder: public AstVisitor {
CompilationInfo* info() const {
return function_state()->compilation_info();
}
-
AstContext* call_context() const {
return function_state()->call_context();
}
@@ -779,7 +884,8 @@ class HGraphBuilder: public AstVisitor {
function_state()->ClearInlinedTestContext();
}
StrictModeFlag function_strict_mode_flag() {
- return function_state()->compilation_info()->strict_mode_flag();
+ return function_state()->compilation_info()->is_classic_mode()
+ ? kNonStrictMode : kStrictMode;
}
// Generators for inline runtime functions.
@@ -792,7 +898,8 @@ class HGraphBuilder: public AstVisitor {
void HandleDeclaration(VariableProxy* proxy,
VariableMode mode,
- FunctionLiteral* function);
+ FunctionLiteral* function,
+ int* global_count);
void VisitDelete(UnaryOperation* expr);
void VisitVoid(UnaryOperation* expr);
@@ -806,7 +913,7 @@ class HGraphBuilder: public AstVisitor {
void VisitLogicalExpression(BinaryOperation* expr);
void VisitArithmeticExpression(BinaryOperation* expr);
- void PreProcessOsrEntry(IterationStatement* statement);
+ bool PreProcessOsrEntry(IterationStatement* statement);
// True iff. we are compiling for OSR and the statement is the entry.
bool HasOsrEntryAt(IterationStatement* statement);
void VisitLoopBody(IterationStatement* stmt,
@@ -860,7 +967,7 @@ class HGraphBuilder: public AstVisitor {
// Remove the arguments from the bailout environment and emit instructions
// to push them as outgoing parameters.
- template <int V> HInstruction* PreProcessCall(HCall<V>* call);
+ template <class Instruction> HInstruction* PreProcessCall(Instruction* call);
void TraceRepresentation(Token::Value op,
TypeInfo info,
@@ -868,7 +975,7 @@ class HGraphBuilder: public AstVisitor {
Representation rep);
static Representation ToRepresentation(TypeInfo info);
- void SetupScope(Scope* scope);
+ void SetUpScope(Scope* scope);
virtual void VisitStatements(ZoneList<Statement*>* statements);
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
@@ -892,11 +999,21 @@ class HGraphBuilder: public AstVisitor {
// Try to optimize fun.apply(receiver, arguments) pattern.
bool TryCallApply(Call* expr);
- bool TryInline(Call* expr, bool drop_extra = false);
- bool TryInlineBuiltinFunction(Call* expr,
- HValue* receiver,
- Handle<Map> receiver_map,
- CheckType check_type);
+ bool TryInline(CallKind call_kind,
+ Handle<JSFunction> target,
+ ZoneList<Expression*>* arguments,
+ HValue* receiver,
+ int ast_id,
+ int return_id,
+ ReturnHandlingFlag return_handling);
+
+ bool TryInlineCall(Call* expr, bool drop_extra = false);
+ bool TryInlineConstruct(CallNew* expr, HValue* receiver);
+ bool TryInlineBuiltinMethodCall(Call* expr,
+ HValue* receiver,
+ Handle<Map> receiver_map,
+ CheckType check_type);
+ bool TryInlineBuiltinFunctionCall(Call* expr, bool drop_extra);
// If --trace-inlining, print a line of the inlining trace. Inlining
// succeeded if the reason string is NULL and failed if there is a
@@ -912,6 +1029,10 @@ class HGraphBuilder: public AstVisitor {
void HandlePropertyAssignment(Assignment* expr);
void HandleCompoundAssignment(Assignment* expr);
+ void HandlePolymorphicLoadNamedField(Property* expr,
+ HValue* object,
+ SmallMapList* types,
+ Handle<String> name);
void HandlePolymorphicStoreNamedField(Assignment* expr,
HValue* object,
HValue* value,
@@ -986,6 +1107,9 @@ class HGraphBuilder: public AstVisitor {
HInstruction* BuildStoreNamed(HValue* object,
HValue* value,
Expression* expr);
+ HInstruction* BuildStoreNamed(HValue* object,
+ HValue* value,
+ ObjectLiteral::Property* prop);
HInstruction* BuildStoreNamedField(HValue* object,
Handle<String> name,
HValue* value,
@@ -1054,10 +1178,10 @@ class HValueMap: public ZoneObject {
Resize(kInitialSize);
}
- void Kill(int flags);
+ void Kill(GVNFlagSet flags);
void Add(HValue* value) {
- present_flags_ |= value->flags();
+ present_flags_.Add(value->gvn_flags());
Insert(value);
}
@@ -1090,7 +1214,8 @@ class HValueMap: public ZoneObject {
int array_size_;
int lists_size_;
int count_; // The number of values stored in the HValueMap.
- int present_flags_; // All flags that are in any value in the HValueMap.
+ GVNFlagSet present_flags_; // All flags that are in any value in the
+ // HValueMap.
HValueMapListElement* array_; // Primary store - contains the first value
// with a given hash. Colliding elements are stored in linked lists.
HValueMapListElement* lists_; // The linked lists containing hash collisions.
diff --git a/src/3rdparty/v8/src/ia32/assembler-ia32-inl.h b/src/3rdparty/v8/src/ia32/assembler-ia32-inl.h
index 2e9fcb6..3cf0d00 100644
--- a/src/3rdparty/v8/src/ia32/assembler-ia32-inl.h
+++ b/src/3rdparty/v8/src/ia32/assembler-ia32-inl.h
@@ -30,13 +30,15 @@
// The original source code covered by the above license above has been
// modified significantly by Google Inc.
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// A light-weight IA32 Assembler.
#ifndef V8_IA32_ASSEMBLER_IA32_INL_H_
#define V8_IA32_ASSEMBLER_IA32_INL_H_
+#include "ia32/assembler-ia32.h"
+
#include "cpu.h"
#include "debug.h"
@@ -78,13 +80,15 @@ Address RelocInfo::target_address() {
Address RelocInfo::target_address_address() {
- ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
+ ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY
+ || rmode_ == EMBEDDED_OBJECT
+ || rmode_ == EXTERNAL_REFERENCE);
return reinterpret_cast<Address>(pc_);
}
int RelocInfo::target_address_size() {
- return Assembler::kExternalTargetSize;
+ return Assembler::kSpecialTargetSize;
}
@@ -224,7 +228,7 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
} else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
visitor->VisitGlobalPropertyCell(this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
- visitor->VisitExternalReference(target_reference_address());
+ visitor->VisitExternalReference(this);
CPU::FlushICache(pc_, sizeof(Address));
#ifdef ENABLE_DEBUGGER_SUPPORT
// TODO(isolates): Get a cached isolate below.
@@ -252,7 +256,7 @@ void RelocInfo::Visit(Heap* heap) {
} else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
StaticVisitor::VisitGlobalPropertyCell(heap, this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
- StaticVisitor::VisitExternalReference(target_reference_address());
+ StaticVisitor::VisitExternalReference(this);
CPU::FlushICache(pc_, sizeof(Address));
#ifdef ENABLE_DEBUGGER_SUPPORT
} else if (heap->isolate()->debug()->has_break_points() &&
diff --git a/src/3rdparty/v8/src/ia32/assembler-ia32.cc b/src/3rdparty/v8/src/ia32/assembler-ia32.cc
index c6030f9..a42f632 100644
--- a/src/3rdparty/v8/src/ia32/assembler-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/assembler-ia32.cc
@@ -32,7 +32,7 @@
// The original source code covered by the above license above has been modified
// significantly by Google Inc.
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
#include "v8.h"
@@ -350,7 +350,7 @@ Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
}
#endif
- // Setup buffer pointers.
+ // Set up buffer pointers.
ASSERT(buffer_ != NULL);
pc_ = buffer_;
reloc_info_writer.Reposition(buffer_ + buffer_size, pc_);
@@ -377,7 +377,7 @@ void Assembler::GetCode(CodeDesc* desc) {
// Finalize code (at this point overflow() may be true, but the gap ensures
// that we are still not overlapping instructions and relocation info).
ASSERT(pc_ <= reloc_info_writer.pos()); // No overlap.
- // Setup code descriptor.
+ // Set up code descriptor.
desc->buffer = buffer_;
desc->buffer_size = buffer_size_;
desc->instr_size = pc_offset();
@@ -388,8 +388,91 @@ void Assembler::GetCode(CodeDesc* desc) {
void Assembler::Align(int m) {
ASSERT(IsPowerOf2(m));
- while ((pc_offset() & (m - 1)) != 0) {
- nop();
+ int mask = m - 1;
+ int addr = pc_offset();
+ Nop((m - (addr & mask)) & mask);
+}
+
+
+bool Assembler::IsNop(Address addr) {
+ Address a = addr;
+ while (*a == 0x66) a++;
+ if (*a == 0x90) return true;
+ if (a[0] == 0xf && a[1] == 0x1f) return true;
+ return false;
+}
+
+
+void Assembler::Nop(int bytes) {
+ EnsureSpace ensure_space(this);
+
+ if (!CpuFeatures::IsSupported(SSE2)) {
+ // Older CPUs that do not support SSE2 may not support multibyte NOP
+ // instructions.
+ for (; bytes > 0; bytes--) {
+ EMIT(0x90);
+ }
+ return;
+ }
+
+ // Multi byte nops from http://support.amd.com/us/Processor_TechDocs/40546.pdf
+ while (bytes > 0) {
+ switch (bytes) {
+ case 2:
+ EMIT(0x66);
+ case 1:
+ EMIT(0x90);
+ return;
+ case 3:
+ EMIT(0xf);
+ EMIT(0x1f);
+ EMIT(0);
+ return;
+ case 4:
+ EMIT(0xf);
+ EMIT(0x1f);
+ EMIT(0x40);
+ EMIT(0);
+ return;
+ case 6:
+ EMIT(0x66);
+ case 5:
+ EMIT(0xf);
+ EMIT(0x1f);
+ EMIT(0x44);
+ EMIT(0);
+ EMIT(0);
+ return;
+ case 7:
+ EMIT(0xf);
+ EMIT(0x1f);
+ EMIT(0x80);
+ EMIT(0);
+ EMIT(0);
+ EMIT(0);
+ EMIT(0);
+ return;
+ default:
+ case 11:
+ EMIT(0x66);
+ bytes--;
+ case 10:
+ EMIT(0x66);
+ bytes--;
+ case 9:
+ EMIT(0x66);
+ bytes--;
+ case 8:
+ EMIT(0xf);
+ EMIT(0x1f);
+ EMIT(0x84);
+ EMIT(0);
+ EMIT(0);
+ EMIT(0);
+ EMIT(0);
+ EMIT(0);
+ bytes -= 8;
+ }
}
}
@@ -463,13 +546,6 @@ void Assembler::push(const Operand& src) {
}
-void Assembler::push(Handle<Object> handle) {
- EnsureSpace ensure_space(this);
- EMIT(0x68);
- emit(handle);
-}
-
-
void Assembler::pop(Register dst) {
ASSERT(reloc_info_writer.last_pc() != NULL);
EnsureSpace ensure_space(this);
@@ -499,7 +575,7 @@ void Assembler::leave() {
void Assembler::mov_b(Register dst, const Operand& src) {
- ASSERT(dst.code() < 4);
+ CHECK(dst.is_byte_register());
EnsureSpace ensure_space(this);
EMIT(0x8A);
emit_operand(dst, src);
@@ -515,7 +591,7 @@ void Assembler::mov_b(const Operand& dst, int8_t imm8) {
void Assembler::mov_b(const Operand& dst, Register src) {
- ASSERT(src.code() < 4);
+ CHECK(src.is_byte_register());
EnsureSpace ensure_space(this);
EMIT(0x88);
emit_operand(src, dst);
@@ -753,7 +829,7 @@ void Assembler::cmpb(const Operand& op, int8_t imm8) {
void Assembler::cmpb(const Operand& op, Register reg) {
- ASSERT(reg.is_byte_register());
+ CHECK(reg.is_byte_register());
EnsureSpace ensure_space(this);
EMIT(0x38);
emit_operand(reg, op);
@@ -761,7 +837,7 @@ void Assembler::cmpb(const Operand& op, Register reg) {
void Assembler::cmpb(Register reg, const Operand& op) {
- ASSERT(reg.is_byte_register());
+ CHECK(reg.is_byte_register());
EnsureSpace ensure_space(this);
EMIT(0x3A);
emit_operand(reg, op);
@@ -825,6 +901,7 @@ void Assembler::cmpw_ax(const Operand& op) {
void Assembler::dec_b(Register dst) {
+ CHECK(dst.is_byte_register());
EnsureSpace ensure_space(this);
EMIT(0xFE);
EMIT(0xC8 | dst.code());
@@ -1098,7 +1175,9 @@ void Assembler::test(Register reg, const Immediate& imm) {
EnsureSpace ensure_space(this);
// Only use test against byte for registers that have a byte
// variant: eax, ebx, ecx, and edx.
- if (imm.rmode_ == RelocInfo::NONE && is_uint8(imm.x_) && reg.code() < 4) {
+ if (imm.rmode_ == RelocInfo::NONE &&
+ is_uint8(imm.x_) &&
+ reg.is_byte_register()) {
uint8_t imm8 = imm.x_;
if (reg.is(eax)) {
EMIT(0xA8);
@@ -1128,6 +1207,7 @@ void Assembler::test(Register reg, const Operand& op) {
void Assembler::test_b(Register reg, const Operand& op) {
+ CHECK(reg.is_byte_register());
EnsureSpace ensure_space(this);
EMIT(0x84);
emit_operand(reg, op);
@@ -1143,7 +1223,7 @@ void Assembler::test(const Operand& op, const Immediate& imm) {
void Assembler::test_b(const Operand& op, uint8_t imm8) {
- if (op.is_reg_only() && op.reg().code() >= 4) {
+ if (op.is_reg_only() && !op.reg().is_byte_register()) {
test(op, Immediate(imm8));
return;
}
@@ -1626,6 +1706,13 @@ void Assembler::fsin() {
}
+void Assembler::fptan() {
+ EnsureSpace ensure_space(this);
+ EMIT(0xD9);
+ EMIT(0xF2);
+}
+
+
void Assembler::fyl2x() {
EnsureSpace ensure_space(this);
EMIT(0xD9);
@@ -1633,6 +1720,27 @@ void Assembler::fyl2x() {
}
+void Assembler::f2xm1() {
+ EnsureSpace ensure_space(this);
+ EMIT(0xD9);
+ EMIT(0xF0);
+}
+
+
+void Assembler::fscale() {
+ EnsureSpace ensure_space(this);
+ EMIT(0xD9);
+ EMIT(0xFD);
+}
+
+
+void Assembler::fninit() {
+ EnsureSpace ensure_space(this);
+ EMIT(0xDB);
+ EMIT(0xE3);
+}
+
+
void Assembler::fadd(int i) {
EnsureSpace ensure_space(this);
emit_farith(0xDC, 0xC0, i);
@@ -1946,6 +2054,16 @@ void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
}
+void Assembler::ucomisd(XMMRegister dst, const Operand& src) {
+ ASSERT(CpuFeatures::IsEnabled(SSE2));
+ EnsureSpace ensure_space(this);
+ EMIT(0x66);
+ EMIT(0x0F);
+ EMIT(0x2E);
+ emit_sse_operand(dst, src);
+}
+
+
void Assembler::roundsd(XMMRegister dst, XMMRegister src, RoundingMode mode) {
ASSERT(CpuFeatures::IsEnabled(SSE4_1));
EnsureSpace ensure_space(this);
@@ -2151,6 +2269,19 @@ void Assembler::movd(const Operand& dst, XMMRegister src) {
}
+void Assembler::extractps(Register dst, XMMRegister src, byte imm8) {
+ ASSERT(CpuFeatures::IsSupported(SSE4_1));
+ ASSERT(is_uint8(imm8));
+ EnsureSpace ensure_space(this);
+ EMIT(0x66);
+ EMIT(0x0F);
+ EMIT(0x3A);
+ EMIT(0x17);
+ emit_sse_operand(dst, src);
+ EMIT(imm8);
+}
+
+
void Assembler::pand(XMMRegister dst, XMMRegister src) {
ASSERT(CpuFeatures::IsEnabled(SSE2));
EnsureSpace ensure_space(this);
@@ -2330,7 +2461,7 @@ void Assembler::GrowBuffer() {
V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
}
- // Setup new buffer.
+ // Set up new buffer.
desc.buffer = NewArray<byte>(desc.buffer_size);
desc.instr_size = pc_offset();
desc.reloc_size = (buffer_ + buffer_size_) - (reloc_info_writer.pos());
diff --git a/src/3rdparty/v8/src/ia32/assembler-ia32.h b/src/3rdparty/v8/src/ia32/assembler-ia32.h
index fbd04bb..929b485 100644
--- a/src/3rdparty/v8/src/ia32/assembler-ia32.h
+++ b/src/3rdparty/v8/src/ia32/assembler-ia32.h
@@ -97,16 +97,25 @@ struct Register {
int code_;
};
-
-const Register eax = { 0 };
-const Register ecx = { 1 };
-const Register edx = { 2 };
-const Register ebx = { 3 };
-const Register esp = { 4 };
-const Register ebp = { 5 };
-const Register esi = { 6 };
-const Register edi = { 7 };
-const Register no_reg = { -1 };
+const int kRegister_eax_Code = 0;
+const int kRegister_ecx_Code = 1;
+const int kRegister_edx_Code = 2;
+const int kRegister_ebx_Code = 3;
+const int kRegister_esp_Code = 4;
+const int kRegister_ebp_Code = 5;
+const int kRegister_esi_Code = 6;
+const int kRegister_edi_Code = 7;
+const int kRegister_no_reg_Code = -1;
+
+const Register eax = { kRegister_eax_Code };
+const Register ecx = { kRegister_ecx_Code };
+const Register edx = { kRegister_edx_Code };
+const Register ebx = { kRegister_ebx_Code };
+const Register esp = { kRegister_esp_Code };
+const Register ebp = { kRegister_ebp_Code };
+const Register esi = { kRegister_esi_Code };
+const Register edi = { kRegister_edi_Code };
+const Register no_reg = { kRegister_no_reg_Code };
inline const char* Register::AllocationIndexToString(int index) {
@@ -589,8 +598,8 @@ class Assembler : public AssemblerBase {
// This sets the branch destination (which is in the instruction on x86).
// This is for calls and branches within generated code.
- inline static void set_target_at(Address instruction_payload,
- Address target) {
+ inline static void deserialization_set_special_target_at(
+ Address instruction_payload, Address target) {
set_target_address_at(instruction_payload, target);
}
@@ -601,8 +610,7 @@ class Assembler : public AssemblerBase {
set_target_address_at(instruction_payload, target);
}
- static const int kCallTargetSize = kPointerSize;
- static const int kExternalTargetSize = kPointerSize;
+ static const int kSpecialTargetSize = kPointerSize;
// Distance between the address of the code target in the call instruction
// and the return address
@@ -621,8 +629,6 @@ class Assembler : public AssemblerBase {
// The debug break slot must be able to contain a call instruction.
static const int kDebugBreakSlotLength = kCallInstructionLength;
- // One byte opcode for test eax,0xXXXXXXXX.
- static const byte kTestEaxByte = 0xA9;
// One byte opcode for test al, 0xXX.
static const byte kTestAlByte = 0xA8;
// One byte opcode for nop.
@@ -659,6 +665,7 @@ class Assembler : public AssemblerBase {
// possible to align the pc offset to a multiple
// of m. m must be a power of 2.
void Align(int m);
+ void Nop(int bytes = 1);
// Aligns code to something that's optimal for a jump target for the platform.
void CodeTargetAlign();
@@ -673,7 +680,6 @@ class Assembler : public AssemblerBase {
void push_imm32(int32_t imm32);
void push(Register src);
void push(const Operand& src);
- void push(Handle<Object> handle);
void pop(Register dst);
void pop(const Operand& dst);
@@ -924,7 +930,11 @@ class Assembler : public AssemblerBase {
void fchs();
void fcos();
void fsin();
+ void fptan();
void fyl2x();
+ void f2xm1();
+ void fscale();
+ void fninit();
void fadd(int i);
void fsub(int i);
@@ -982,6 +992,7 @@ class Assembler : public AssemblerBase {
void andpd(XMMRegister dst, XMMRegister src);
void ucomisd(XMMRegister dst, XMMRegister src);
+ void ucomisd(XMMRegister dst, const Operand& src);
enum RoundingMode {
kRoundToNearest = 0x0,
@@ -1016,6 +1027,7 @@ class Assembler : public AssemblerBase {
void movss(XMMRegister dst, const Operand& src);
void movss(const Operand& dst, XMMRegister src);
void movss(XMMRegister dst, XMMRegister src);
+ void extractps(Register dst, XMMRegister src, byte imm8);
void pand(XMMRegister dst, XMMRegister src);
void pxor(XMMRegister dst, XMMRegister src);
@@ -1079,7 +1091,7 @@ class Assembler : public AssemblerBase {
// Get the number of bytes available in the buffer.
inline int available_space() const { return reloc_info_writer.pos() - pc_; }
- static bool IsNop(Address addr) { return *addr == 0x90; }
+ static bool IsNop(Address addr);
PositionsRecorder* positions_recorder() { return &positions_recorder_; }
diff --git a/src/3rdparty/v8/src/ia32/builtins-ia32.cc b/src/3rdparty/v8/src/ia32/builtins-ia32.cc
index ac4da4c..a5d42cf 100644
--- a/src/3rdparty/v8/src/ia32/builtins-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/builtins-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -74,50 +74,14 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
}
-void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
+static void Generate_JSConstructStubHelper(MacroAssembler* masm,
+ bool is_api_function,
+ bool count_constructions) {
// ----------- S t a t e -------------
// -- eax: number of arguments
// -- edi: constructor function
// -----------------------------------
- Label slow, non_function_call;
- // Check that function is not a smi.
- __ JumpIfSmi(edi, &non_function_call);
- // Check that function is a JSFunction.
- __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
- __ j(not_equal, &slow);
-
- // Jump to the function-specific construct stub.
- __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
- __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kConstructStubOffset));
- __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize));
- __ jmp(ebx);
-
- // edi: called object
- // eax: number of arguments
- // ecx: object map
- Label do_call;
- __ bind(&slow);
- __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
- __ j(not_equal, &non_function_call);
- __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
- __ jmp(&do_call);
-
- __ bind(&non_function_call);
- __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
- __ bind(&do_call);
- // Set expected number of arguments to zero (not changing eax).
- __ Set(ebx, Immediate(0));
- Handle<Code> arguments_adaptor =
- masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
- __ SetCallKind(ecx, CALL_AS_METHOD);
- __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET);
-}
-
-
-static void Generate_JSConstructStubHelper(MacroAssembler* masm,
- bool is_api_function,
- bool count_constructions) {
// Should never count constructions for api objects.
ASSERT(!is_api_function || !count_constructions);
@@ -333,7 +297,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ push(ebx);
__ push(ebx);
- // Setup pointer to last argument.
+ // Set up pointer to last argument.
__ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
// Copy arguments and receiver to the expression stack.
@@ -360,6 +324,11 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
NullCallWrapper(), CALL_AS_METHOD);
}
+ // Store offset of return address for deoptimizer.
+ if (!is_api_function && !count_constructions) {
+ masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
+ }
+
// Restore context from the frame.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
@@ -454,8 +423,8 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Invoke the code.
if (is_construct) {
- __ call(masm->isolate()->builtins()->JSConstructCall(),
- RelocInfo::CODE_TARGET);
+ CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+ __ CallStub(&stub);
} else {
ParameterCount actual(eax);
__ InvokeFunction(edi, actual, CALL_FUNCTION,
@@ -537,7 +506,7 @@ static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
{
FrameScope scope(masm, StackFrame::INTERNAL);
- // Pass the function and deoptimization type to the runtime system.
+ // Pass deoptimization type to the runtime system.
__ push(Immediate(Smi::FromInt(static_cast<int>(type))));
__ CallRuntime(Runtime::kNotifyDeoptimized, 1);
@@ -719,7 +688,6 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
__ test(edx, edx);
__ j(zero, &function);
__ Set(ebx, Immediate(0));
- __ SetCallKind(ecx, CALL_AS_METHOD);
__ cmp(edx, Immediate(1));
__ j(not_equal, &non_proxy);
@@ -727,11 +695,13 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
__ push(edi); // re-add proxy object as additional argument
__ push(edx);
__ inc(eax);
+ __ SetCallKind(ecx, CALL_AS_FUNCTION);
__ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
__ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
__ bind(&non_proxy);
+ __ SetCallKind(ecx, CALL_AS_METHOD);
__ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
__ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
@@ -928,9 +898,8 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
Label* gc_required) {
const int initial_capacity = JSArray::kPreallocatedArrayElements;
STATIC_ASSERT(initial_capacity >= 0);
- // Load the initial map from the array function.
- __ mov(scratch1, FieldOperand(array_function,
- JSFunction::kPrototypeOrInitialMapOffset));
+
+ __ LoadInitialArrayMap(array_function, scratch2, scratch1);
// Allocate the JSArray object together with space for a fixed array with the
// requested elements.
@@ -1033,10 +1002,7 @@ static void AllocateJSArray(MacroAssembler* masm,
ASSERT(!fill_with_hole || array_size.is(ecx)); // rep stos count
ASSERT(!fill_with_hole || !result.is(eax)); // result is never eax
- // Load the initial map from the array function.
- __ mov(elements_array,
- FieldOperand(array_function,
- JSFunction::kPrototypeOrInitialMapOffset));
+ __ LoadInitialArrayMap(array_function, scratch, elements_array);
// Allocate the JSArray object together with space for a FixedArray with the
// requested elements.
@@ -1127,7 +1093,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
bool construct_call,
Label* call_generic_code) {
Label argc_one_or_more, argc_two_or_more, prepare_generic_code_call,
- empty_array, not_empty_array;
+ empty_array, not_empty_array, finish, cant_transition_map, not_double;
// Push the constructor and argc. No need to tag argc as a smi, as there will
// be no garbage collection with this on the stack.
@@ -1237,37 +1203,42 @@ static void ArrayNativeCode(MacroAssembler* masm,
false,
&prepare_generic_code_call);
__ IncrementCounter(counters->array_function_native(), 1);
- __ mov(eax, ebx);
- __ pop(ebx);
- if (construct_call) {
- __ pop(edi);
- }
- __ push(eax);
- // eax: JSArray
+ __ push(ebx);
+ __ mov(ebx, Operand(esp, kPointerSize));
// ebx: argc
// edx: elements_array_end (untagged)
// esp[0]: JSArray
- // esp[4]: return address
- // esp[8]: last argument
+ // esp[4]: argc
+ // esp[8]: constructor (only if construct_call)
+ // esp[12]: return address
+ // esp[16]: last argument
// Location of the last argument
- __ lea(edi, Operand(esp, 2 * kPointerSize));
+ int last_arg_offset = (construct_call ? 4 : 3) * kPointerSize;
+ __ lea(edi, Operand(esp, last_arg_offset));
// Location of the first array element (Parameter fill_with_holes to
- // AllocateJSArrayis false, so the FixedArray is returned in ecx).
+ // AllocateJSArray is false, so the FixedArray is returned in ecx).
__ lea(edx, Operand(ecx, FixedArray::kHeaderSize - kHeapObjectTag));
+ Label has_non_smi_element;
+
// ebx: argc
// edx: location of the first array element
// edi: location of the last argument
// esp[0]: JSArray
- // esp[4]: return address
- // esp[8]: last argument
+ // esp[4]: argc
+ // esp[8]: constructor (only if construct_call)
+ // esp[12]: return address
+ // esp[16]: last argument
Label loop, entry;
__ mov(ecx, ebx);
__ jmp(&entry);
__ bind(&loop);
__ mov(eax, Operand(edi, ecx, times_pointer_size, 0));
+ if (FLAG_smi_only_arrays) {
+ __ JumpIfNotSmi(eax, &has_non_smi_element);
+ }
__ mov(Operand(edx, 0), eax);
__ add(edx, Immediate(kPointerSize));
__ bind(&entry);
@@ -1277,13 +1248,56 @@ static void ArrayNativeCode(MacroAssembler* masm,
// Remove caller arguments from the stack and return.
// ebx: argc
// esp[0]: JSArray
- // esp[4]: return address
- // esp[8]: last argument
+ // esp[4]: argc
+ // esp[8]: constructor (only if construct_call)
+ // esp[12]: return address
+ // esp[16]: last argument
+ __ bind(&finish);
+ __ mov(ecx, Operand(esp, last_arg_offset - kPointerSize));
__ pop(eax);
- __ pop(ecx);
- __ lea(esp, Operand(esp, ebx, times_pointer_size, 1 * kPointerSize));
- __ push(ecx);
- __ ret(0);
+ __ pop(ebx);
+ __ lea(esp, Operand(esp, ebx, times_pointer_size,
+ last_arg_offset - kPointerSize));
+ __ jmp(ecx);
+
+ __ bind(&has_non_smi_element);
+ // Double values are handled by the runtime.
+ __ CheckMap(eax,
+ masm->isolate()->factory()->heap_number_map(),
+ &not_double,
+ DONT_DO_SMI_CHECK);
+ __ bind(&cant_transition_map);
+ // Throw away the array that's only been partially constructed.
+ __ pop(eax);
+ __ UndoAllocationInNewSpace(eax);
+ __ jmp(&prepare_generic_code_call);
+
+ __ bind(&not_double);
+ // Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
+ __ mov(ebx, Operand(esp, 0));
+ __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
+ __ LoadTransitionedArrayMapConditional(
+ FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ edi,
+ eax,
+ &cant_transition_map);
+ __ mov(FieldOperand(ebx, HeapObject::kMapOffset), edi);
+ __ RecordWriteField(ebx, HeapObject::kMapOffset, edi, eax,
+ kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+
+ // Prepare to re-enter the loop
+ __ lea(edi, Operand(esp, last_arg_offset));
+
+ // Finish the array initialization loop.
+ Label loop2;
+ __ bind(&loop2);
+ __ mov(eax, Operand(edi, ecx, times_pointer_size, 0));
+ __ mov(Operand(edx, 0), eax);
+ __ add(edx, Immediate(kPointerSize));
+ __ dec(ecx);
+ __ j(greater_equal, &loop2);
+ __ jmp(&finish);
// Restore argc and constructor before running the generic code.
__ bind(&prepare_generic_code_call);
@@ -1295,6 +1309,40 @@ static void ArrayNativeCode(MacroAssembler* masm,
}
+void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- eax : argc
+ // -- esp[0] : return address
+ // -- esp[4] : last argument
+ // -----------------------------------
+ Label generic_array_code;
+
+ // Get the InternalArray function.
+ __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, edi);
+
+ if (FLAG_debug_code) {
+ // Initial map for the builtin InternalArray function should be a map.
+ __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
+ // Will both indicate a NULL and a Smi.
+ __ test(ebx, Immediate(kSmiTagMask));
+ __ Assert(not_zero, "Unexpected initial map for InternalArray function");
+ __ CmpObjectType(ebx, MAP_TYPE, ecx);
+ __ Assert(equal, "Unexpected initial map for InternalArray function");
+ }
+
+ // Run the native code for the InternalArray function called as a normal
+ // function.
+ ArrayNativeCode(masm, false, &generic_array_code);
+
+ // Jump to the generic internal array code in case the specialized code cannot
+ // handle the construction.
+ __ bind(&generic_array_code);
+ Handle<Code> array_code =
+ masm->isolate()->builtins()->InternalArrayCodeGeneric();
+ __ jmp(array_code, RelocInfo::CODE_TARGET);
+}
+
+
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : argc
@@ -1307,7 +1355,7 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, edi);
if (FLAG_debug_code) {
- // Initial map for the builtin Array function shoud be a map.
+ // Initial map for the builtin Array function should be a map.
__ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ test(ebx, Immediate(kSmiTagMask));
@@ -1596,6 +1644,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ call(edx);
+ // Store offset of return address for deoptimizer.
+ masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
+
// Leave frame and return.
LeaveArgumentsAdaptorFrame(masm);
__ ret(0);
@@ -1650,8 +1701,9 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
__ j(not_equal, &skip, Label::kNear);
__ ret(0);
- // If we decide not to perform on-stack replacement we perform a
- // stack guard check to enable interrupts.
+ // Insert a stack guard check so that if we decide not to perform
+ // on-stack replacement right away, the function calling this stub can
+ // still be interrupted.
__ bind(&stack_check);
Label ok;
ExternalReference stack_limit =
diff --git a/src/3rdparty/v8/src/ia32/code-stubs-ia32.cc b/src/3rdparty/v8/src/ia32/code-stubs-ia32.cc
index b2ff97d..4faa6a4 100644
--- a/src/3rdparty/v8/src/ia32/code-stubs-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/code-stubs-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -35,6 +35,7 @@
#include "jsregexp.h"
#include "regexp-macro-assembler.h"
#include "stub-cache.h"
+#include "codegen.h"
namespace v8 {
namespace internal {
@@ -71,9 +72,9 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
// Get the function info from the stack.
__ mov(edx, Operand(esp, 1 * kPointerSize));
- int map_index = strict_mode_ == kStrictMode
- ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
- : Context::FUNCTION_MAP_INDEX;
+ int map_index = (language_mode_ == CLASSIC_MODE)
+ ? Context::FUNCTION_MAP_INDEX
+ : Context::STRICT_MODE_FUNCTION_MAP_INDEX;
// Compute the function map in the current global context and set that
// as the map of the allocated object.
@@ -127,14 +128,14 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
// Get the function from the stack.
__ mov(ecx, Operand(esp, 1 * kPointerSize));
- // Setup the object header.
+ // Set up the object header.
Factory* factory = masm->isolate()->factory();
__ mov(FieldOperand(eax, HeapObject::kMapOffset),
factory->function_context_map());
__ mov(FieldOperand(eax, Context::kLengthOffset),
Immediate(Smi::FromInt(length)));
- // Setup the fixed slots.
+ // Set up the fixed slots.
__ Set(ebx, Immediate(0)); // Set to NULL.
__ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
__ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), esi);
@@ -144,11 +145,6 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
__ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx);
- // Copy the qml global object from the previous context.
- __ mov(ebx, Operand(esi, Context::SlotOffset(Context::QML_GLOBAL_INDEX)));
- __ mov(Operand(eax, Context::SlotOffset(Context::QML_GLOBAL_INDEX)), ebx);
-
-
// Initialize the rest of the slots to undefined.
__ mov(ebx, factory->undefined_value());
for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
@@ -183,7 +179,7 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
// Get the serialized scope info from the stack.
__ mov(ebx, Operand(esp, 2 * kPointerSize));
- // Setup the object header.
+ // Set up the object header.
Factory* factory = masm->isolate()->factory();
__ mov(FieldOperand(eax, HeapObject::kMapOffset),
factory->block_context_map());
@@ -206,7 +202,7 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ mov(ecx, ContextOperand(ecx, Context::CLOSURE_INDEX));
__ bind(&after_sentinel);
- // Setup the fixed slots.
+ // Set up the fixed slots.
__ mov(ContextOperand(eax, Context::CLOSURE_INDEX), ecx);
__ mov(ContextOperand(eax, Context::PREVIOUS_INDEX), esi);
__ mov(ContextOperand(eax, Context::EXTENSION_INDEX), ebx);
@@ -215,10 +211,6 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ mov(ebx, ContextOperand(esi, Context::GLOBAL_INDEX));
__ mov(ContextOperand(eax, Context::GLOBAL_INDEX), ebx);
- // Copy the qml global object from the previous context.
- __ mov(ebx, ContextOperand(esi, Context::QML_GLOBAL_INDEX));
- __ mov(ContextOperand(eax, Context::QML_GLOBAL_INDEX), ebx);
-
// Initialize the rest of the slots to the hole value.
if (slots_ == 1) {
__ mov(ContextOperand(eax, Context::MIN_CONTEXT_SLOTS),
@@ -240,70 +232,38 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
}
-void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
- // Stack layout on entry:
+static void GenerateFastCloneShallowArrayCommon(
+ MacroAssembler* masm,
+ int length,
+ FastCloneShallowArrayStub::Mode mode,
+ Label* fail) {
+ // Registers on entry:
//
- // [esp + kPointerSize]: constant elements.
- // [esp + (2 * kPointerSize)]: literal index.
- // [esp + (3 * kPointerSize)]: literals array.
+ // ecx: boilerplate literal array.
+ ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
// All sizes here are multiples of kPointerSize.
int elements_size = 0;
- if (length_ > 0) {
- elements_size = mode_ == CLONE_DOUBLE_ELEMENTS
- ? FixedDoubleArray::SizeFor(length_)
- : FixedArray::SizeFor(length_);
+ if (length > 0) {
+ elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+ ? FixedDoubleArray::SizeFor(length)
+ : FixedArray::SizeFor(length);
}
int size = JSArray::kSize + elements_size;
- // Load boilerplate object into ecx and check if we need to create a
- // boilerplate.
- Label slow_case;
- __ mov(ecx, Operand(esp, 3 * kPointerSize));
- __ mov(eax, Operand(esp, 2 * kPointerSize));
- STATIC_ASSERT(kPointerSize == 4);
- STATIC_ASSERT(kSmiTagSize == 1);
- STATIC_ASSERT(kSmiTag == 0);
- __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size,
- FixedArray::kHeaderSize));
- Factory* factory = masm->isolate()->factory();
- __ cmp(ecx, factory->undefined_value());
- __ j(equal, &slow_case);
-
- if (FLAG_debug_code) {
- const char* message;
- Handle<Map> expected_map;
- if (mode_ == CLONE_ELEMENTS) {
- message = "Expected (writable) fixed array";
- expected_map = factory->fixed_array_map();
- } else if (mode_ == CLONE_DOUBLE_ELEMENTS) {
- message = "Expected (writable) fixed double array";
- expected_map = factory->fixed_double_array_map();
- } else {
- ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
- message = "Expected copy-on-write fixed array";
- expected_map = factory->fixed_cow_array_map();
- }
- __ push(ecx);
- __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
- __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map);
- __ Assert(equal, message);
- __ pop(ecx);
- }
-
// Allocate both the JS array and the elements array in one big
// allocation. This avoids multiple limit checks.
- __ AllocateInNewSpace(size, eax, ebx, edx, &slow_case, TAG_OBJECT);
+ __ AllocateInNewSpace(size, eax, ebx, edx, fail, TAG_OBJECT);
// Copy the JS array part.
for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
- if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
+ if ((i != JSArray::kElementsOffset) || (length == 0)) {
__ mov(ebx, FieldOperand(ecx, i));
__ mov(FieldOperand(eax, i), ebx);
}
}
- if (length_ > 0) {
+ if (length > 0) {
// Get hold of the elements array of the boilerplate and setup the
// elements pointer in the resulting object.
__ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
@@ -311,13 +271,13 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
__ mov(FieldOperand(eax, JSArray::kElementsOffset), edx);
// Copy the elements array.
- if (mode_ == CLONE_ELEMENTS) {
+ if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) {
for (int i = 0; i < elements_size; i += kPointerSize) {
__ mov(ebx, FieldOperand(ecx, i));
__ mov(FieldOperand(edx, i), ebx);
}
} else {
- ASSERT(mode_ == CLONE_DOUBLE_ELEMENTS);
+ ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS);
int i;
for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) {
__ mov(ebx, FieldOperand(ecx, i));
@@ -331,7 +291,75 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
ASSERT(i == elements_size);
}
}
+}
+
+void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
+ // Stack layout on entry:
+ //
+ // [esp + kPointerSize]: constant elements.
+ // [esp + (2 * kPointerSize)]: literal index.
+ // [esp + (3 * kPointerSize)]: literals array.
+
+ // Load boilerplate object into ecx and check if we need to create a
+ // boilerplate.
+ __ mov(ecx, Operand(esp, 3 * kPointerSize));
+ __ mov(eax, Operand(esp, 2 * kPointerSize));
+ STATIC_ASSERT(kPointerSize == 4);
+ STATIC_ASSERT(kSmiTagSize == 1);
+ STATIC_ASSERT(kSmiTag == 0);
+ __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size,
+ FixedArray::kHeaderSize));
+ Factory* factory = masm->isolate()->factory();
+ __ cmp(ecx, factory->undefined_value());
+ Label slow_case;
+ __ j(equal, &slow_case);
+
+ FastCloneShallowArrayStub::Mode mode = mode_;
+ // ecx is boilerplate object.
+ if (mode == CLONE_ANY_ELEMENTS) {
+ Label double_elements, check_fast_elements;
+ __ mov(ebx, FieldOperand(ecx, JSArray::kElementsOffset));
+ __ CheckMap(ebx, factory->fixed_cow_array_map(),
+ &check_fast_elements, DONT_DO_SMI_CHECK);
+ GenerateFastCloneShallowArrayCommon(masm, 0,
+ COPY_ON_WRITE_ELEMENTS, &slow_case);
+ __ ret(3 * kPointerSize);
+
+ __ bind(&check_fast_elements);
+ __ CheckMap(ebx, factory->fixed_array_map(),
+ &double_elements, DONT_DO_SMI_CHECK);
+ GenerateFastCloneShallowArrayCommon(masm, length_,
+ CLONE_ELEMENTS, &slow_case);
+ __ ret(3 * kPointerSize);
+
+ __ bind(&double_elements);
+ mode = CLONE_DOUBLE_ELEMENTS;
+ // Fall through to generate the code to handle double elements.
+ }
+
+ if (FLAG_debug_code) {
+ const char* message;
+ Handle<Map> expected_map;
+ if (mode == CLONE_ELEMENTS) {
+ message = "Expected (writable) fixed array";
+ expected_map = factory->fixed_array_map();
+ } else if (mode == CLONE_DOUBLE_ELEMENTS) {
+ message = "Expected (writable) fixed double array";
+ expected_map = factory->fixed_double_array_map();
+ } else {
+ ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
+ message = "Expected copy-on-write fixed array";
+ expected_map = factory->fixed_cow_array_map();
+ }
+ __ push(ecx);
+ __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
+ __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), expected_map);
+ __ Assert(equal, message);
+ __ pop(ecx);
+ }
+
+ GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
// Return and remove the on-stack parameters.
__ ret(3 * kPointerSize);
@@ -340,6 +368,52 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
}
+void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) {
+ // Stack layout on entry:
+ //
+ // [esp + kPointerSize]: object literal flags.
+ // [esp + (2 * kPointerSize)]: constant properties.
+ // [esp + (3 * kPointerSize)]: literal index.
+ // [esp + (4 * kPointerSize)]: literals array.
+
+ // Load boilerplate object into ecx and check if we need to create a
+ // boilerplate.
+ Label slow_case;
+ __ mov(ecx, Operand(esp, 4 * kPointerSize));
+ __ mov(eax, Operand(esp, 3 * kPointerSize));
+ STATIC_ASSERT(kPointerSize == 4);
+ STATIC_ASSERT(kSmiTagSize == 1);
+ STATIC_ASSERT(kSmiTag == 0);
+ __ mov(ecx, FieldOperand(ecx, eax, times_half_pointer_size,
+ FixedArray::kHeaderSize));
+ Factory* factory = masm->isolate()->factory();
+ __ cmp(ecx, factory->undefined_value());
+ __ j(equal, &slow_case);
+
+ // Check that the boilerplate contains only fast properties and we can
+ // statically determine the instance size.
+ int size = JSObject::kHeaderSize + length_ * kPointerSize;
+ __ mov(eax, FieldOperand(ecx, HeapObject::kMapOffset));
+ __ movzx_b(eax, FieldOperand(eax, Map::kInstanceSizeOffset));
+ __ cmp(eax, Immediate(size >> kPointerSizeLog2));
+ __ j(not_equal, &slow_case);
+
+ // Allocate the JS object and copy header together with all in-object
+ // properties from the boilerplate.
+ __ AllocateInNewSpace(size, eax, ebx, edx, &slow_case, TAG_OBJECT);
+ for (int i = 0; i < size; i += kPointerSize) {
+ __ mov(ebx, FieldOperand(ecx, i));
+ __ mov(FieldOperand(eax, i), ebx);
+ }
+
+ // Return and remove the on-stack parameters.
+ __ ret(4 * kPointerSize);
+
+ __ bind(&slow_case);
+ __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1);
+}
+
+
// The stub expects its argument on the stack and returns its result in tos_:
// zero for false, and a non-zero value for true.
void ToBooleanStub::Generate(MacroAssembler* masm) {
@@ -675,7 +749,7 @@ static void IntegerConvert(MacroAssembler* masm,
// Exponent word in scratch, exponent part of exponent word in scratch2.
// Zero in ecx.
// We know the exponent is smaller than 30 (biased). If it is less than
- // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, ie
+ // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, i.e.
// it rounds to zero.
const uint32_t zero_exponent =
(HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
@@ -2411,6 +2485,8 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
__ cmp(edx, Operand(ecx, kIntSize));
__ j(not_equal, &cache_miss, Label::kNear);
// Cache hit!
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->transcendental_cache_hit(), 1);
__ mov(eax, Operand(ecx, 2 * kIntSize));
if (tagged) {
__ fstp(0);
@@ -2421,6 +2497,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
}
__ bind(&cache_miss);
+ __ IncrementCounter(counters->transcendental_cache_miss(), 1);
// Update cache with new value.
// We are short on registers, so use no_reg as scratch.
// This gives slightly larger code.
@@ -2433,7 +2510,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
__ fld_d(Operand(esp, 0));
__ add(esp, Immediate(kDoubleSize));
}
- GenerateOperation(masm);
+ GenerateOperation(masm, type_);
__ mov(Operand(ecx, 0), ebx);
__ mov(Operand(ecx, kIntSize), edx);
__ mov(Operand(ecx, 2 * kIntSize), eax);
@@ -2449,7 +2526,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
__ sub(esp, Immediate(kDoubleSize));
__ movdbl(Operand(esp, 0), xmm1);
__ fld_d(Operand(esp, 0));
- GenerateOperation(masm);
+ GenerateOperation(masm, type_);
__ fstp_d(Operand(esp, 0));
__ movdbl(xmm1, Operand(esp, 0));
__ add(esp, Immediate(kDoubleSize));
@@ -2492,6 +2569,7 @@ Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
switch (type_) {
case TranscendentalCache::SIN: return Runtime::kMath_sin;
case TranscendentalCache::COS: return Runtime::kMath_cos;
+ case TranscendentalCache::TAN: return Runtime::kMath_tan;
case TranscendentalCache::LOG: return Runtime::kMath_log;
default:
UNIMPLEMENTED();
@@ -2500,12 +2578,15 @@ Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
}
-void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
+void TranscendentalCacheStub::GenerateOperation(
+ MacroAssembler* masm, TranscendentalCache::Type type) {
// Only free register is edi.
// Input value is on FP stack, and also in ebx/edx.
// Input value is possibly in xmm1.
// Address of result (a newly allocated HeapNumber) may be in eax.
- if (type_ == TranscendentalCache::SIN || type_ == TranscendentalCache::COS) {
+ if (type == TranscendentalCache::SIN ||
+ type == TranscendentalCache::COS ||
+ type == TranscendentalCache::TAN) {
// Both fsin and fcos require arguments in the range +/-2^63 and
// return NaN for infinities and NaN. They can share all code except
// the actual fsin/fcos operation.
@@ -2569,19 +2650,25 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
// FPU Stack: input % 2*pi
__ bind(&in_range);
- switch (type_) {
+ switch (type) {
case TranscendentalCache::SIN:
__ fsin();
break;
case TranscendentalCache::COS:
__ fcos();
break;
+ case TranscendentalCache::TAN:
+ // FPTAN calculates tangent onto st(0) and pushes 1.0 onto the
+ // FP register stack.
+ __ fptan();
+ __ fstp(0); // Pop FP register stack.
+ break;
default:
UNREACHABLE();
}
__ bind(&done);
} else {
- ASSERT(type_ == TranscendentalCache::LOG);
+ ASSERT(type == TranscendentalCache::LOG);
__ fldln2();
__ fxch();
__ fyl2x();
@@ -2852,157 +2939,263 @@ void FloatingPointHelper::CheckFloatOperandsAreInt32(MacroAssembler* masm,
void MathPowStub::Generate(MacroAssembler* masm) {
- // Registers are used as follows:
- // edx = base
- // eax = exponent
- // ecx = temporary, result
-
CpuFeatures::Scope use_sse2(SSE2);
- Label allocate_return, call_runtime;
-
- // Load input parameters.
- __ mov(edx, Operand(esp, 2 * kPointerSize));
- __ mov(eax, Operand(esp, 1 * kPointerSize));
-
- // Save 1 in xmm3 - we need this several times later on.
- __ mov(ecx, Immediate(1));
- __ cvtsi2sd(xmm3, ecx);
-
- Label exponent_nonsmi;
- Label base_nonsmi;
- // If the exponent is a heap number go to that specific case.
- __ JumpIfNotSmi(eax, &exponent_nonsmi);
- __ JumpIfNotSmi(edx, &base_nonsmi);
-
- // Optimized version when both exponent and base are smis.
- Label powi;
- __ SmiUntag(edx);
- __ cvtsi2sd(xmm0, edx);
- __ jmp(&powi);
- // exponent is smi and base is a heapnumber.
- __ bind(&base_nonsmi);
Factory* factory = masm->isolate()->factory();
- __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
- factory->heap_number_map());
- __ j(not_equal, &call_runtime);
+ const Register exponent = eax;
+ const Register base = edx;
+ const Register scratch = ecx;
+ const XMMRegister double_result = xmm3;
+ const XMMRegister double_base = xmm2;
+ const XMMRegister double_exponent = xmm1;
+ const XMMRegister double_scratch = xmm4;
+
+ Label call_runtime, done, exponent_not_smi, int_exponent;
+
+ // Save 1 in double_result - we need this several times later on.
+ __ mov(scratch, Immediate(1));
+ __ cvtsi2sd(double_result, scratch);
+
+ if (exponent_type_ == ON_STACK) {
+ Label base_is_smi, unpack_exponent;
+ // The exponent and base are supplied as arguments on the stack.
+ // This can only happen if the stub is called from non-optimized code.
+ // Load input parameters from stack.
+ __ mov(base, Operand(esp, 2 * kPointerSize));
+ __ mov(exponent, Operand(esp, 1 * kPointerSize));
+
+ __ JumpIfSmi(base, &base_is_smi, Label::kNear);
+ __ cmp(FieldOperand(base, HeapObject::kMapOffset),
+ factory->heap_number_map());
+ __ j(not_equal, &call_runtime);
+
+ __ movdbl(double_base, FieldOperand(base, HeapNumber::kValueOffset));
+ __ jmp(&unpack_exponent, Label::kNear);
+
+ __ bind(&base_is_smi);
+ __ SmiUntag(base);
+ __ cvtsi2sd(double_base, base);
+
+ __ bind(&unpack_exponent);
+ __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
+ __ SmiUntag(exponent);
+ __ jmp(&int_exponent);
+
+ __ bind(&exponent_not_smi);
+ __ cmp(FieldOperand(exponent, HeapObject::kMapOffset),
+ factory->heap_number_map());
+ __ j(not_equal, &call_runtime);
+ __ movdbl(double_exponent,
+ FieldOperand(exponent, HeapNumber::kValueOffset));
+ } else if (exponent_type_ == TAGGED) {
+ __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
+ __ SmiUntag(exponent);
+ __ jmp(&int_exponent);
+
+ __ bind(&exponent_not_smi);
+ __ movdbl(double_exponent,
+ FieldOperand(exponent, HeapNumber::kValueOffset));
+ }
- __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
+ if (exponent_type_ != INTEGER) {
+ Label fast_power;
+ // Detect integer exponents stored as double.
+ __ cvttsd2si(exponent, Operand(double_exponent));
+ // Skip to runtime if possibly NaN (indicated by the indefinite integer).
+ __ cmp(exponent, Immediate(0x80000000u));
+ __ j(equal, &call_runtime);
+ __ cvtsi2sd(double_scratch, exponent);
+ // Already ruled out NaNs for exponent.
+ __ ucomisd(double_exponent, double_scratch);
+ __ j(equal, &int_exponent);
+
+ if (exponent_type_ == ON_STACK) {
+ // Detect square root case. Crankshaft detects constant +/-0.5 at
+ // compile time and uses DoMathPowHalf instead. We then skip this check
+ // for non-constant cases of +/-0.5 as these hardly occur.
+ Label continue_sqrt, continue_rsqrt, not_plus_half;
+ // Test for 0.5.
+ // Load double_scratch with 0.5.
+ __ mov(scratch, Immediate(0x3F000000u));
+ __ movd(double_scratch, scratch);
+ __ cvtss2sd(double_scratch, double_scratch);
+ // Already ruled out NaNs for exponent.
+ __ ucomisd(double_scratch, double_exponent);
+ __ j(not_equal, &not_plus_half, Label::kNear);
+
+ // Calculates square root of base. Check for the special case of
+ // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
+ // According to IEEE-754, single-precision -Infinity has the highest
+ // 9 bits set and the lowest 23 bits cleared.
+ __ mov(scratch, 0xFF800000u);
+ __ movd(double_scratch, scratch);
+ __ cvtss2sd(double_scratch, double_scratch);
+ __ ucomisd(double_base, double_scratch);
+ // Comparing -Infinity with NaN results in "unordered", which sets the
+ // zero flag as if both were equal. However, it also sets the carry flag.
+ __ j(not_equal, &continue_sqrt, Label::kNear);
+ __ j(carry, &continue_sqrt, Label::kNear);
+
+ // Set result to Infinity in the special case.
+ __ xorps(double_result, double_result);
+ __ subsd(double_result, double_scratch);
+ __ jmp(&done);
+
+ __ bind(&continue_sqrt);
+ // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
+ __ xorps(double_scratch, double_scratch);
+ __ addsd(double_scratch, double_base); // Convert -0 to +0.
+ __ sqrtsd(double_result, double_scratch);
+ __ jmp(&done);
+
+ // Test for -0.5.
+ __ bind(&not_plus_half);
+ // Load double_exponent with -0.5 by subtracting 1.
+ __ subsd(double_scratch, double_result);
+ // Already ruled out NaNs for exponent.
+ __ ucomisd(double_scratch, double_exponent);
+ __ j(not_equal, &fast_power, Label::kNear);
+
+ // Calculates reciprocal of square root of base. Check for the special
+ // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
+ // According to IEEE-754, single-precision -Infinity has the highest
+ // 9 bits set and the lowest 23 bits cleared.
+ __ mov(scratch, 0xFF800000u);
+ __ movd(double_scratch, scratch);
+ __ cvtss2sd(double_scratch, double_scratch);
+ __ ucomisd(double_base, double_scratch);
+ // Comparing -Infinity with NaN results in "unordered", which sets the
+ // zero flag as if both were equal. However, it also sets the carry flag.
+ __ j(not_equal, &continue_rsqrt, Label::kNear);
+ __ j(carry, &continue_rsqrt, Label::kNear);
+
+ // Set result to 0 in the special case.
+ __ xorps(double_result, double_result);
+ __ jmp(&done);
+
+ __ bind(&continue_rsqrt);
+ // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
+ __ xorps(double_exponent, double_exponent);
+ __ addsd(double_exponent, double_base); // Convert -0 to +0.
+ __ sqrtsd(double_exponent, double_exponent);
+ __ divsd(double_result, double_exponent);
+ __ jmp(&done);
+ }
- // Optimized version of pow if exponent is a smi.
- // xmm0 contains the base.
- __ bind(&powi);
- __ SmiUntag(eax);
+ // Using FPU instructions to calculate power.
+ Label fast_power_failed;
+ __ bind(&fast_power);
+ __ fnclex(); // Clear flags to catch exceptions later.
+ // Transfer (B)ase and (E)xponent onto the FPU register stack.
+ __ sub(esp, Immediate(kDoubleSize));
+ __ movdbl(Operand(esp, 0), double_exponent);
+ __ fld_d(Operand(esp, 0)); // E
+ __ movdbl(Operand(esp, 0), double_base);
+ __ fld_d(Operand(esp, 0)); // B, E
+
+ // Exponent is in st(1) and base is in st(0)
+ // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
+ // FYL2X calculates st(1) * log2(st(0))
+ __ fyl2x(); // X
+ __ fld(0); // X, X
+ __ frndint(); // rnd(X), X
+ __ fsub(1); // rnd(X), X-rnd(X)
+ __ fxch(1); // X - rnd(X), rnd(X)
+ // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
+ __ f2xm1(); // 2^(X-rnd(X)) - 1, rnd(X)
+ __ fld1(); // 1, 2^(X-rnd(X)) - 1, rnd(X)
+ __ faddp(1); // 1, 2^(X-rnd(X)), rnd(X)
+ // FSCALE calculates st(0) * 2^st(1)
+ __ fscale(); // 2^X, rnd(X)
+ __ fstp(1);
+ // Bail out to runtime in case of exceptions in the status word.
+ __ fnstsw_ax();
+ __ test_b(eax, 0x5F); // We check for all but precision exception.
+ __ j(not_zero, &fast_power_failed, Label::kNear);
+ __ fstp_d(Operand(esp, 0));
+ __ movdbl(double_result, Operand(esp, 0));
+ __ add(esp, Immediate(kDoubleSize));
+ __ jmp(&done);
- // Save exponent in base as we need to check if exponent is negative later.
- // We know that base and exponent are in different registers.
- __ mov(edx, eax);
+ __ bind(&fast_power_failed);
+ __ fninit();
+ __ add(esp, Immediate(kDoubleSize));
+ __ jmp(&call_runtime);
+ }
+
+ // Calculate power with integer exponent.
+ __ bind(&int_exponent);
+ const XMMRegister double_scratch2 = double_exponent;
+ __ mov(scratch, exponent); // Back up exponent.
+ __ movsd(double_scratch, double_base); // Back up base.
+ __ movsd(double_scratch2, double_result); // Load double_exponent with 1.
// Get absolute value of exponent.
- Label no_neg;
- __ cmp(eax, 0);
- __ j(greater_equal, &no_neg, Label::kNear);
- __ neg(eax);
+ Label no_neg, while_true, no_multiply;
+ __ test(scratch, scratch);
+ __ j(positive, &no_neg, Label::kNear);
+ __ neg(scratch);
__ bind(&no_neg);
- // Load xmm1 with 1.
- __ movsd(xmm1, xmm3);
- Label while_true;
- Label no_multiply;
-
__ bind(&while_true);
- __ shr(eax, 1);
+ __ shr(scratch, 1);
__ j(not_carry, &no_multiply, Label::kNear);
- __ mulsd(xmm1, xmm0);
+ __ mulsd(double_result, double_scratch);
__ bind(&no_multiply);
- __ mulsd(xmm0, xmm0);
- __ j(not_zero, &while_true);
- // base has the original value of the exponent - if the exponent is
- // negative return 1/result.
- __ test(edx, edx);
- __ j(positive, &allocate_return);
- // Special case if xmm1 has reached infinity.
- __ mov(ecx, Immediate(0x7FB00000));
- __ movd(xmm0, ecx);
- __ cvtss2sd(xmm0, xmm0);
- __ ucomisd(xmm0, xmm1);
- __ j(equal, &call_runtime);
- __ divsd(xmm3, xmm1);
- __ movsd(xmm1, xmm3);
- __ jmp(&allocate_return);
-
- // exponent (or both) is a heapnumber - no matter what we should now work
- // on doubles.
- __ bind(&exponent_nonsmi);
- __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
- factory->heap_number_map());
- __ j(not_equal, &call_runtime);
- __ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
- // Test if exponent is nan.
- __ ucomisd(xmm1, xmm1);
- __ j(parity_even, &call_runtime);
+ __ mulsd(double_scratch, double_scratch);
+ __ j(not_zero, &while_true);
- Label base_not_smi;
- Label handle_special_cases;
- __ JumpIfNotSmi(edx, &base_not_smi, Label::kNear);
- __ SmiUntag(edx);
- __ cvtsi2sd(xmm0, edx);
- __ jmp(&handle_special_cases, Label::kNear);
-
- __ bind(&base_not_smi);
- __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
- factory->heap_number_map());
- __ j(not_equal, &call_runtime);
- __ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
- __ and_(ecx, HeapNumber::kExponentMask);
- __ cmp(ecx, Immediate(HeapNumber::kExponentMask));
- // base is NaN or +/-Infinity
- __ j(greater_equal, &call_runtime);
- __ movdbl(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
+ // scratch has the original value of the exponent - if the exponent is
+ // negative, return 1/result.
+ __ test(exponent, exponent);
+ __ j(positive, &done);
+ __ divsd(double_scratch2, double_result);
+ __ movsd(double_result, double_scratch2);
+ // Test whether result is zero. Bail out to check for subnormal result.
+ // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
+ __ xorps(double_scratch2, double_scratch2);
+ __ ucomisd(double_scratch2, double_result); // Result cannot be NaN.
+ // double_exponent aliased as double_scratch2 has already been overwritten
+ // and may not have contained the exponent value in the first place when the
+ // exponent is a smi. We reset it with exponent value before bailing out.
+ __ j(not_equal, &done);
+ __ cvtsi2sd(double_exponent, exponent);
+
+ // Returning or bailing out.
+ Counters* counters = masm->isolate()->counters();
+ if (exponent_type_ == ON_STACK) {
+ // The arguments are still on the stack.
+ __ bind(&call_runtime);
+ __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
- // base is in xmm0 and exponent is in xmm1.
- __ bind(&handle_special_cases);
- Label not_minus_half;
- // Test for -0.5.
- // Load xmm2 with -0.5.
- __ mov(ecx, Immediate(0xBF000000));
- __ movd(xmm2, ecx);
- __ cvtss2sd(xmm2, xmm2);
- // xmm2 now has -0.5.
- __ ucomisd(xmm2, xmm1);
- __ j(not_equal, &not_minus_half, Label::kNear);
-
- // Calculates reciprocal of square root.
- // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
- __ xorps(xmm1, xmm1);
- __ addsd(xmm1, xmm0);
- __ sqrtsd(xmm1, xmm1);
- __ divsd(xmm3, xmm1);
- __ movsd(xmm1, xmm3);
- __ jmp(&allocate_return);
-
- // Test for 0.5.
- __ bind(&not_minus_half);
- // Load xmm2 with 0.5.
- // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
- __ addsd(xmm2, xmm3);
- // xmm2 now has 0.5.
- __ ucomisd(xmm2, xmm1);
- __ j(not_equal, &call_runtime);
- // Calculates square root.
- // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
- __ xorps(xmm1, xmm1);
- __ addsd(xmm1, xmm0);
- __ sqrtsd(xmm1, xmm1);
-
- __ bind(&allocate_return);
- __ AllocateHeapNumber(ecx, eax, edx, &call_runtime);
- __ movdbl(FieldOperand(ecx, HeapNumber::kValueOffset), xmm1);
- __ mov(eax, ecx);
- __ ret(2 * kPointerSize);
+ // The stub is called from non-optimized code, which expects the result
+ // as heap number in exponent.
+ __ bind(&done);
+ __ AllocateHeapNumber(eax, scratch, base, &call_runtime);
+ __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), double_result);
+ __ IncrementCounter(counters->math_pow(), 1);
+ __ ret(2 * kPointerSize);
+ } else {
+ __ bind(&call_runtime);
+ {
+ AllowExternalCallThatCantCauseGC scope(masm);
+ __ PrepareCallCFunction(4, scratch);
+ __ movdbl(Operand(esp, 0 * kDoubleSize), double_base);
+ __ movdbl(Operand(esp, 1 * kDoubleSize), double_exponent);
+ __ CallCFunction(
+ ExternalReference::power_double_double_function(masm->isolate()), 4);
+ }
+ // Return value is in st(0) on ia32.
+ // Store it into the (fixed) result register.
+ __ sub(esp, Immediate(kDoubleSize));
+ __ fstp_d(Operand(esp, 0));
+ __ movdbl(double_result, Operand(esp, 0));
+ __ add(esp, Immediate(kDoubleSize));
- __ bind(&call_runtime);
- __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
+ __ bind(&done);
+ __ IncrementCounter(counters->math_pow(), 1);
+ __ ret(0);
+ }
}
@@ -3187,7 +3380,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
__ mov(FieldOperand(eax, i), edx);
}
- // Setup the callee in-object property.
+ // Set up the callee in-object property.
STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
__ mov(edx, Operand(esp, 4 * kPointerSize));
__ mov(FieldOperand(eax, JSObject::kHeaderSize +
@@ -3200,7 +3393,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
Heap::kArgumentsLengthIndex * kPointerSize),
ecx);
- // Setup the elements pointer in the allocated arguments object.
+ // Set up the elements pointer in the allocated arguments object.
// If we allocated a parameter map, edi will point there, otherwise to the
// backing store.
__ lea(edi, Operand(eax, Heap::kArgumentsObjectSize));
@@ -3379,7 +3572,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Get the parameters pointer from the stack.
__ mov(edx, Operand(esp, 2 * kPointerSize));
- // Setup the elements pointer in the allocated arguments object and
+ // Set up the elements pointer in the allocated arguments object and
// initialize the header in the elements fixed array.
__ lea(edi, Operand(eax, Heap::kArgumentsObjectSizeStrict));
__ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
@@ -3525,26 +3718,40 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
__ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
// First check for flat two byte string.
- __ and_(ebx,
- kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
+ __ and_(ebx, kIsNotStringMask |
+ kStringRepresentationMask |
+ kStringEncodingMask |
+ kShortExternalStringMask);
STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
__ j(zero, &seq_two_byte_string, Label::kNear);
- // Any other flat string must be a flat ascii string.
- __ and_(ebx, Immediate(kIsNotStringMask | kStringRepresentationMask));
+ // Any other flat string must be a flat ASCII string. None of the following
+ // string type tests will succeed if subject is not a string or a short
+ // external string.
+ __ and_(ebx, Immediate(kIsNotStringMask |
+ kStringRepresentationMask |
+ kShortExternalStringMask));
__ j(zero, &seq_ascii_string, Label::kNear);
+ // ebx: whether subject is a string and if yes, its string representation
// Check for flat cons string or sliced string.
// A flat cons string is a cons string where the second part is the empty
// string. In that case the subject string is just the first part of the cons
// string. Also in this case the first part of the cons string is known to be
// a sequential string or an external string.
// In the case of a sliced string its offset has to be taken into account.
- Label cons_string, check_encoding;
+ Label cons_string, external_string, check_encoding;
STATIC_ASSERT(kConsStringTag < kExternalStringTag);
STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
+ STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
+ STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
__ cmp(ebx, Immediate(kExternalStringTag));
__ j(less, &cons_string);
- __ j(equal, &runtime);
+ __ j(equal, &external_string);
+
+ // Catch non-string subject or short external string.
+ STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag !=0);
+ __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag));
+ __ j(not_zero, &runtime);
// String is sliced.
__ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
@@ -3566,16 +3773,16 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
kStringRepresentationMask | kStringEncodingMask);
STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
__ j(zero, &seq_two_byte_string, Label::kNear);
- // Any other flat string must be ascii.
+ // Any other flat string must be sequential ASCII or external.
__ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
kStringRepresentationMask);
- __ j(not_zero, &runtime);
+ __ j(not_zero, &external_string);
__ bind(&seq_ascii_string);
- // eax: subject string (flat ascii)
+ // eax: subject string (flat ASCII)
// ecx: RegExp data (FixedArray)
__ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
- __ Set(ecx, Immediate(1)); // Type is ascii.
+ __ Set(ecx, Immediate(1)); // Type is ASCII.
__ jmp(&check_code, Label::kNear);
__ bind(&seq_two_byte_string);
@@ -3592,7 +3799,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// eax: subject string
// edx: code
- // ecx: encoding of subject string (1 if ascii, 0 if two_byte);
+ // ecx: encoding of subject string (1 if ASCII, 0 if two_byte);
// Load used arguments before starting to push arguments for call to native
// RegExp code to avoid handling changing stack height.
__ mov(ebx, Operand(esp, kPreviousIndexOffset));
@@ -3601,7 +3808,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// eax: subject string
// ebx: previous index
// edx: code
- // ecx: encoding of subject string (1 if ascii 0 if two_byte);
+ // ecx: encoding of subject string (1 if ASCII 0 if two_byte);
// All checks done. Now push arguments for native regexp code.
Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->regexp_entry_native(), 1);
@@ -3641,7 +3848,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// esi: original subject string
// eax: underlying subject string
// ebx: previous index
- // ecx: encoding of subject string (1 if ascii 0 if two_byte);
+ // ecx: encoding of subject string (1 if ASCII 0 if two_byte);
// edx: code
// Argument 4: End of string data
// Argument 3: Start of string data
@@ -3716,7 +3923,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ Throw(eax);
__ bind(&throw_termination_exception);
- __ ThrowUncatchable(TERMINATION, eax);
+ __ ThrowUncatchable(eax);
__ bind(&failure);
// For failure to match, return null.
@@ -3790,6 +3997,27 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ mov(eax, Operand(esp, kLastMatchInfoOffset));
__ ret(4 * kPointerSize);
+ // External string. Short external strings have already been ruled out.
+ // eax: subject string (expected to be external)
+ // ebx: scratch
+ __ bind(&external_string);
+ __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
+ __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
+ if (FLAG_debug_code) {
+ // Assert that we do not have a cons or slice (indirect strings) here.
+ // Sequential strings have already been ruled out.
+ __ test_b(ebx, kIsIndirectStringMask);
+ __ Assert(zero, "external string expected, but not found");
+ }
+ __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
+ // Move the pointer so that offset-wise, it looks like a sequential string.
+ STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqAsciiString::kHeaderSize);
+ __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
+ STATIC_ASSERT(kTwoByteStringTag == 0);
+ __ test_b(ebx, kStringEncodingMask);
+ __ j(not_zero, &seq_ascii_string);
+ __ jmp(&seq_two_byte_string);
+
// Do the runtime call to execute the regexp.
__ bind(&runtime);
__ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
@@ -4024,39 +4252,6 @@ void CompareStub::Generate(MacroAssembler* masm) {
// NOTICE! This code is only reached after a smi-fast-case check, so
// it is certain that at least one operand isn't a smi.
- {
- Label not_user_equal, user_equal;
- __ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &not_user_equal);
- __ test(edx, Immediate(kSmiTagMask));
- __ j(zero, &not_user_equal);
-
- __ CmpObjectType(eax, JS_OBJECT_TYPE, ebx);
- __ j(not_equal, &not_user_equal);
-
- __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx);
- __ j(not_equal, &not_user_equal);
-
- __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
- 1 << Map::kUseUserObjectComparison);
- __ j(not_zero, &user_equal);
- __ test_b(FieldOperand(ecx, Map::kBitField2Offset),
- 1 << Map::kUseUserObjectComparison);
- __ j(not_zero, &user_equal);
-
- __ jmp(&not_user_equal);
-
- __ bind(&user_equal);
-
- __ pop(ebx); // Return address.
- __ push(eax);
- __ push(edx);
- __ push(ebx);
- __ TailCallRuntime(Runtime::kUserObjectEquals, 2, 1);
-
- __ bind(&not_user_equal);
- }
-
// Identical objects can be compared fast, but there are some tricky cases
// for NaN and undefined.
{
@@ -4281,7 +4476,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
__ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx,
&check_unequal_objects);
- // Inline comparison of ascii strings.
+ // Inline comparison of ASCII strings.
if (cc_ == equal) {
StringCompareStub::GenerateFlatAsciiStringEquals(masm,
edx,
@@ -4379,30 +4574,52 @@ void StackCheckStub::Generate(MacroAssembler* masm) {
}
-void CallFunctionStub::FinishCode(Code* code) {
- code->set_has_function_cache(RecordCallTarget());
+void InterruptStub::Generate(MacroAssembler* masm) {
+ __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
}
-void CallFunctionStub::Clear(Heap* heap, Address address) {
- ASSERT(Memory::uint8_at(address + kPointerSize) == Assembler::kTestEaxByte);
- // 1 ~ size of the test eax opcode.
- Object* cell = Memory::Object_at(address + kPointerSize + 1);
- // Low-level because clearing happens during GC.
- reinterpret_cast<JSGlobalPropertyCell*>(cell)->set_value(
- RawUninitializedSentinel(heap));
-}
+static void GenerateRecordCallTarget(MacroAssembler* masm) {
+ // Cache the called function in a global property cell. Cache states
+ // are uninitialized, monomorphic (indicated by a JSFunction), and
+ // megamorphic.
+ // ebx : cache cell for call target
+ // edi : the function to call
+ Isolate* isolate = masm->isolate();
+ Label initialize, done;
+
+ // Load the cache state into ecx.
+ __ mov(ecx, FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset));
+
+ // A monomorphic cache hit or an already megamorphic state: invoke the
+ // function without changing the state.
+ __ cmp(ecx, edi);
+ __ j(equal, &done, Label::kNear);
+ __ cmp(ecx, Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate)));
+ __ j(equal, &done, Label::kNear);
+
+ // A monomorphic miss (i.e., here the cache is not uninitialized) goes
+ // megamorphic.
+ __ cmp(ecx, Immediate(TypeFeedbackCells::UninitializedSentinel(isolate)));
+ __ j(equal, &initialize, Label::kNear);
+ // MegamorphicSentinel is an immortal immovable object (undefined) so no
+ // write-barrier is needed.
+ __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
+ Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate)));
+ __ jmp(&done, Label::kNear);
+ // An uninitialized cache is patched with the function.
+ __ bind(&initialize);
+ __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset), edi);
+ // No need for a write barrier here - cells are rescanned.
-Object* CallFunctionStub::GetCachedValue(Address address) {
- ASSERT(Memory::uint8_at(address + kPointerSize) == Assembler::kTestEaxByte);
- // 1 ~ size of the test eax opcode.
- Object* cell = Memory::Object_at(address + kPointerSize + 1);
- return JSGlobalPropertyCell::cast(cell)->value();
+ __ bind(&done);
}
void CallFunctionStub::Generate(MacroAssembler* masm) {
+ // ebx : cache cell for call target
+ // edi : the function to call
Isolate* isolate = masm->isolate();
Label slow, non_function;
@@ -4418,16 +4635,12 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ cmp(eax, isolate->factory()->the_hole_value());
__ j(not_equal, &receiver_ok, Label::kNear);
// Patch the receiver on the stack with the global receiver object.
- __ mov(ebx, GlobalObjectOperand());
- __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
- __ mov(Operand(esp, (argc_ + 1) * kPointerSize), ebx);
+ __ mov(ecx, GlobalObjectOperand());
+ __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalReceiverOffset));
+ __ mov(Operand(esp, (argc_ + 1) * kPointerSize), ecx);
__ bind(&receiver_ok);
}
- // Get the function to call from the stack.
- // +2 ~ receiver, return address
- __ mov(edi, Operand(esp, (argc_ + 2) * kPointerSize));
-
// Check that the function really is a JavaScript function.
__ JumpIfSmi(edi, &non_function);
// Goto slow case if we do not have a function.
@@ -4435,44 +4648,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ j(not_equal, &slow);
if (RecordCallTarget()) {
- // Cache the called function in a global property cell in the
- // instruction stream after the call. Cache states are uninitialized,
- // monomorphic (indicated by a JSFunction), and megamorphic.
- Label initialize, call;
- // Load the cache cell address into ebx and the cache state into ecx.
- __ mov(ebx, Operand(esp, 0)); // Return address.
- __ mov(ebx, Operand(ebx, 1)); // 1 ~ sizeof 'test eax' opcode in bytes.
- __ mov(ecx, FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset));
-
- // A monomorphic cache hit or an already megamorphic state: invoke the
- // function without changing the state.
- __ cmp(ecx, edi);
- __ j(equal, &call, Label::kNear);
- __ cmp(ecx, Immediate(MegamorphicSentinel(isolate)));
- __ j(equal, &call, Label::kNear);
-
- // A monomorphic miss (i.e, here the cache is not uninitialized) goes
- // megamorphic.
- __ cmp(ecx, Immediate(UninitializedSentinel(isolate)));
- __ j(equal, &initialize, Label::kNear);
- // MegamorphicSentinel is a root so no write-barrier is needed.
- __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
- Immediate(MegamorphicSentinel(isolate)));
- __ jmp(&call, Label::kNear);
-
- // An uninitialized cache is patched with the function.
- __ bind(&initialize);
- __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset), edi);
- __ mov(ecx, edi);
- __ RecordWriteField(ebx,
- JSGlobalPropertyCell::kValueOffset,
- ecx,
- edx,
- kDontSaveFPRegs,
- OMIT_REMEMBERED_SET, // Cells are rescanned.
- OMIT_SMI_CHECK);
-
- __ bind(&call);
+ GenerateRecordCallTarget(masm);
}
// Fast-case: Just invoke the function.
@@ -4499,11 +4675,10 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ bind(&slow);
if (RecordCallTarget()) {
// If there is a call target cache, mark it megamorphic in the
- // non-function case.
- __ mov(ebx, Operand(esp, 0));
- __ mov(ebx, Operand(ebx, 1));
+ // non-function case. MegamorphicSentinel is an immortal immovable
+ // object (undefined) so no write barrier is needed.
__ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
- Immediate(MegamorphicSentinel(isolate)));
+ Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate)));
}
// Check for function proxy.
__ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
@@ -4533,6 +4708,50 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
}
+void CallConstructStub::Generate(MacroAssembler* masm) {
+ // eax : number of arguments
+ // ebx : cache cell for call target
+ // edi : constructor function
+ Label slow, non_function_call;
+
+ // Check that function is not a smi.
+ __ JumpIfSmi(edi, &non_function_call);
+ // Check that function is a JSFunction.
+ __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
+ __ j(not_equal, &slow);
+
+ if (RecordCallTarget()) {
+ GenerateRecordCallTarget(masm);
+ }
+
+ // Jump to the function-specific construct stub.
+ __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kConstructStubOffset));
+ __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize));
+ __ jmp(ebx);
+
+ // edi: called object
+ // eax: number of arguments
+ // ecx: object map
+ Label do_call;
+ __ bind(&slow);
+ __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
+ __ j(not_equal, &non_function_call);
+ __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
+ __ jmp(&do_call);
+
+ __ bind(&non_function_call);
+ __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
+ __ bind(&do_call);
+ // Set expected number of arguments to zero (not changing eax).
+ __ Set(ebx, Immediate(0));
+ Handle<Code> arguments_adaptor =
+ masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+ __ SetCallKind(ecx, CALL_AS_METHOD);
+ __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET);
+}
+
+
bool CEntryStub::NeedsImmovableCode() {
return false;
}
@@ -4567,11 +4786,6 @@ void CEntryStub::GenerateAheadOfTime() {
}
-void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
- __ Throw(eax);
-}
-
-
void CEntryStub::GenerateCore(MacroAssembler* masm,
Label* throw_normal_exception,
Label* throw_termination_exception,
@@ -4690,12 +4904,6 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
}
-void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
- UncatchableExceptionType type) {
- __ ThrowUncatchable(type, eax);
-}
-
-
void CEntryStub::Generate(MacroAssembler* masm) {
// eax: number of arguments including receiver
// ebx: pointer to C function (C callee-saved)
@@ -4749,21 +4957,32 @@ void CEntryStub::Generate(MacroAssembler* masm) {
true);
__ bind(&throw_out_of_memory_exception);
- GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
+ // Set external caught exception to false.
+ Isolate* isolate = masm->isolate();
+ ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
+ isolate);
+ __ mov(Operand::StaticVariable(external_caught), Immediate(false));
+
+ // Set pending exception and eax to out of memory exception.
+ ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
+ isolate);
+ __ mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
+ __ mov(Operand::StaticVariable(pending_exception), eax);
+ // Fall through to the next label.
__ bind(&throw_termination_exception);
- GenerateThrowUncatchable(masm, TERMINATION);
+ __ ThrowUncatchable(eax);
__ bind(&throw_normal_exception);
- GenerateThrowTOS(masm);
+ __ Throw(eax);
}
void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
- Label invoke, exit;
+ Label invoke, handler_entry, exit;
Label not_outermost_js, not_outermost_js_2;
- // Setup frame.
+ // Set up frame.
__ push(ebp);
__ mov(ebp, esp);
@@ -4793,20 +5012,23 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
__ bind(&cont);
- // Call a faked try-block that does the invoke.
- __ call(&invoke);
-
- // Caught exception: Store result (exception) in the pending
- // exception field in the JSEnv and return a failure sentinel.
+ // Jump to a faked try block that does the invoke, with a faked catch
+ // block that sets the pending exception.
+ __ jmp(&invoke);
+ __ bind(&handler_entry);
+ handler_offset_ = handler_entry.pos();
+ // Caught exception: Store result (exception) in the pending exception
+ // field in the JSEnv and return a failure sentinel.
ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
masm->isolate());
__ mov(Operand::StaticVariable(pending_exception), eax);
__ mov(eax, reinterpret_cast<int32_t>(Failure::Exception()));
__ jmp(&exit);
- // Invoke: Link this frame into the handler chain.
+ // Invoke: Link this frame into the handler chain. There's only one
+ // handler block in this code object, so its index is 0.
__ bind(&invoke);
- __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
+ __ PushTryHandler(StackHandler::JS_ENTRY, 0);
// Clear any pending exceptions.
__ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
@@ -4815,14 +5037,13 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// Fake a receiver (NULL).
__ push(Immediate(0)); // receiver
- // Invoke the function by calling through JS entry trampoline
- // builtin and pop the faked function when we return. Notice that we
- // cannot store a reference to the trampoline code directly in this
- // stub, because the builtin stubs may not have been generated yet.
+ // Invoke the function by calling through JS entry trampoline builtin and
+ // pop the faked function when we return. Notice that we cannot store a
+ // reference to the trampoline code directly in this stub, because the
+ // builtin stubs may not have been generated yet.
if (is_construct) {
- ExternalReference construct_entry(
- Builtins::kJSConstructEntryTrampoline,
- masm->isolate());
+ ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
+ masm->isolate());
__ mov(edx, Immediate(construct_entry));
} else {
ExternalReference entry(Builtins::kJSEntryTrampoline,
@@ -4892,8 +5113,8 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
static const int kDeltaToCmpImmediate = 2;
static const int kDeltaToMov = 8;
static const int kDeltaToMovImmediate = 9;
- static const int8_t kCmpEdiImmediateByte1 = BitCast<int8_t, uint8_t>(0x81);
- static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff);
+ static const int8_t kCmpEdiOperandByte1 = BitCast<int8_t, uint8_t>(0x3b);
+ static const int8_t kCmpEdiOperandByte2 = BitCast<int8_t, uint8_t>(0x3d);
static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);
ExternalReference roots_array_start =
@@ -4958,12 +5179,13 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ mov(scratch, Operand(esp, 0 * kPointerSize));
__ sub(scratch, Operand(esp, 1 * kPointerSize));
if (FLAG_debug_code) {
- __ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1);
+ __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
__ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)");
- __ cmpb(Operand(scratch, 1), kCmpEdiImmediateByte2);
+ __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
__ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)");
}
- __ mov(Operand(scratch, kDeltaToCmpImmediate), map);
+ __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
+ __ mov(Operand(scratch, 0), map);
}
// Loop through the prototype chain of the object looking for the function
@@ -5132,11 +5354,6 @@ void CompareStub::PrintName(StringStream* stream) {
// StringCharCodeAtGenerator
void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
- Label flat_string;
- Label ascii_string;
- Label got_char_code;
- Label sliced_string;
-
// If the receiver is a smi trigger the non-string case.
STATIC_ASSERT(kSmiTag == 0);
__ JumpIfSmi(object_, receiver_not_string_);
@@ -5151,80 +5368,18 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
// If the index is non-smi trigger the non-smi case.
STATIC_ASSERT(kSmiTag == 0);
__ JumpIfNotSmi(index_, &index_not_smi_);
-
- // Put smi-tagged index into scratch register.
- __ mov(scratch_, index_);
__ bind(&got_smi_index_);
// Check for index out of range.
- __ cmp(scratch_, FieldOperand(object_, String::kLengthOffset));
+ __ cmp(index_, FieldOperand(object_, String::kLengthOffset));
__ j(above_equal, index_out_of_range_);
- // We need special handling for non-flat strings.
- STATIC_ASSERT(kSeqStringTag == 0);
- __ test(result_, Immediate(kStringRepresentationMask));
- __ j(zero, &flat_string);
+ __ SmiUntag(index_);
- // Handle non-flat strings.
- __ and_(result_, kStringRepresentationMask);
- STATIC_ASSERT(kConsStringTag < kExternalStringTag);
- STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
- __ cmp(result_, kExternalStringTag);
- __ j(greater, &sliced_string, Label::kNear);
- __ j(equal, &call_runtime_);
-
- // ConsString.
- // Check whether the right hand side is the empty string (i.e. if
- // this is really a flat string in a cons string). If that is not
- // the case we would rather go to the runtime system now to flatten
- // the string.
- Label assure_seq_string;
- __ cmp(FieldOperand(object_, ConsString::kSecondOffset),
- Immediate(masm->isolate()->factory()->empty_string()));
- __ j(not_equal, &call_runtime_);
- // Get the first of the two strings and load its instance type.
- __ mov(result_, FieldOperand(object_, ConsString::kFirstOffset));
- __ jmp(&assure_seq_string, Label::kNear);
-
- // SlicedString, unpack and add offset.
- __ bind(&sliced_string);
- __ add(scratch_, FieldOperand(object_, SlicedString::kOffsetOffset));
- __ mov(result_, FieldOperand(object_, SlicedString::kParentOffset));
+ Factory* factory = masm->isolate()->factory();
+ StringCharLoadGenerator::Generate(
+ masm, factory, object_, index_, result_, &call_runtime_);
- // Assure that we are dealing with a sequential string. Go to runtime if not.
- __ bind(&assure_seq_string);
- __ mov(result_, FieldOperand(result_, HeapObject::kMapOffset));
- __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
- STATIC_ASSERT(kSeqStringTag == 0);
- __ test(result_, Immediate(kStringRepresentationMask));
- __ j(not_zero, &call_runtime_);
- // Actually fetch the parent string if it is confirmed to be sequential.
- STATIC_ASSERT(SlicedString::kParentOffset == ConsString::kFirstOffset);
- __ mov(object_, FieldOperand(object_, SlicedString::kParentOffset));
-
- // Check for 1-byte or 2-byte string.
- __ bind(&flat_string);
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
- STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
- __ test(result_, Immediate(kStringEncodingMask));
- __ j(not_zero, &ascii_string, Label::kNear);
-
- // 2-byte string.
- // Load the 2-byte character code into the result register.
- STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
- __ movzx_w(result_, FieldOperand(object_,
- scratch_, times_1, // Scratch is smi-tagged.
- SeqTwoByteString::kHeaderSize));
- __ jmp(&got_char_code, Label::kNear);
-
- // ASCII string.
- // Load the byte into the result register.
- __ bind(&ascii_string);
- __ SmiUntag(scratch_);
- __ movzx_b(result_, FieldOperand(object_,
- scratch_, times_1,
- SeqAsciiString::kHeaderSize));
- __ bind(&got_char_code);
__ SmiTag(result_);
__ bind(&exit_);
}
@@ -5244,7 +5399,6 @@ void StringCharCodeAtGenerator::GenerateSlow(
DONT_DO_SMI_CHECK);
call_helper.BeforeCall(masm);
__ push(object_);
- __ push(index_);
__ push(index_); // Consumed by runtime conversion function.
if (index_flags_ == STRING_INDEX_IS_NUMBER) {
__ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
@@ -5253,12 +5407,11 @@ void StringCharCodeAtGenerator::GenerateSlow(
// NumberToSmi discards numbers that are not exact integers.
__ CallRuntime(Runtime::kNumberToSmi, 1);
}
- if (!scratch_.is(eax)) {
+ if (!index_.is(eax)) {
// Save the conversion result before the pop instructions below
// have a chance to overwrite it.
- __ mov(scratch_, eax);
+ __ mov(index_, eax);
}
- __ pop(index_);
__ pop(object_);
// Reload the instance type.
__ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
@@ -5266,7 +5419,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
call_helper.AfterCall(masm);
// If index is still not a smi, it must be out of range.
STATIC_ASSERT(kSmiTag == 0);
- __ JumpIfNotSmi(scratch_, index_out_of_range_);
+ __ JumpIfNotSmi(index_, index_out_of_range_);
// Otherwise, return to the fast path.
__ jmp(&got_smi_index_);
@@ -5276,6 +5429,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
__ bind(&call_runtime_);
call_helper.BeforeCall(masm);
__ push(object_);
+ __ SmiTag(index_);
__ push(index_);
__ CallRuntime(Runtime::kStringCharCodeAt, 2);
if (!result_.is(eax)) {
@@ -5306,7 +5460,7 @@ void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
STATIC_ASSERT(kSmiTag == 0);
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kSmiShiftSize == 0);
- // At this point code register contains smi tagged ascii char code.
+ // At this point code register contains smi tagged ASCII char code.
__ mov(result_, FieldOperand(result_,
code_, times_half_pointer_size,
FixedArray::kHeaderSize));
@@ -5353,7 +5507,7 @@ void StringCharAtGenerator::GenerateSlow(
void StringAddStub::Generate(MacroAssembler* masm) {
- Label string_add_runtime, call_builtin;
+ Label call_runtime, call_builtin;
Builtins::JavaScript builtin_id = Builtins::ADD;
// Load the two arguments.
@@ -5362,14 +5516,14 @@ void StringAddStub::Generate(MacroAssembler* masm) {
// Make sure that both arguments are strings if not known in advance.
if (flags_ == NO_STRING_ADD_FLAGS) {
- __ JumpIfSmi(eax, &string_add_runtime);
+ __ JumpIfSmi(eax, &call_runtime);
__ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx);
- __ j(above_equal, &string_add_runtime);
+ __ j(above_equal, &call_runtime);
// First argument is a a string, test second.
- __ JumpIfSmi(edx, &string_add_runtime);
+ __ JumpIfSmi(edx, &call_runtime);
__ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx);
- __ j(above_equal, &string_add_runtime);
+ __ j(above_equal, &call_runtime);
} else {
// Here at least one of the arguments is definitely a string.
// We convert the one that is not known to be a string.
@@ -5420,15 +5574,14 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ add(ebx, ecx);
STATIC_ASSERT(Smi::kMaxValue == String::kMaxLength);
// Handle exceptionally long strings in the runtime system.
- __ j(overflow, &string_add_runtime);
+ __ j(overflow, &call_runtime);
// Use the symbol table when adding two one character strings, as it
// helps later optimizations to return a symbol here.
__ cmp(ebx, Immediate(Smi::FromInt(2)));
__ j(not_equal, &longer_than_two);
- // Check that both strings are non-external ascii strings.
- __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx,
- &string_add_runtime);
+ // Check that both strings are non-external ASCII strings.
+ __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx, &call_runtime);
// Get the two characters forming the new string.
__ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize));
@@ -5453,11 +5606,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize));
__ bind(&make_two_character_string_no_reload);
__ IncrementCounter(counters->string_add_make_two_char(), 1);
- __ AllocateAsciiString(eax, // Result.
- 2, // Length.
- edi, // Scratch 1.
- edx, // Scratch 2.
- &string_add_runtime);
+ __ AllocateAsciiString(eax, 2, edi, edx, &call_runtime);
// Pack both characters in ebx.
__ shl(ecx, kBitsPerByte);
__ or_(ebx, ecx);
@@ -5468,11 +5617,11 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ bind(&longer_than_two);
// Check if resulting string will be flat.
- __ cmp(ebx, Immediate(Smi::FromInt(String::kMinNonFlatLength)));
+ __ cmp(ebx, Immediate(Smi::FromInt(ConsString::kMinLength)));
__ j(below, &string_add_flat_result);
// If result is not supposed to be flat allocate a cons string object. If both
- // strings are ascii the result is an ascii cons string.
+ // strings are ASCII the result is an ASCII cons string.
Label non_ascii, allocated, ascii_data;
__ mov(edi, FieldOperand(eax, HeapObject::kMapOffset));
__ movzx_b(ecx, FieldOperand(edi, Map::kInstanceTypeOffset));
@@ -5484,8 +5633,8 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ test(ecx, Immediate(kStringEncodingMask));
__ j(zero, &non_ascii);
__ bind(&ascii_data);
- // Allocate an acsii cons string.
- __ AllocateAsciiConsString(ecx, edi, no_reg, &string_add_runtime);
+ // Allocate an ASCII cons string.
+ __ AllocateAsciiConsString(ecx, edi, no_reg, &call_runtime);
__ bind(&allocated);
// Fill the fields of the cons string.
if (FLAG_debug_code) __ AbortIfNotSmi(ebx);
@@ -5499,7 +5648,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ ret(2 * kPointerSize);
__ bind(&non_ascii);
// At least one of the strings is two-byte. Check whether it happens
- // to contain only ascii characters.
+ // to contain only ASCII characters.
// ecx: first instance type AND second instance type.
// edi: second instance type.
__ test(ecx, Immediate(kAsciiDataHintMask));
@@ -5512,64 +5661,93 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ cmp(edi, kAsciiStringTag | kAsciiDataHintTag);
__ j(equal, &ascii_data);
// Allocate a two byte cons string.
- __ AllocateTwoByteConsString(ecx, edi, no_reg, &string_add_runtime);
+ __ AllocateTwoByteConsString(ecx, edi, no_reg, &call_runtime);
__ jmp(&allocated);
- // Handle creating a flat result. First check that both strings are not
- // external strings.
+ // We cannot encounter sliced strings or cons strings here since:
+ STATIC_ASSERT(SlicedString::kMinLength >= ConsString::kMinLength);
+ // Handle creating a flat result from either external or sequential strings.
+ // Locate the first characters' locations.
// eax: first string
// ebx: length of resulting flat string as a smi
// edx: second string
+ Label first_prepared, second_prepared;
+ Label first_is_sequential, second_is_sequential;
__ bind(&string_add_flat_result);
__ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
__ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
- __ and_(ecx, kStringRepresentationMask);
- __ cmp(ecx, kExternalStringTag);
- __ j(equal, &string_add_runtime);
- __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
- __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
- __ and_(ecx, kStringRepresentationMask);
- __ cmp(ecx, kExternalStringTag);
- __ j(equal, &string_add_runtime);
- // We cannot encounter sliced strings here since:
- STATIC_ASSERT(SlicedString::kMinLength >= String::kMinNonFlatLength);
- // Now check if both strings are ascii strings.
- // eax: first string
- // ebx: length of resulting flat string as a smi
- // edx: second string
- Label non_ascii_string_add_flat_result;
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
- STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
- __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
- __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kStringEncodingMask);
+ // ecx: instance type of first string
+ STATIC_ASSERT(kSeqStringTag == 0);
+ __ test_b(ecx, kStringRepresentationMask);
+ __ j(zero, &first_is_sequential, Label::kNear);
+ // Rule out short external string and load string resource.
+ STATIC_ASSERT(kShortExternalStringTag != 0);
+ __ test_b(ecx, kShortExternalStringMask);
+ __ j(not_zero, &call_runtime);
+ __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
+ STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize);
+ __ jmp(&first_prepared, Label::kNear);
+ __ bind(&first_is_sequential);
+ __ add(eax, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ bind(&first_prepared);
+
+ __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
+ __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
+ // Check whether both strings have same encoding.
+ // edi: instance type of second string
+ __ xor_(ecx, edi);
+ __ test_b(ecx, kStringEncodingMask);
+ __ j(not_zero, &call_runtime);
+ STATIC_ASSERT(kSeqStringTag == 0);
+ __ test_b(edi, kStringRepresentationMask);
+ __ j(zero, &second_is_sequential, Label::kNear);
+ // Rule out short external string and load string resource.
+ STATIC_ASSERT(kShortExternalStringTag != 0);
+ __ test_b(edi, kShortExternalStringMask);
+ __ j(not_zero, &call_runtime);
+ __ mov(edx, FieldOperand(edx, ExternalString::kResourceDataOffset));
+ STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize);
+ __ jmp(&second_prepared, Label::kNear);
+ __ bind(&second_is_sequential);
+ __ add(edx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ bind(&second_prepared);
+
+ // Push the addresses of both strings' first characters onto the stack.
+ __ push(edx);
+ __ push(eax);
+
+ Label non_ascii_string_add_flat_result, call_runtime_drop_two;
+ // edi: instance type of second string
+ // First string and second string have the same encoding.
+ STATIC_ASSERT(kTwoByteStringTag == 0);
+ __ test_b(edi, kStringEncodingMask);
__ j(zero, &non_ascii_string_add_flat_result);
- __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
- __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kStringEncodingMask);
- __ j(zero, &string_add_runtime);
- // Both strings are ascii strings. As they are short they are both flat.
+ // Both strings are ASCII strings.
// ebx: length of resulting flat string as a smi
__ SmiUntag(ebx);
- __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &string_add_runtime);
+ __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &call_runtime_drop_two);
// eax: result string
__ mov(ecx, eax);
// Locate first character of result.
__ add(ecx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- // Load first argument and locate first character.
- __ mov(edx, Operand(esp, 2 * kPointerSize));
+ // Load first argument's length and first character location. Account for
+ // values currently on the stack when fetching arguments from it.
+ __ mov(edx, Operand(esp, 4 * kPointerSize));
__ mov(edi, FieldOperand(edx, String::kLengthOffset));
__ SmiUntag(edi);
- __ add(edx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ pop(edx);
// eax: result string
// ecx: first character of result
// edx: first char of first argument
// edi: length of first argument
StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
- // Load second argument and locate first character.
- __ mov(edx, Operand(esp, 1 * kPointerSize));
+ // Load second argument's length and first character location. Account for
+ // values currently on the stack when fetching arguments from it.
+ __ mov(edx, Operand(esp, 2 * kPointerSize));
__ mov(edi, FieldOperand(edx, String::kLengthOffset));
__ SmiUntag(edi);
- __ add(edx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ pop(edx);
// eax: result string
// ecx: next character of result
// edx: first char of second argument
@@ -5583,34 +5761,30 @@ void StringAddStub::Generate(MacroAssembler* masm) {
// ebx: length of resulting flat string as a smi
// edx: second string
__ bind(&non_ascii_string_add_flat_result);
- __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
- __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kStringEncodingMask);
- __ j(not_zero, &string_add_runtime);
- // Both strings are two byte strings. As they are short they are both
- // flat.
+ // Both strings are two byte strings.
__ SmiUntag(ebx);
- __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &string_add_runtime);
+ __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &call_runtime_drop_two);
// eax: result string
__ mov(ecx, eax);
// Locate first character of result.
- __ add(ecx,
- Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
- // Load first argument and locate first character.
- __ mov(edx, Operand(esp, 2 * kPointerSize));
+ __ add(ecx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
+ // Load second argument's length and first character location. Account for
+ // values currently on the stack when fetching arguments from it.
+ __ mov(edx, Operand(esp, 4 * kPointerSize));
__ mov(edi, FieldOperand(edx, String::kLengthOffset));
__ SmiUntag(edi);
- __ add(edx,
- Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
+ __ pop(edx);
// eax: result string
// ecx: first character of result
// edx: first char of first argument
// edi: length of first argument
StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
- // Load second argument and locate first character.
- __ mov(edx, Operand(esp, 1 * kPointerSize));
+ // Load second argument's length and first character location. Account for
+ // values currently on the stack when fetching arguments from it.
+ __ mov(edx, Operand(esp, 2 * kPointerSize));
__ mov(edi, FieldOperand(edx, String::kLengthOffset));
__ SmiUntag(edi);
- __ add(edx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ pop(edx);
// eax: result string
// ecx: next character of result
// edx: first char of second argument
@@ -5619,8 +5793,11 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
+ // Recover stack pointer before jumping to runtime.
+ __ bind(&call_runtime_drop_two);
+ __ Drop(2);
// Just jump to runtime to add the two strings.
- __ bind(&string_add_runtime);
+ __ bind(&call_runtime);
__ TailCallRuntime(Runtime::kStringAdd, 2, 1);
if (call_builtin.is_linked()) {
@@ -5822,6 +5999,7 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
static const int kProbes = 4;
Label found_in_symbol_table;
Label next_probe[kProbes], next_probe_pop_mask[kProbes];
+ Register candidate = scratch; // Scratch register contains candidate.
for (int i = 0; i < kProbes; i++) {
// Calculate entry in symbol table.
__ mov(scratch, hash);
@@ -5831,7 +6009,6 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
__ and_(scratch, mask);
// Load the entry from the symbol table.
- Register candidate = scratch; // Scratch register contains candidate.
STATIC_ASSERT(SymbolTable::kEntrySize == 1);
__ mov(candidate,
FieldOperand(symbol_table,
@@ -5843,7 +6020,7 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
Factory* factory = masm->isolate()->factory();
__ cmp(candidate, factory->undefined_value());
__ j(equal, not_found);
- __ cmp(candidate, factory->null_value());
+ __ cmp(candidate, factory->the_hole_value());
__ j(equal, &next_probe[i]);
// If length is not 2 the string is not a candidate.
@@ -5856,7 +6033,7 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
__ push(mask);
Register temp = mask;
- // Check that the candidate is a non-external ascii string.
+ // Check that the candidate is a non-external ASCII string.
__ mov(temp, FieldOperand(candidate, HeapObject::kMapOffset));
__ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
__ JumpIfInstanceTypeIsNotSequentialAscii(
@@ -5876,7 +6053,7 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
__ jmp(not_found);
// Scratch register contains result when we fall through to here.
- Register result = scratch;
+ Register result = candidate;
__ bind(&found_in_symbol_table);
__ pop(mask); // Pop saved mask from the stack.
if (!result.is(eax)) {
@@ -5889,13 +6066,28 @@ void StringHelper::GenerateHashInit(MacroAssembler* masm,
Register hash,
Register character,
Register scratch) {
- // hash = character + (character << 10);
- __ mov(hash, character);
- __ shl(hash, 10);
- __ add(hash, character);
+ // hash = (seed + character) + ((seed + character) << 10);
+ if (Serializer::enabled()) {
+ ExternalReference roots_array_start =
+ ExternalReference::roots_array_start(masm->isolate());
+ __ mov(scratch, Immediate(Heap::kHashSeedRootIndex));
+ __ mov(scratch, Operand::StaticArray(scratch,
+ times_pointer_size,
+ roots_array_start));
+ __ SmiUntag(scratch);
+ __ add(scratch, character);
+ __ mov(hash, scratch);
+ __ shl(scratch, 10);
+ __ add(hash, scratch);
+ } else {
+ int32_t seed = masm->isolate()->heap()->HashSeed();
+ __ lea(scratch, Operand(character, seed));
+ __ shl(scratch, 10);
+ __ lea(hash, Operand(scratch, character, times_1, seed));
+ }
// hash ^= hash >> 6;
__ mov(scratch, hash);
- __ sar(scratch, 6);
+ __ shr(scratch, 6);
__ xor_(hash, scratch);
}
@@ -5912,7 +6104,7 @@ void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
__ add(hash, scratch);
// hash ^= hash >> 6;
__ mov(scratch, hash);
- __ sar(scratch, 6);
+ __ shr(scratch, 6);
__ xor_(hash, scratch);
}
@@ -5926,18 +6118,19 @@ void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
__ add(hash, scratch);
// hash ^= hash >> 11;
__ mov(scratch, hash);
- __ sar(scratch, 11);
+ __ shr(scratch, 11);
__ xor_(hash, scratch);
// hash += hash << 15;
__ mov(scratch, hash);
__ shl(scratch, 15);
__ add(hash, scratch);
+ __ and_(hash, String::kHashBitMask);
+
// if (hash == 0) hash = 27;
Label hash_not_zero;
- __ test(hash, hash);
__ j(not_zero, &hash_not_zero, Label::kNear);
- __ mov(hash, Immediate(27));
+ __ mov(hash, Immediate(StringHasher::kZeroHash));
__ bind(&hash_not_zero);
}
@@ -5962,100 +6155,70 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// ebx: instance type
// Calculate length of sub string using the smi values.
- Label result_longer_than_two;
__ mov(ecx, Operand(esp, 1 * kPointerSize)); // To index.
__ JumpIfNotSmi(ecx, &runtime);
__ mov(edx, Operand(esp, 2 * kPointerSize)); // From index.
__ JumpIfNotSmi(edx, &runtime);
__ sub(ecx, edx);
__ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
- Label return_eax;
- __ j(equal, &return_eax);
- // Special handling of sub-strings of length 1 and 2. One character strings
- // are handled in the runtime system (looked up in the single character
- // cache). Two character strings are looked for in the symbol cache.
- __ SmiUntag(ecx); // Result length is no longer smi.
- __ cmp(ecx, 2);
- __ j(greater, &result_longer_than_two);
- __ j(less, &runtime);
-
- // Sub string of length 2 requested.
+ Label not_original_string;
+ __ j(not_equal, &not_original_string, Label::kNear);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->sub_string_native(), 1);
+ __ ret(3 * kPointerSize);
+ __ bind(&not_original_string);
+
// eax: string
// ebx: instance type
- // ecx: sub string length (value is 2)
+ // ecx: sub string length (smi)
// edx: from index (smi)
- __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &runtime);
+ // Deal with different string types: update the index if necessary
+ // and put the underlying string into edi.
+ Label underlying_unpacked, sliced_string, seq_or_external_string;
+ // If the string is not indirect, it can only be sequential or external.
+ STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
+ STATIC_ASSERT(kIsIndirectStringMask != 0);
+ __ test(ebx, Immediate(kIsIndirectStringMask));
+ __ j(zero, &seq_or_external_string, Label::kNear);
- // Get the two characters forming the sub string.
- __ SmiUntag(edx); // From index is no longer smi.
- __ movzx_b(ebx, FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize));
- __ movzx_b(ecx,
- FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize + 1));
-
- // Try to lookup two character string in symbol table.
- Label make_two_character_string;
- StringHelper::GenerateTwoCharacterSymbolTableProbe(
- masm, ebx, ecx, eax, edx, edi,
- &make_two_character_string, &make_two_character_string);
- __ ret(3 * kPointerSize);
+ Factory* factory = masm->isolate()->factory();
+ __ test(ebx, Immediate(kSlicedNotConsMask));
+ __ j(not_zero, &sliced_string, Label::kNear);
+ // Cons string. Check whether it is flat, then fetch first part.
+ // Flat cons strings have an empty second part.
+ __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
+ factory->empty_string());
+ __ j(not_equal, &runtime);
+ __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
+ // Update instance type.
+ __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
+ __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
+ __ jmp(&underlying_unpacked, Label::kNear);
- __ bind(&make_two_character_string);
- // Setup registers for allocating the two character string.
- __ mov(eax, Operand(esp, 3 * kPointerSize));
- __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
+ __ bind(&sliced_string);
+ // Sliced string. Fetch parent and adjust start index by offset.
+ __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
+ __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
+ // Update instance type.
+ __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
__ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
- __ Set(ecx, Immediate(2));
+ __ jmp(&underlying_unpacked, Label::kNear);
+
+ __ bind(&seq_or_external_string);
+ // Sequential or external string. Just move string to the expected register.
+ __ mov(edi, eax);
+
+ __ bind(&underlying_unpacked);
if (FLAG_string_slices) {
Label copy_routine;
- // If coming from the make_two_character_string path, the string
- // is too short to be sliced anyways.
- STATIC_ASSERT(2 < SlicedString::kMinLength);
- __ jmp(&copy_routine);
- __ bind(&result_longer_than_two);
-
- // eax: string
- // ebx: instance type
- // ecx: sub string length
- // edx: from index (smi)
- Label allocate_slice, sliced_string, seq_string;
- __ cmp(ecx, SlicedString::kMinLength);
+ // edi: underlying subject string
+ // ebx: instance type of underlying subject string
+ // edx: adjusted start index (smi)
+ // ecx: length (smi)
+ __ cmp(ecx, Immediate(Smi::FromInt(SlicedString::kMinLength)));
// Short slice. Copy instead of slicing.
__ j(less, &copy_routine);
- STATIC_ASSERT(kSeqStringTag == 0);
- __ test(ebx, Immediate(kStringRepresentationMask));
- __ j(zero, &seq_string, Label::kNear);
- STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
- STATIC_ASSERT(kIsIndirectStringMask != 0);
- __ test(ebx, Immediate(kIsIndirectStringMask));
- // External string. Jump to runtime.
- __ j(zero, &runtime);
-
- Factory* factory = masm->isolate()->factory();
- __ test(ebx, Immediate(kSlicedNotConsMask));
- __ j(not_zero, &sliced_string, Label::kNear);
- // Cons string. Check whether it is flat, then fetch first part.
- __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
- factory->empty_string());
- __ j(not_equal, &runtime);
- __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
- __ jmp(&allocate_slice, Label::kNear);
-
- __ bind(&sliced_string);
- // Sliced string. Fetch parent and correct start index by offset.
- __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
- __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
- __ jmp(&allocate_slice, Label::kNear);
-
- __ bind(&seq_string);
- // Sequential string. Just move string to the right register.
- __ mov(edi, eax);
-
- __ bind(&allocate_slice);
- // edi: underlying subject string
- // ebx: instance type of original subject string
- // edx: offset
- // ecx: length
// Allocate new sliced string. At this point we do not reload the instance
// type including the string encoding because we simply rely on the info
// provided by the original string. It does not matter if the original
@@ -6071,28 +6234,50 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ bind(&two_byte_slice);
__ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
__ bind(&set_slice_header);
- __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
- __ SmiTag(ecx);
__ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
- __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
__ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
Immediate(String::kEmptyHashField));
- __ jmp(&return_eax);
+ __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
+ __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
+ __ IncrementCounter(counters->sub_string_native(), 1);
+ __ ret(3 * kPointerSize);
__ bind(&copy_routine);
- } else {
- __ bind(&result_longer_than_two);
}
- // eax: string
- // ebx: instance type
- // ecx: result string length
- // Check for flat ascii string
- Label non_ascii_flat;
- __ JumpIfInstanceTypeIsNotSequentialAscii(ebx, ebx, &non_ascii_flat);
+ // edi: underlying subject string
+ // ebx: instance type of underlying subject string
+ // edx: adjusted start index (smi)
+ // ecx: length (smi)
+ // The subject string can only be external or sequential string of either
+ // encoding at this point.
+ Label two_byte_sequential, runtime_drop_two, sequential_string;
+ STATIC_ASSERT(kExternalStringTag != 0);
+ STATIC_ASSERT(kSeqStringTag == 0);
+ __ test_b(ebx, kExternalStringTag);
+ __ j(zero, &sequential_string);
+
+ // Handle external string.
+ // Rule out short external strings.
+ STATIC_CHECK(kShortExternalStringTag != 0);
+ __ test_b(ebx, kShortExternalStringMask);
+ __ j(not_zero, &runtime);
+ __ mov(edi, FieldOperand(edi, ExternalString::kResourceDataOffset));
+ // Move the pointer so that offset-wise, it looks like a sequential string.
+ STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqAsciiString::kHeaderSize);
+ __ sub(edi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
+
+ __ bind(&sequential_string);
+ // Stash away (adjusted) index and (underlying) string.
+ __ push(edx);
+ __ push(edi);
+ __ SmiUntag(ecx);
+ STATIC_ASSERT((kAsciiStringTag & kStringEncodingMask) != 0);
+ __ test_b(ebx, kStringEncodingMask);
+ __ j(zero, &two_byte_sequential);
- // Allocate the result.
- __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime);
+ // Sequential ASCII string. Allocate the result.
+ __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
// eax: result string
// ecx: result string length
@@ -6101,11 +6286,10 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ mov(edi, eax);
__ add(edi, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
// Load string argument and locate character of sub string start.
- __ mov(esi, Operand(esp, 3 * kPointerSize));
- __ add(esi, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- __ mov(ebx, Operand(esp, 2 * kPointerSize)); // from
+ __ pop(esi);
+ __ pop(ebx);
__ SmiUntag(ebx);
- __ add(esi, ebx);
+ __ lea(esi, FieldOperand(esi, ebx, times_1, SeqAsciiString::kHeaderSize));
// eax: result string
// ecx: result length
@@ -6114,20 +6298,12 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// esi: character of sub string start
StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, true);
__ mov(esi, edx); // Restore esi.
- Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->sub_string_native(), 1);
__ ret(3 * kPointerSize);
- __ bind(&non_ascii_flat);
- // eax: string
- // ebx: instance type & kStringRepresentationMask | kStringEncodingMask
- // ecx: result string length
- // Check for flat two byte string
- __ cmp(ebx, kSeqStringTag | kTwoByteStringTag);
- __ j(not_equal, &runtime);
-
- // Allocate the result.
- __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime);
+ __ bind(&two_byte_sequential);
+ // Sequential two-byte string. Allocate the result.
+ __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
// eax: result string
// ecx: result string length
@@ -6137,14 +6313,13 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ add(edi,
Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
// Load string argument and locate character of sub string start.
- __ mov(esi, Operand(esp, 3 * kPointerSize));
- __ add(esi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
- __ mov(ebx, Operand(esp, 2 * kPointerSize)); // from
+ __ pop(esi);
+ __ pop(ebx);
// As from is a smi it is 2 times the value which matches the size of a two
// byte character.
STATIC_ASSERT(kSmiTag == 0);
STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
- __ add(esi, ebx);
+ __ lea(esi, FieldOperand(esi, ebx, times_1, SeqTwoByteString::kHeaderSize));
// eax: result string
// ecx: result length
@@ -6153,11 +6328,13 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// esi: character of sub string start
StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, false);
__ mov(esi, edx); // Restore esi.
-
- __ bind(&return_eax);
__ IncrementCounter(counters->sub_string_native(), 1);
__ ret(3 * kPointerSize);
+ // Drop pushed values on the stack before tail call.
+ __ bind(&runtime_drop_two);
+ __ Drop(2);
+
// Just jump to runtime to create the sub string.
__ bind(&runtime);
__ TailCallRuntime(Runtime::kSubString, 3, 1);
@@ -6285,7 +6462,7 @@ void StringCompareStub::GenerateAsciiCharsCompareLoop(
__ mov_b(scratch, Operand(left, index, times_1, 0));
__ cmpb(scratch, Operand(right, index, times_1, 0));
__ j(not_equal, chars_not_equal, chars_not_equal_near);
- __ add(index, Immediate(1));
+ __ inc(index);
__ j(not_zero, &loop);
}
@@ -6312,10 +6489,10 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
__ bind(&not_same);
- // Check that both objects are sequential ascii strings.
+ // Check that both objects are sequential ASCII strings.
__ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime);
- // Compare flat ascii strings.
+ // Compare flat ASCII strings.
// Drop arguments from the stack.
__ pop(ecx);
__ add(esp, Immediate(2 * kPointerSize));
@@ -6359,16 +6536,16 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
ASSERT(state_ == CompareIC::HEAP_NUMBERS);
Label generic_stub;
- Label unordered;
+ Label unordered, maybe_undefined1, maybe_undefined2;
Label miss;
__ mov(ecx, edx);
__ and_(ecx, eax);
__ JumpIfSmi(ecx, &generic_stub, Label::kNear);
__ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx);
- __ j(not_equal, &miss, Label::kNear);
+ __ j(not_equal, &maybe_undefined1, Label::kNear);
__ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
- __ j(not_equal, &miss, Label::kNear);
+ __ j(not_equal, &maybe_undefined2, Label::kNear);
// Inlining the double comparison and falling back to the general compare
// stub if NaN is involved or SS2 or CMOV is unsupported.
@@ -6394,14 +6571,28 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
__ mov(ecx, Immediate(Smi::FromInt(-1)));
__ cmov(below, eax, ecx);
__ ret(0);
-
- __ bind(&unordered);
}
+ __ bind(&unordered);
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
__ bind(&generic_stub);
__ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
+ __ bind(&maybe_undefined1);
+ if (Token::IsOrderedRelationalCompareOp(op_)) {
+ __ cmp(eax, Immediate(masm->isolate()->factory()->undefined_value()));
+ __ j(not_equal, &miss);
+ __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
+ __ j(not_equal, &maybe_undefined2, Label::kNear);
+ __ jmp(&unordered);
+ }
+
+ __ bind(&maybe_undefined2);
+ if (Token::IsOrderedRelationalCompareOp(op_)) {
+ __ cmp(edx, Immediate(masm->isolate()->factory()->undefined_value()));
+ __ j(equal, &unordered);
+ }
+
__ bind(&miss);
GenerateMiss(masm);
}
@@ -6454,9 +6645,10 @@ void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
ASSERT(state_ == CompareIC::STRINGS);
- ASSERT(GetCondition() == equal);
Label miss;
+ bool equality = Token::IsEqualityOp(op_);
+
// Registers containing left and right operands respectively.
Register left = edx;
Register right = eax;
@@ -6495,25 +6687,33 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
__ bind(&not_same);
// Check that both strings are symbols. If they are, we're done
- // because we already know they are not identical.
- Label do_compare;
- STATIC_ASSERT(kSymbolTag != 0);
- __ and_(tmp1, tmp2);
- __ test(tmp1, Immediate(kIsSymbolMask));
- __ j(zero, &do_compare, Label::kNear);
- // Make sure eax is non-zero. At this point input operands are
- // guaranteed to be non-zero.
- ASSERT(right.is(eax));
- __ ret(0);
+ // because we already know they are not identical. But in the case of
+ // non-equality compare, we still need to determine the order.
+ if (equality) {
+ Label do_compare;
+ STATIC_ASSERT(kSymbolTag != 0);
+ __ and_(tmp1, tmp2);
+ __ test(tmp1, Immediate(kIsSymbolMask));
+ __ j(zero, &do_compare, Label::kNear);
+ // Make sure eax is non-zero. At this point input operands are
+ // guaranteed to be non-zero.
+ ASSERT(right.is(eax));
+ __ ret(0);
+ __ bind(&do_compare);
+ }
// Check that both strings are sequential ASCII.
Label runtime;
- __ bind(&do_compare);
__ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);
// Compare flat ASCII strings. Returns when done.
- StringCompareStub::GenerateFlatAsciiStringEquals(
- masm, left, right, tmp1, tmp2);
+ if (equality) {
+ StringCompareStub::GenerateFlatAsciiStringEquals(
+ masm, left, right, tmp1, tmp2);
+ } else {
+ StringCompareStub::GenerateCompareFlatAsciiStrings(
+ masm, left, right, tmp1, tmp2, tmp3);
+ }
// Handle more complex cases in runtime.
__ bind(&runtime);
@@ -6521,7 +6721,11 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
__ push(left);
__ push(right);
__ push(tmp1);
- __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+ if (equality) {
+ __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+ } else {
+ __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
+ }
__ bind(&miss);
GenerateMiss(masm);
@@ -6537,14 +6741,8 @@ void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
__ CmpObjectType(eax, JS_OBJECT_TYPE, ecx);
__ j(not_equal, &miss, Label::kNear);
- __ test_b(FieldOperand(ecx, Map::kBitField2Offset),
- 1 << Map::kUseUserObjectComparison);
- __ j(not_zero, &miss, Label::kNear);
__ CmpObjectType(edx, JS_OBJECT_TYPE, ecx);
__ j(not_equal, &miss, Label::kNear);
- __ test_b(FieldOperand(ecx, Map::kBitField2Offset),
- 1 << Map::kUseUserObjectComparison);
- __ j(not_zero, &miss, Label::kNear);
ASSERT(GetCondition() == equal);
__ sub(eax, edx);
@@ -6555,33 +6753,45 @@ void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
}
-void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
- // Save the registers.
- __ pop(ecx);
- __ push(edx);
- __ push(eax);
- __ push(ecx);
+void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
+ Label miss;
+ __ mov(ecx, edx);
+ __ and_(ecx, eax);
+ __ JumpIfSmi(ecx, &miss, Label::kNear);
+
+ __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
+ __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
+ __ cmp(ecx, known_map_);
+ __ j(not_equal, &miss, Label::kNear);
+ __ cmp(ebx, known_map_);
+ __ j(not_equal, &miss, Label::kNear);
+
+ __ sub(eax, edx);
+ __ ret(0);
+
+ __ bind(&miss);
+ GenerateMiss(masm);
+}
+
+void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
{
// Call the runtime system in a fresh internal frame.
ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
masm->isolate());
FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(edx);
+ __ push(edx); // Preserve edx and eax.
+ __ push(eax);
+ __ push(edx); // And also use them as the arguments.
__ push(eax);
__ push(Immediate(Smi::FromInt(op_)));
__ CallExternalReference(miss, 3);
+ // Compute the entry point of the rewritten stub.
+ __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
+ __ pop(eax);
+ __ pop(edx);
}
- // Compute the entry point of the rewritten stub.
- __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
-
- // Restore registers.
- __ pop(ecx);
- __ pop(eax);
- __ pop(edx);
- __ push(ecx);
-
// Do a tail call to the rewritten stub.
__ jmp(edi);
}
@@ -6604,7 +6814,7 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
// not equal to the name and kProbes-th slot is not used (its name is the
// undefined value), it guarantees the hash table doesn't contain the
// property. It's true even if some slots represent deleted properties
- // (their names are the null value).
+ // (their names are the hole value).
for (int i = 0; i < kInlinedProbes; i++) {
// Compute the masked index: (hash + i + i * i) & mask.
Register index = r0;
@@ -6630,11 +6840,17 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
__ cmp(entity_name, Handle<String>(name));
__ j(equal, miss);
+ Label the_hole;
+ // Check for the hole and skip.
+ __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
+ __ j(equal, &the_hole, Label::kNear);
+
// Check if the entry name is not a symbol.
__ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
__ test_b(FieldOperand(entity_name, Map::kInstanceTypeOffset),
kIsSymbolMask);
__ j(zero, miss);
+ __ bind(&the_hole);
}
StringDictionaryLookupStub stub(properties,
@@ -6808,42 +7024,47 @@ struct AheadOfTimeWriteBarrierStubList {
};
-struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
+#define REG(Name) { kRegister_ ## Name ## _Code }
+
+static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
// Used in RegExpExecStub.
- { ebx, eax, edi, EMIT_REMEMBERED_SET },
+ { REG(ebx), REG(eax), REG(edi), EMIT_REMEMBERED_SET },
// Used in CompileArrayPushCall.
- { ebx, ecx, edx, EMIT_REMEMBERED_SET },
- { ebx, edi, edx, OMIT_REMEMBERED_SET },
+ { REG(ebx), REG(ecx), REG(edx), EMIT_REMEMBERED_SET },
+ { REG(ebx), REG(edi), REG(edx), OMIT_REMEMBERED_SET },
// Used in CompileStoreGlobal and CallFunctionStub.
- { ebx, ecx, edx, OMIT_REMEMBERED_SET },
+ { REG(ebx), REG(ecx), REG(edx), OMIT_REMEMBERED_SET },
// Used in StoreStubCompiler::CompileStoreField and
// KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
- { edx, ecx, ebx, EMIT_REMEMBERED_SET },
+ { REG(edx), REG(ecx), REG(ebx), EMIT_REMEMBERED_SET },
// GenerateStoreField calls the stub with two different permutations of
// registers. This is the second.
- { ebx, ecx, edx, EMIT_REMEMBERED_SET },
+ { REG(ebx), REG(ecx), REG(edx), EMIT_REMEMBERED_SET },
// StoreIC::GenerateNormal via GenerateDictionaryStore
- { ebx, edi, edx, EMIT_REMEMBERED_SET },
+ { REG(ebx), REG(edi), REG(edx), EMIT_REMEMBERED_SET },
// KeyedStoreIC::GenerateGeneric.
- { ebx, edx, ecx, EMIT_REMEMBERED_SET},
+ { REG(ebx), REG(edx), REG(ecx), EMIT_REMEMBERED_SET},
// KeyedStoreStubCompiler::GenerateStoreFastElement.
- { edi, edx, ecx, EMIT_REMEMBERED_SET},
+ { REG(edi), REG(ebx), REG(ecx), EMIT_REMEMBERED_SET},
+ { REG(edx), REG(edi), REG(ebx), EMIT_REMEMBERED_SET},
// ElementsTransitionGenerator::GenerateSmiOnlyToObject
// and ElementsTransitionGenerator::GenerateSmiOnlyToDouble
// and ElementsTransitionGenerator::GenerateDoubleToObject
- { edx, ebx, edi, EMIT_REMEMBERED_SET},
+ { REG(edx), REG(ebx), REG(edi), EMIT_REMEMBERED_SET},
+ { REG(edx), REG(ebx), REG(edi), OMIT_REMEMBERED_SET},
// ElementsTransitionGenerator::GenerateDoubleToObject
- { eax, edx, esi, EMIT_REMEMBERED_SET},
- { edx, eax, edi, EMIT_REMEMBERED_SET},
+ { REG(eax), REG(edx), REG(esi), EMIT_REMEMBERED_SET},
+ { REG(edx), REG(eax), REG(edi), EMIT_REMEMBERED_SET},
// StoreArrayLiteralElementStub::Generate
- { ebx, eax, ecx, EMIT_REMEMBERED_SET},
+ { REG(ebx), REG(eax), REG(ecx), EMIT_REMEMBERED_SET},
// Null termination.
- { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET}
+ { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
};
+#undef REG
bool RecordWriteStub::IsPregenerated() {
- for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
+ for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
!entry->object.is(no_reg);
entry++) {
if (object_.is(entry->object) &&
@@ -6871,7 +7092,7 @@ void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() {
void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
- for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
+ for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
!entry->object.is(no_reg);
entry++) {
RecordWriteStub stub(entry->object,
@@ -7097,16 +7318,14 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
Label slow_elements_from_double;
Label fast_elements;
- if (!FLAG_trace_elements_transitions) {
- __ CheckFastElements(edi, &double_elements);
+ __ CheckFastElements(edi, &double_elements);
- // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
- __ JumpIfSmi(eax, &smi_element);
- __ CheckFastSmiOnlyElements(edi, &fast_elements, Label::kNear);
+ // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
+ __ JumpIfSmi(eax, &smi_element);
+ __ CheckFastSmiOnlyElements(edi, &fast_elements, Label::kNear);
- // Store into the array literal requires a elements transition. Call into
- // the runtime.
- }
+ // Store into the array literal requires a elements transition. Call into
+ // the runtime.
__ bind(&slow_elements);
__ pop(edi); // Pop return address and remember to put back later for tail
@@ -7121,49 +7340,45 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
// place.
__ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
- if (!FLAG_trace_elements_transitions) {
- // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
- __ bind(&double_elements);
+ __ bind(&slow_elements_from_double);
+ __ pop(edx);
+ __ jmp(&slow_elements);
+
+ // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
+ __ bind(&fast_elements);
+ __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
+ __ lea(ecx, FieldOperand(ebx, ecx, times_half_pointer_size,
+ FixedArrayBase::kHeaderSize));
+ __ mov(Operand(ecx, 0), eax);
+ // Update the write barrier for the array store.
+ __ RecordWrite(ebx, ecx, eax,
+ kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ __ ret(0);
- __ push(edx);
- __ mov(edx, FieldOperand(ebx, JSObject::kElementsOffset));
- __ StoreNumberToDoubleElements(eax,
- edx,
- ecx,
- edi,
- xmm0,
- &slow_elements_from_double,
- false);
- __ pop(edx);
- __ jmp(&element_done);
+ // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
+ // FAST_ELEMENTS, and value is Smi.
+ __ bind(&smi_element);
+ __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
+ __ mov(FieldOperand(ebx, ecx, times_half_pointer_size,
+ FixedArrayBase::kHeaderSize), eax);
+ __ ret(0);
- __ bind(&slow_elements_from_double);
- __ pop(edx);
- __ jmp(&slow_elements);
-
- // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
- __ bind(&fast_elements);
- __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
- __ lea(ecx, FieldOperand(ebx, ecx, times_half_pointer_size,
- FixedArrayBase::kHeaderSize));
- __ mov(Operand(ecx, 0), eax);
- // Update the write barrier for the array store.
- __ RecordWrite(ebx, ecx, eax,
- kDontSaveFPRegs,
- EMIT_REMEMBERED_SET,
- OMIT_SMI_CHECK);
- __ jmp(&element_done);
-
- // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
- // FAST_ELEMENTS, and value is Smi.
- __ bind(&smi_element);
- __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
- __ mov(FieldOperand(ebx, ecx, times_half_pointer_size,
- FixedArrayBase::kHeaderSize), eax);
- // Fall through
- __ bind(&element_done);
- __ ret(0);
- }
+ // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
+ __ bind(&double_elements);
+
+ __ push(edx);
+ __ mov(edx, FieldOperand(ebx, JSObject::kElementsOffset));
+ __ StoreNumberToDoubleElements(eax,
+ edx,
+ ecx,
+ edi,
+ xmm0,
+ &slow_elements_from_double,
+ false);
+ __ pop(edx);
+ __ ret(0);
}
#undef __
diff --git a/src/3rdparty/v8/src/ia32/code-stubs-ia32.h b/src/3rdparty/v8/src/ia32/code-stubs-ia32.h
index 692cbcf..803a711 100644
--- a/src/3rdparty/v8/src/ia32/code-stubs-ia32.h
+++ b/src/3rdparty/v8/src/ia32/code-stubs-ia32.h
@@ -49,6 +49,8 @@ class TranscendentalCacheStub: public CodeStub {
ArgumentType argument_type)
: type_(type), argument_type_(argument_type) {}
void Generate(MacroAssembler* masm);
+ static void GenerateOperation(MacroAssembler* masm,
+ TranscendentalCache::Type type);
private:
TranscendentalCache::Type type_;
ArgumentType argument_type_;
@@ -56,7 +58,6 @@ class TranscendentalCacheStub: public CodeStub {
Major MajorKey() { return TranscendentalCache; }
int MinorKey() { return type_ | argument_type_; }
Runtime::FunctionId RuntimeFunction();
- void GenerateOperation(MacroAssembler* masm);
};
@@ -147,7 +148,7 @@ class UnaryOpStub: public CodeStub {
return UnaryOpIC::ToState(operand_type_);
}
- virtual void FinishCode(Code* code) {
+ virtual void FinishCode(Handle<Code> code) {
code->set_unary_op_type(operand_type_);
}
};
@@ -234,7 +235,7 @@ class BinaryOpStub: public CodeStub {
return BinaryOpIC::ToState(operands_type_);
}
- virtual void FinishCode(Code* code) {
+ virtual void FinishCode(Handle<Code> code) {
code->set_binary_op_type(operands_type_);
code->set_binary_op_result_type(result_type_);
}
@@ -711,13 +712,6 @@ class RecordWriteStub: public CodeStub {
SaveFPRegsModeBits::encode(save_fp_regs_mode_);
}
- bool MustBeInStubCache() {
- // All stubs must be registered in the stub cache
- // otherwise IncrementalMarker would not be able to find
- // and patch it.
- return true;
- }
-
void Activate(Code* code) {
code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code);
}
diff --git a/src/3rdparty/v8/src/ia32/codegen-ia32.cc b/src/3rdparty/v8/src/ia32/codegen-ia32.cc
index 3912727..ea61910 100644
--- a/src/3rdparty/v8/src/ia32/codegen-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/codegen-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -30,6 +30,7 @@
#if defined(V8_TARGET_ARCH_IA32)
#include "codegen.h"
+#include "heap.h"
#include "macro-assembler.h"
namespace v8 {
@@ -55,6 +56,85 @@ void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
#define __ masm.
+
+UnaryMathFunction CreateTranscendentalFunction(TranscendentalCache::Type type) {
+ size_t actual_size;
+ // Allocate buffer in executable space.
+ byte* buffer = static_cast<byte*>(OS::Allocate(1 * KB,
+ &actual_size,
+ true));
+ if (buffer == NULL) {
+ // Fallback to library function if function cannot be created.
+ switch (type) {
+ case TranscendentalCache::SIN: return &sin;
+ case TranscendentalCache::COS: return &cos;
+ case TranscendentalCache::TAN: return &tan;
+ case TranscendentalCache::LOG: return &log;
+ default: UNIMPLEMENTED();
+ }
+ }
+
+ MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
+ // esp[1 * kPointerSize]: raw double input
+ // esp[0 * kPointerSize]: return address
+ // Move double input into registers.
+
+ __ push(ebx);
+ __ push(edx);
+ __ push(edi);
+ __ fld_d(Operand(esp, 4 * kPointerSize));
+ __ mov(ebx, Operand(esp, 4 * kPointerSize));
+ __ mov(edx, Operand(esp, 5 * kPointerSize));
+ TranscendentalCacheStub::GenerateOperation(&masm, type);
+ // The return value is expected to be on ST(0) of the FPU stack.
+ __ pop(edi);
+ __ pop(edx);
+ __ pop(ebx);
+ __ Ret();
+
+ CodeDesc desc;
+ masm.GetCode(&desc);
+ ASSERT(desc.reloc_size == 0);
+
+ CPU::FlushICache(buffer, actual_size);
+ OS::ProtectCode(buffer, actual_size);
+ return FUNCTION_CAST<UnaryMathFunction>(buffer);
+}
+
+
+UnaryMathFunction CreateSqrtFunction() {
+ size_t actual_size;
+ // Allocate buffer in executable space.
+ byte* buffer = static_cast<byte*>(OS::Allocate(1 * KB,
+ &actual_size,
+ true));
+ // If SSE2 is not available, we can use libc's implementation to ensure
+ // consistency since code by fullcodegen's calls into runtime in that case.
+ if (buffer == NULL || !CpuFeatures::IsSupported(SSE2)) return &sqrt;
+ MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
+ // esp[1 * kPointerSize]: raw double input
+ // esp[0 * kPointerSize]: return address
+ // Move double input into registers.
+ {
+ CpuFeatures::Scope use_sse2(SSE2);
+ __ movdbl(xmm0, Operand(esp, 1 * kPointerSize));
+ __ sqrtsd(xmm0, xmm0);
+ __ movdbl(Operand(esp, 1 * kPointerSize), xmm0);
+ // Load result into floating point register as return value.
+ __ fld_d(Operand(esp, 1 * kPointerSize));
+ __ Ret();
+ }
+
+ CodeDesc desc;
+ masm.GetCode(&desc);
+ ASSERT(desc.reloc_size == 0);
+
+ CPU::FlushICache(buffer, actual_size);
+ OS::ProtectCode(buffer, actual_size);
+ return FUNCTION_CAST<UnaryMathFunction>(buffer);
+}
+
+
static void MemCopyWrapper(void* dest, const void* src, size_t size) {
memcpy(dest, src, size);
}
@@ -301,11 +381,17 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
- Label loop, entry, convert_hole, gc_required;
+ Label loop, entry, convert_hole, gc_required, only_change_map;
+
+ // Check for empty arrays, which only require a map transition and no changes
+ // to the backing store.
+ __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
+ __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
+ __ j(equal, &only_change_map);
+
__ push(eax);
__ push(ebx);
- __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
__ mov(edi, FieldOperand(edi, FixedArray::kLengthOffset));
// Allocate new FixedDoubleArray.
@@ -378,6 +464,12 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
// Found hole, store hole_nan_as_double instead.
__ bind(&convert_hole);
+
+ if (FLAG_debug_code) {
+ __ cmp(ebx, masm->isolate()->factory()->the_hole_value());
+ __ Assert(equal, "object found in smi-only array");
+ }
+
if (CpuFeatures::IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
__ movdbl(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize),
@@ -393,6 +485,11 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
__ pop(ebx);
__ pop(eax);
+
+ // Restore esi.
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+
+ __ bind(&only_change_map);
// eax: value
// ebx: target map
// Set transitioned map.
@@ -402,10 +499,8 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
ebx,
edi,
kDontSaveFPRegs,
- EMIT_REMEMBERED_SET,
+ OMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
- // Restore esi.
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
@@ -418,12 +513,18 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
- Label loop, entry, convert_hole, gc_required;
+ Label loop, entry, convert_hole, gc_required, only_change_map, success;
+
+ // Check for empty arrays, which only require a map transition and no changes
+ // to the backing store.
+ __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
+ __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
+ __ j(equal, &only_change_map);
+
__ push(eax);
__ push(edx);
__ push(ebx);
- __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
__ mov(ebx, FieldOperand(edi, FixedDoubleArray::kLengthOffset));
// Allocate new FixedArray.
@@ -440,6 +541,20 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
__ jmp(&entry);
+ // ebx: target map
+ // edx: receiver
+ // Set transitioned map.
+ __ bind(&only_change_map);
+ __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
+ __ RecordWriteField(edx,
+ HeapObject::kMapOffset,
+ ebx,
+ edi,
+ kDontSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ __ jmp(&success);
+
// Call into runtime if GC is required.
__ bind(&gc_required);
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
@@ -501,7 +616,7 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
ebx,
edi,
kDontSaveFPRegs,
- EMIT_REMEMBERED_SET,
+ OMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
// Replace receiver's backing store with newly created and filled FixedArray.
__ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
@@ -516,6 +631,112 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
// Restore registers.
__ pop(eax);
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+
+ __ bind(&success);
+}
+
+
+void StringCharLoadGenerator::Generate(MacroAssembler* masm,
+ Factory* factory,
+ Register string,
+ Register index,
+ Register result,
+ Label* call_runtime) {
+ // Fetch the instance type of the receiver into result register.
+ __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
+ __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
+
+ // We need special handling for indirect strings.
+ Label check_sequential;
+ __ test(result, Immediate(kIsIndirectStringMask));
+ __ j(zero, &check_sequential, Label::kNear);
+
+ // Dispatch on the indirect string shape: slice or cons.
+ Label cons_string;
+ __ test(result, Immediate(kSlicedNotConsMask));
+ __ j(zero, &cons_string, Label::kNear);
+
+ // Handle slices.
+ Label indirect_string_loaded;
+ __ mov(result, FieldOperand(string, SlicedString::kOffsetOffset));
+ __ SmiUntag(result);
+ __ add(index, result);
+ __ mov(string, FieldOperand(string, SlicedString::kParentOffset));
+ __ jmp(&indirect_string_loaded, Label::kNear);
+
+ // Handle cons strings.
+ // Check whether the right hand side is the empty string (i.e. if
+ // this is really a flat string in a cons string). If that is not
+ // the case we would rather go to the runtime system now to flatten
+ // the string.
+ __ bind(&cons_string);
+ __ cmp(FieldOperand(string, ConsString::kSecondOffset),
+ Immediate(factory->empty_string()));
+ __ j(not_equal, call_runtime);
+ __ mov(string, FieldOperand(string, ConsString::kFirstOffset));
+
+ __ bind(&indirect_string_loaded);
+ __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
+ __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
+
+ // Distinguish sequential and external strings. Only these two string
+ // representations can reach here (slices and flat cons strings have been
+ // reduced to the underlying sequential or external string).
+ Label seq_string;
+ __ bind(&check_sequential);
+ STATIC_ASSERT(kSeqStringTag == 0);
+ __ test(result, Immediate(kStringRepresentationMask));
+ __ j(zero, &seq_string, Label::kNear);
+
+ // Handle external strings.
+ Label ascii_external, done;
+ if (FLAG_debug_code) {
+ // Assert that we do not have a cons or slice (indirect strings) here.
+ // Sequential strings have already been ruled out.
+ __ test(result, Immediate(kIsIndirectStringMask));
+ __ Assert(zero, "external string expected, but not found");
+ }
+ // Rule out short external strings.
+ STATIC_CHECK(kShortExternalStringTag != 0);
+ __ test_b(result, kShortExternalStringMask);
+ __ j(not_zero, call_runtime);
+ // Check encoding.
+ STATIC_ASSERT(kTwoByteStringTag == 0);
+ __ test_b(result, kStringEncodingMask);
+ __ mov(result, FieldOperand(string, ExternalString::kResourceDataOffset));
+ __ j(not_equal, &ascii_external, Label::kNear);
+ // Two-byte string.
+ __ movzx_w(result, Operand(result, index, times_2, 0));
+ __ jmp(&done, Label::kNear);
+ __ bind(&ascii_external);
+ // Ascii string.
+ __ movzx_b(result, Operand(result, index, times_1, 0));
+ __ jmp(&done, Label::kNear);
+
+ // Dispatch on the encoding: ASCII or two-byte.
+ Label ascii;
+ __ bind(&seq_string);
+ STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+ STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
+ __ test(result, Immediate(kStringEncodingMask));
+ __ j(not_zero, &ascii, Label::kNear);
+
+ // Two-byte string.
+ // Load the two-byte character code into the result register.
+ __ movzx_w(result, FieldOperand(string,
+ index,
+ times_2,
+ SeqTwoByteString::kHeaderSize));
+ __ jmp(&done, Label::kNear);
+
+ // Ascii string.
+ // Load the byte into the result register.
+ __ bind(&ascii);
+ __ movzx_b(result, FieldOperand(string,
+ index,
+ times_1,
+ SeqAsciiString::kHeaderSize));
+ __ bind(&done);
}
#undef __
diff --git a/src/3rdparty/v8/src/ia32/codegen-ia32.h b/src/3rdparty/v8/src/ia32/codegen-ia32.h
index c85fa83..f4ab0b5 100644
--- a/src/3rdparty/v8/src/ia32/codegen-ia32.h
+++ b/src/3rdparty/v8/src/ia32/codegen-ia32.h
@@ -72,6 +72,22 @@ class CodeGenerator {
};
+class StringCharLoadGenerator : public AllStatic {
+ public:
+ // Generates the code for handling different string types and loading the
+ // indexed character into |result|. We expect |index| as untagged input and
+ // |result| as untagged output.
+ static void Generate(MacroAssembler* masm,
+ Factory* factory,
+ Register string,
+ Register index,
+ Register result,
+ Label* call_runtime);
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(StringCharLoadGenerator);
+};
+
} } // namespace v8::internal
#endif // V8_IA32_CODEGEN_IA32_H_
diff --git a/src/3rdparty/v8/src/ia32/cpu-ia32.cc b/src/3rdparty/v8/src/ia32/cpu-ia32.cc
index 57e66df..9eabb2a 100644
--- a/src/3rdparty/v8/src/ia32/cpu-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/cpu-ia32.cc
@@ -41,7 +41,7 @@
namespace v8 {
namespace internal {
-void CPU::Setup() {
+void CPU::SetUp() {
CpuFeatures::Probe();
}
diff --git a/src/3rdparty/v8/src/ia32/debug-ia32.cc b/src/3rdparty/v8/src/ia32/debug-ia32.cc
index d7184ed..d13fa75 100644
--- a/src/3rdparty/v8/src/ia32/debug-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/debug-ia32.cc
@@ -222,8 +222,36 @@ void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) {
}
-void Debug::GenerateConstructCallDebugBreak(MacroAssembler* masm) {
+void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) {
// Register state just before return from JS function (from codegen-ia32.cc).
+ // ----------- S t a t e -------------
+ // -- eax: return value
+ // -----------------------------------
+ Generate_DebugBreakCallHelper(masm, eax.bit(), 0, true);
+}
+
+
+void Debug::GenerateCallFunctionStubDebugBreak(MacroAssembler* masm) {
+ // Register state for CallFunctionStub (from code-stubs-ia32.cc).
+ // ----------- S t a t e -------------
+ // -- edi: function
+ // -----------------------------------
+ Generate_DebugBreakCallHelper(masm, edi.bit(), 0, false);
+}
+
+
+void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) {
+ // Register state for CallFunctionStub (from code-stubs-ia32.cc).
+ // ----------- S t a t e -------------
+ // -- ebx: cache cell for call target
+ // -- edi: function
+ // -----------------------------------
+ Generate_DebugBreakCallHelper(masm, ebx.bit() | edi.bit(), 0, false);
+}
+
+
+void Debug::GenerateCallConstructStubDebugBreak(MacroAssembler* masm) {
+ // Register state for CallConstructStub (from code-stubs-ia32.cc).
// eax is the actual number of arguments not encoded as a smi see comment
// above IC call.
// ----------- S t a t e -------------
@@ -235,21 +263,17 @@ void Debug::GenerateConstructCallDebugBreak(MacroAssembler* masm) {
}
-void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) {
- // Register state just before return from JS function (from codegen-ia32.cc).
- // ----------- S t a t e -------------
- // -- eax: return value
- // -----------------------------------
- Generate_DebugBreakCallHelper(masm, eax.bit(), 0, true);
-}
-
-
-void Debug::GenerateStubNoRegistersDebugBreak(MacroAssembler* masm) {
- // Register state for stub CallFunction (from CallFunctionStub in ic-ia32.cc).
+void Debug::GenerateCallConstructStubRecordDebugBreak(MacroAssembler* masm) {
+ // Register state for CallConstructStub (from code-stubs-ia32.cc).
+ // eax is the actual number of arguments not encoded as a smi see comment
+ // above IC call.
// ----------- S t a t e -------------
- // No registers used on entry.
+ // -- eax: number of arguments (not smi)
+ // -- ebx: cache cell for call target
+ // -- edi: constructor function
// -----------------------------------
- Generate_DebugBreakCallHelper(masm, 0, 0, false);
+ // The number of arguments in eax is not smi encoded.
+ Generate_DebugBreakCallHelper(masm, ebx.bit() | edi.bit(), eax.bit(), false);
}
@@ -258,9 +282,7 @@ void Debug::GenerateSlot(MacroAssembler* masm) {
Label check_codesize;
__ bind(&check_codesize);
__ RecordDebugBreakSlot();
- for (int i = 0; i < Assembler::kDebugBreakSlotLength; i++) {
- __ nop();
- }
+ __ Nop(Assembler::kDebugBreakSlotLength);
ASSERT_EQ(Assembler::kDebugBreakSlotLength,
masm->SizeOfCodeGeneratedSince(&check_codesize));
}
diff --git a/src/3rdparty/v8/src/ia32/deoptimizer-ia32.cc b/src/3rdparty/v8/src/ia32/deoptimizer-ia32.cc
index db6c16b..3f10c09 100644
--- a/src/3rdparty/v8/src/ia32/deoptimizer-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/deoptimizer-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -45,16 +45,6 @@ int Deoptimizer::patch_size() {
}
-static void ZapCodeRange(Address start, Address end) {
-#ifdef DEBUG
- ASSERT(start <= end);
- int size = end - start;
- CodePatcher destroyer(start, size);
- while (size-- > 0) destroyer.masm()->int3();
-#endif
-}
-
-
void Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code) {
Isolate* isolate = code->GetIsolate();
HandleScope scope(isolate);
@@ -62,30 +52,23 @@ void Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code) {
// Compute the size of relocation information needed for the code
// patching in Deoptimizer::DeoptimizeFunction.
int min_reloc_size = 0;
- Address prev_reloc_address = code->instruction_start();
- Address code_start_address = code->instruction_start();
- SafepointTable table(*code);
- for (unsigned i = 0; i < table.length(); ++i) {
- Address curr_reloc_address = code_start_address + table.GetPcOffset(i);
- ASSERT_GE(curr_reloc_address, prev_reloc_address);
- SafepointEntry safepoint_entry = table.GetEntry(i);
- int deoptimization_index = safepoint_entry.deoptimization_index();
- if (deoptimization_index != Safepoint::kNoDeoptimizationIndex) {
- // The gap code is needed to get to the state expected at the
- // bailout and we need to skip the call opcode to get to the
- // address that needs reloc.
- curr_reloc_address += safepoint_entry.gap_code_size() + 1;
- int pc_delta = curr_reloc_address - prev_reloc_address;
- // We use RUNTIME_ENTRY reloc info which has a size of 2 bytes
- // if encodable with small pc delta encoding and up to 6 bytes
- // otherwise.
- if (pc_delta <= RelocInfo::kMaxSmallPCDelta) {
- min_reloc_size += 2;
- } else {
- min_reloc_size += 6;
- }
- prev_reloc_address = curr_reloc_address;
+ int prev_pc_offset = 0;
+ DeoptimizationInputData* deopt_data =
+ DeoptimizationInputData::cast(code->deoptimization_data());
+ for (int i = 0; i < deopt_data->DeoptCount(); i++) {
+ int pc_offset = deopt_data->Pc(i)->value();
+ if (pc_offset == -1) continue;
+ ASSERT_GE(pc_offset, prev_pc_offset);
+ int pc_delta = pc_offset - prev_pc_offset;
+ // We use RUNTIME_ENTRY reloc info which has a size of 2 bytes
+ // if encodable with small pc delta encoding and up to 6 bytes
+ // otherwise.
+ if (pc_delta <= RelocInfo::kMaxSmallPCDelta) {
+ min_reloc_size += 2;
+ } else {
+ min_reloc_size += 6;
}
+ prev_pc_offset = pc_offset;
}
// If the relocation information is not big enough we create a new
@@ -150,41 +133,41 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
Address reloc_end_address = reloc_info->address() + reloc_info->Size();
RelocInfoWriter reloc_info_writer(reloc_end_address, code_start_address);
- // For each return after a safepoint insert a call to the corresponding
- // deoptimization entry. Since the call is a relative encoding, write new
+ // For each LLazyBailout instruction insert a call to the corresponding
+ // deoptimization entry.
+
+ // Since the call is a relative encoding, write new
// reloc info. We do not need any of the existing reloc info because the
// existing code will not be used again (we zap it in debug builds).
- SafepointTable table(code);
- Address prev_address = code_start_address;
- for (unsigned i = 0; i < table.length(); ++i) {
- Address curr_address = code_start_address + table.GetPcOffset(i);
- ASSERT_GE(curr_address, prev_address);
- ZapCodeRange(prev_address, curr_address);
-
- SafepointEntry safepoint_entry = table.GetEntry(i);
- int deoptimization_index = safepoint_entry.deoptimization_index();
- if (deoptimization_index != Safepoint::kNoDeoptimizationIndex) {
- // The gap code is needed to get to the state expected at the bailout.
- curr_address += safepoint_entry.gap_code_size();
-
- CodePatcher patcher(curr_address, patch_size());
- Address deopt_entry = GetDeoptimizationEntry(deoptimization_index, LAZY);
- patcher.masm()->call(deopt_entry, RelocInfo::NONE);
-
- // We use RUNTIME_ENTRY for deoptimization bailouts.
- RelocInfo rinfo(curr_address + 1, // 1 after the call opcode.
- RelocInfo::RUNTIME_ENTRY,
- reinterpret_cast<intptr_t>(deopt_entry),
- NULL);
- reloc_info_writer.Write(&rinfo);
- ASSERT_GE(reloc_info_writer.pos(),
- reloc_info->address() + ByteArray::kHeaderSize);
- curr_address += patch_size();
- }
- prev_address = curr_address;
+ //
+ // Emit call to lazy deoptimization at all lazy deopt points.
+ DeoptimizationInputData* deopt_data =
+ DeoptimizationInputData::cast(code->deoptimization_data());
+#ifdef DEBUG
+ Address prev_call_address = NULL;
+#endif
+ for (int i = 0; i < deopt_data->DeoptCount(); i++) {
+ if (deopt_data->Pc(i)->value() == -1) continue;
+ // Patch lazy deoptimization entry.
+ Address call_address = code_start_address + deopt_data->Pc(i)->value();
+ CodePatcher patcher(call_address, patch_size());
+ Address deopt_entry = GetDeoptimizationEntry(i, LAZY);
+ patcher.masm()->call(deopt_entry, RelocInfo::NONE);
+ // We use RUNTIME_ENTRY for deoptimization bailouts.
+ RelocInfo rinfo(call_address + 1, // 1 after the call opcode.
+ RelocInfo::RUNTIME_ENTRY,
+ reinterpret_cast<intptr_t>(deopt_entry),
+ NULL);
+ reloc_info_writer.Write(&rinfo);
+ ASSERT_GE(reloc_info_writer.pos(),
+ reloc_info->address() + ByteArray::kHeaderSize);
+ ASSERT(prev_call_address == NULL ||
+ call_address >= prev_call_address + patch_size());
+ ASSERT(call_address + patch_size() <= code->instruction_end());
+#ifdef DEBUG
+ prev_call_address = call_address;
+#endif
}
- ZapCodeRange(prev_address,
- code_start_address + code->safepoint_table_offset());
// Move the relocation info to the beginning of the byte array.
int new_reloc_size = reloc_end_address - reloc_info_writer.pos();
@@ -218,22 +201,26 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
PrintF("[forced deoptimization: ");
function->PrintName();
PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function));
-#ifdef DEBUG
- if (FLAG_print_code) {
- code->PrintLn();
- }
-#endif
}
}
+static const byte kJnsInstruction = 0x79;
+static const byte kJnsOffset = 0x13;
+static const byte kJaeInstruction = 0x73;
+static const byte kJaeOffset = 0x07;
+static const byte kCallInstruction = 0xe8;
+static const byte kNopByteOne = 0x66;
+static const byte kNopByteTwo = 0x90;
+
+
void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
Address pc_after,
Code* check_code,
Code* replacement_code) {
Address call_target_address = pc_after - kIntSize;
- ASSERT(check_code->entry() ==
- Assembler::target_address_at(call_target_address));
+ ASSERT_EQ(check_code->entry(),
+ Assembler::target_address_at(call_target_address));
// The stack check code matches the pattern:
//
// cmp esp, <limit>
@@ -250,11 +237,17 @@ void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
// call <on-stack replacment>
// test eax, <loop nesting depth>
// ok:
- ASSERT(*(call_target_address - 3) == 0x73 && // jae
- *(call_target_address - 2) == 0x07 && // offset
- *(call_target_address - 1) == 0xe8); // call
- *(call_target_address - 3) = 0x90; // nop
- *(call_target_address - 2) = 0x90; // nop
+
+ if (FLAG_count_based_interrupts) {
+ ASSERT_EQ(kJnsInstruction, *(call_target_address - 3));
+ ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
+ } else {
+ ASSERT_EQ(kJaeInstruction, *(call_target_address - 3));
+ ASSERT_EQ(kJaeOffset, *(call_target_address - 2));
+ }
+ ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
+ *(call_target_address - 3) = kNopByteOne;
+ *(call_target_address - 2) = kNopByteTwo;
Assembler::set_target_address_at(call_target_address,
replacement_code->entry());
@@ -268,15 +261,21 @@ void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
Code* check_code,
Code* replacement_code) {
Address call_target_address = pc_after - kIntSize;
- ASSERT(replacement_code->entry() ==
- Assembler::target_address_at(call_target_address));
+ ASSERT_EQ(replacement_code->entry(),
+ Assembler::target_address_at(call_target_address));
+
// Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to
// restore the conditional branch.
- ASSERT(*(call_target_address - 3) == 0x90 && // nop
- *(call_target_address - 2) == 0x90 && // nop
- *(call_target_address - 1) == 0xe8); // call
- *(call_target_address - 3) = 0x73; // jae
- *(call_target_address - 2) = 0x07; // offset
+ ASSERT_EQ(kNopByteOne, *(call_target_address - 3));
+ ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));
+ ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
+ if (FLAG_count_based_interrupts) {
+ *(call_target_address - 3) = kJnsInstruction;
+ *(call_target_address - 2) = kJnsOffset;
+ } else {
+ *(call_target_address - 3) = kJaeInstruction;
+ *(call_target_address - 2) = kJaeOffset;
+ }
Assembler::set_target_address_at(call_target_address,
check_code->entry());
@@ -321,12 +320,13 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
ASSERT(Translation::BEGIN == opcode);
USE(opcode);
int count = iterator.Next();
+ iterator.Next(); // Drop JS frames count.
ASSERT(count == 1);
USE(count);
opcode = static_cast<Translation::Opcode>(iterator.Next());
USE(opcode);
- ASSERT(Translation::FRAME == opcode);
+ ASSERT(Translation::JS_FRAME == opcode);
unsigned node_id = iterator.Next();
USE(node_id);
ASSERT(node_id == ast_id);
@@ -362,9 +362,7 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
output_ = new FrameDescription*[1];
output_[0] = new(output_frame_size) FrameDescription(
output_frame_size, function_);
-#ifdef DEBUG
- output_[0]->SetKind(Code::OPTIMIZED_FUNCTION);
-#endif
+ output_[0]->SetFrameType(StackFrame::JAVA_SCRIPT);
// Clear the incoming parameters in the optimized frame to avoid
// confusing the garbage collector.
@@ -428,15 +426,8 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
output_[0] = input_;
output_[0]->SetPc(reinterpret_cast<uint32_t>(from_));
} else {
- // Setup the frame pointer and the context pointer.
- // All OSR stack frames are dynamically aligned to an 8-byte boundary.
- int frame_pointer = input_->GetRegister(ebp.code());
- if ((frame_pointer & 0x4) == 0) {
- // Return address at FP + 4 should be aligned, so FP mod 8 should be 4.
- frame_pointer -= kPointerSize;
- has_alignment_padding_ = 1;
- }
- output_[0]->SetRegister(ebp.code(), frame_pointer);
+ // Set up the frame pointer and the context pointer.
+ output_[0]->SetRegister(ebp.code(), input_->GetRegister(ebp.code()));
output_[0]->SetRegister(esi.code(), input_->GetRegister(esi.code()));
unsigned pc_offset = data->OsrPcOffset()->value();
@@ -459,13 +450,208 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
}
-void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
- int frame_index) {
- // Read the ast node id, function, and frame height for this output frame.
- Translation::Opcode opcode =
- static_cast<Translation::Opcode>(iterator->Next());
- USE(opcode);
- ASSERT(Translation::FRAME == opcode);
+void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
+ int frame_index) {
+ JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ unsigned height = iterator->Next();
+ unsigned height_in_bytes = height * kPointerSize;
+ if (FLAG_trace_deopt) {
+ PrintF(" translating arguments adaptor => height=%d\n", height_in_bytes);
+ }
+
+ unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
+ unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+ // Allocate and store the output frame description.
+ FrameDescription* output_frame =
+ new(output_frame_size) FrameDescription(output_frame_size, function);
+ output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);
+
+ // Arguments adaptor can not be topmost or bottommost.
+ ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+ ASSERT(output_[frame_index] == NULL);
+ output_[frame_index] = output_frame;
+
+ // The top address of the frame is computed from the previous
+ // frame's top and this frame's size.
+ uint32_t top_address;
+ top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+ output_frame->SetTop(top_address);
+
+ // Compute the incoming parameter translation.
+ int parameter_count = height;
+ unsigned output_offset = output_frame_size;
+ for (int i = 0; i < parameter_count; ++i) {
+ output_offset -= kPointerSize;
+ DoTranslateCommand(iterator, frame_index, output_offset);
+ }
+
+ // Read caller's PC from the previous frame.
+ output_offset -= kPointerSize;
+ intptr_t callers_pc = output_[frame_index - 1]->GetPc();
+ output_frame->SetFrameSlot(output_offset, callers_pc);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
+ top_address + output_offset, output_offset, callers_pc);
+ }
+
+ // Read caller's FP from the previous frame, and set this frame's FP.
+ output_offset -= kPointerSize;
+ intptr_t value = output_[frame_index - 1]->GetFp();
+ output_frame->SetFrameSlot(output_offset, value);
+ intptr_t fp_value = top_address + output_offset;
+ output_frame->SetFp(fp_value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
+ fp_value, output_offset, value);
+ }
+
+ // A marker value is used in place of the context.
+ output_offset -= kPointerSize;
+ intptr_t context = reinterpret_cast<intptr_t>(
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ output_frame->SetFrameSlot(output_offset, context);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context (adaptor sentinel)\n",
+ top_address + output_offset, output_offset, context);
+ }
+
+ // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(function);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // Number of incoming arguments.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
+ top_address + output_offset, output_offset, value, height - 1);
+ }
+
+ ASSERT(0 == output_offset);
+
+ Builtins* builtins = isolate_->builtins();
+ Code* adaptor_trampoline =
+ builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
+ uint32_t pc = reinterpret_cast<uint32_t>(
+ adaptor_trampoline->instruction_start() +
+ isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
+ output_frame->SetPc(pc);
+}
+
+
+void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
+ int frame_index) {
+ JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ unsigned height = iterator->Next();
+ unsigned height_in_bytes = height * kPointerSize;
+ if (FLAG_trace_deopt) {
+ PrintF(" translating construct stub => height=%d\n", height_in_bytes);
+ }
+
+ unsigned fixed_frame_size = 6 * kPointerSize;
+ unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+ // Allocate and store the output frame description.
+ FrameDescription* output_frame =
+ new(output_frame_size) FrameDescription(output_frame_size, function);
+ output_frame->SetFrameType(StackFrame::CONSTRUCT);
+
+ // Construct stub can not be topmost or bottommost.
+ ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+ ASSERT(output_[frame_index] == NULL);
+ output_[frame_index] = output_frame;
+
+ // The top address of the frame is computed from the previous
+ // frame's top and this frame's size.
+ uint32_t top_address;
+ top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+ output_frame->SetTop(top_address);
+
+ // Compute the incoming parameter translation.
+ int parameter_count = height;
+ unsigned output_offset = output_frame_size;
+ for (int i = 0; i < parameter_count; ++i) {
+ output_offset -= kPointerSize;
+ DoTranslateCommand(iterator, frame_index, output_offset);
+ }
+
+ // Read caller's PC from the previous frame.
+ output_offset -= kPointerSize;
+ intptr_t callers_pc = output_[frame_index - 1]->GetPc();
+ output_frame->SetFrameSlot(output_offset, callers_pc);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
+ top_address + output_offset, output_offset, callers_pc);
+ }
+
+ // Read caller's FP from the previous frame, and set this frame's FP.
+ output_offset -= kPointerSize;
+ intptr_t value = output_[frame_index - 1]->GetFp();
+ output_frame->SetFrameSlot(output_offset, value);
+ intptr_t fp_value = top_address + output_offset;
+ output_frame->SetFp(fp_value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
+ fp_value, output_offset, value);
+ }
+
+ // The context can be gotten from the previous frame.
+ output_offset -= kPointerSize;
+ value = output_[frame_index - 1]->GetContext();
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // A marker value is used in place of the function.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function (construct sentinel)\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // Number of incoming arguments.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
+ top_address + output_offset, output_offset, value, height - 1);
+ }
+
+ // The newly allocated object was passed as receiver in the artificial
+ // constructor stub environment created by HEnvironment::CopyForInlining().
+ output_offset -= kPointerSize;
+ value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; allocated receiver\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ ASSERT(0 == output_offset);
+
+ Builtins* builtins = isolate_->builtins();
+ Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
+ uint32_t pc = reinterpret_cast<uint32_t>(
+ construct_stub->instruction_start() +
+ isolate_->heap()->construct_stub_deopt_pc_offset()->value());
+ output_frame->SetPc(pc);
+}
+
+
+void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
+ int frame_index) {
int node_id = iterator->Next();
JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
unsigned height = iterator->Next();
@@ -485,9 +671,7 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
// Allocate and store the output frame description.
FrameDescription* output_frame =
new(output_frame_size) FrameDescription(output_frame_size, function);
-#ifdef DEBUG
- output_frame->SetKind(Code::FUNCTION);
-#endif
+ output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);
bool is_bottommost = (0 == frame_index);
bool is_topmost = (output_count_ - 1 == frame_index);
@@ -501,11 +685,9 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
// top address and the current frame's size.
uint32_t top_address;
if (is_bottommost) {
- // If the optimized frame had alignment padding, adjust the frame pointer
- // to point to the new position of the old frame pointer after padding
- // is removed. Subtract 2 * kPointerSize for the context and function slots.
- top_address = input_->GetRegister(ebp.code()) - (2 * kPointerSize) -
- height_in_bytes + has_alignment_padding_ * kPointerSize;
+ // 2 = context and function in the frame.
+ top_address =
+ input_->GetRegister(ebp.code()) - (2 * kPointerSize) - height_in_bytes;
} else {
top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
}
@@ -556,9 +738,7 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
}
output_frame->SetFrameSlot(output_offset, value);
intptr_t fp_value = top_address + output_offset;
- ASSERT(!is_bottommost ||
- input_->GetRegister(ebp.code()) + has_alignment_padding_ * kPointerSize
- == fp_value);
+ ASSERT(!is_bottommost || input_->GetRegister(ebp.code()) == fp_value);
output_frame->SetFp(fp_value);
if (is_topmost) output_frame->SetRegister(ebp.code(), fp_value);
if (FLAG_trace_deopt) {
@@ -577,6 +757,7 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
value = reinterpret_cast<uint32_t>(function->context());
}
output_frame->SetFrameSlot(output_offset, value);
+ output_frame->SetContext(value);
if (is_topmost) output_frame->SetRegister(esi.code(), value);
if (FLAG_trace_deopt) {
PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
@@ -747,17 +928,6 @@ void Deoptimizer::EntryGenerator::Generate() {
__ cmp(ecx, esp);
__ j(not_equal, &pop_loop);
- // If frame was dynamically aligned, pop padding.
- Label sentinel, sentinel_done;
- __ pop(ecx);
- __ cmp(ecx, Operand(eax, Deoptimizer::frame_alignment_marker_offset()));
- __ j(equal, &sentinel);
- __ push(ecx);
- __ jmp(&sentinel_done);
- __ bind(&sentinel);
- __ mov(Operand(eax, Deoptimizer::has_alignment_padding_offset()),
- Immediate(1));
- __ bind(&sentinel_done);
// Compute the output frame in the deoptimizer.
__ push(eax);
__ PrepareCallCFunction(1, ebx);
@@ -769,17 +939,6 @@ void Deoptimizer::EntryGenerator::Generate() {
}
__ pop(eax);
- if (type() == OSR) {
- // If alignment padding is added, push the sentinel.
- Label no_osr_padding;
- __ cmp(Operand(eax, Deoptimizer::has_alignment_padding_offset()),
- Immediate(0));
- __ j(equal, &no_osr_padding, Label::kNear);
- __ push(Operand(eax, Deoptimizer::frame_alignment_marker_offset()));
- __ bind(&no_osr_padding);
- }
-
-
// Replace the current frame with the output frames.
Label outer_push_loop, inner_push_loop;
// Outer loop state: eax = current FrameDescription**, edx = one past the
diff --git a/src/3rdparty/v8/src/ia32/disasm-ia32.cc b/src/3rdparty/v8/src/ia32/disasm-ia32.cc
index da22390..b5ddcca 100644
--- a/src/3rdparty/v8/src/ia32/disasm-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/disasm-ia32.cc
@@ -763,10 +763,13 @@ int DisassemblerIA32::RegisterFPUInstruction(int escape_opcode,
case 0xEB: mnem = "fldpi"; break;
case 0xED: mnem = "fldln2"; break;
case 0xEE: mnem = "fldz"; break;
+ case 0xF0: mnem = "f2xm1"; break;
case 0xF1: mnem = "fyl2x"; break;
case 0xF5: mnem = "fprem1"; break;
case 0xF7: mnem = "fincstp"; break;
case 0xF8: mnem = "fprem"; break;
+ case 0xFC: mnem = "frndint"; break;
+ case 0xFD: mnem = "fscale"; break;
case 0xFE: mnem = "fsin"; break;
case 0xFF: mnem = "fcos"; break;
default: UnimplementedInstruction();
@@ -788,6 +791,8 @@ int DisassemblerIA32::RegisterFPUInstruction(int escape_opcode,
has_register = true;
} else if (modrm_byte == 0xE2) {
mnem = "fclex";
+ } else if (modrm_byte == 0xE3) {
+ mnem = "fninit";
} else {
UnimplementedInstruction();
}
@@ -987,7 +992,7 @@ int DisassemblerIA32::InstructionDecode(v8::internal::Vector<char> out_buffer,
break;
case 0x0F:
- { byte f0byte = *(data+1);
+ { byte f0byte = data[1];
const char* f0mnem = F0Mnem(f0byte);
if (f0byte == 0x18) {
int mod, regop, rm;
@@ -995,6 +1000,25 @@ int DisassemblerIA32::InstructionDecode(v8::internal::Vector<char> out_buffer,
const char* suffix[] = {"nta", "1", "2", "3"};
AppendToBuffer("%s%s ", f0mnem, suffix[regop & 0x03]);
data += PrintRightOperand(data);
+ } else if (f0byte == 0x1F && data[2] == 0) {
+ AppendToBuffer("nop"); // 3 byte nop.
+ data += 3;
+ } else if (f0byte == 0x1F && data[2] == 0x40 && data[3] == 0) {
+ AppendToBuffer("nop"); // 4 byte nop.
+ data += 4;
+ } else if (f0byte == 0x1F && data[2] == 0x44 && data[3] == 0 &&
+ data[4] == 0) {
+ AppendToBuffer("nop"); // 5 byte nop.
+ data += 5;
+ } else if (f0byte == 0x1F && data[2] == 0x80 && data[3] == 0 &&
+ data[4] == 0 && data[5] == 0 && data[6] == 0) {
+ AppendToBuffer("nop"); // 7 byte nop.
+ data += 7;
+ } else if (f0byte == 0x1F && data[2] == 0x84 && data[3] == 0 &&
+ data[4] == 0 && data[5] == 0 && data[6] == 0 &&
+ data[7] == 0) {
+ AppendToBuffer("nop"); // 8 byte nop.
+ data += 8;
} else if (f0byte == 0xA2 || f0byte == 0x31) {
AppendToBuffer("%s", f0mnem);
data += 2;
@@ -1130,8 +1154,12 @@ int DisassemblerIA32::InstructionDecode(v8::internal::Vector<char> out_buffer,
break;
case 0x66: // prefix
- data++;
- if (*data == 0x8B) {
+ while (*data == 0x66) data++;
+ if (*data == 0xf && data[1] == 0x1f) {
+ AppendToBuffer("nop"); // 0x66 prefix
+ } else if (*data == 0x90) {
+ AppendToBuffer("nop"); // 0x66 prefix
+ } else if (*data == 0x8B) {
data++;
data += PrintOperands("mov_w", REG_OPER_OP_ORDER, data);
} else if (*data == 0x89) {
@@ -1185,6 +1213,16 @@ int DisassemblerIA32::InstructionDecode(v8::internal::Vector<char> out_buffer,
NameOfXMMRegister(rm),
static_cast<int>(imm8));
data += 2;
+ } else if (*data == 0x17) {
+ data++;
+ int mod, regop, rm;
+ get_modrm(*data, &mod, &regop, &rm);
+ int8_t imm8 = static_cast<int8_t>(data[1]);
+ AppendToBuffer("extractps %s,%s,%d",
+ NameOfCPURegister(regop),
+ NameOfXMMRegister(rm),
+ static_cast<int>(imm8));
+ data += 2;
} else if (*data == 0x22) {
data++;
int mod, regop, rm;
@@ -1258,6 +1296,9 @@ int DisassemblerIA32::InstructionDecode(v8::internal::Vector<char> out_buffer,
NameOfXMMRegister(rm),
static_cast<int>(imm8));
data += 2;
+ } else if (*data == 0x90) {
+ data++;
+ AppendToBuffer("nop"); // 2 byte nop.
} else if (*data == 0xF3) {
data++;
int mod, regop, rm;
diff --git a/src/3rdparty/v8/src/ia32/frames-ia32.h b/src/3rdparty/v8/src/ia32/frames-ia32.h
index 2f1b2a9..9e51857 100644
--- a/src/3rdparty/v8/src/ia32/frames-ia32.h
+++ b/src/3rdparty/v8/src/ia32/frames-ia32.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -34,37 +34,37 @@ namespace internal {
// Register lists
// Note that the bit values must match those used in actual instruction encoding
-static const int kNumRegs = 8;
+const int kNumRegs = 8;
// Caller-saved registers
-static const RegList kJSCallerSaved =
+const RegList kJSCallerSaved =
1 << 0 | // eax
1 << 1 | // ecx
1 << 2 | // edx
1 << 3 | // ebx - used as a caller-saved register in JavaScript code
1 << 7; // edi - callee function
-static const int kNumJSCallerSaved = 5;
+const int kNumJSCallerSaved = 5;
typedef Object* JSCallerSavedBuffer[kNumJSCallerSaved];
// Number of registers for which space is reserved in safepoints.
-static const int kNumSafepointRegisters = 8;
+const int kNumSafepointRegisters = 8;
// ----------------------------------------------------
class StackHandlerConstants : public AllStatic {
public:
- static const int kNextOffset = 0 * kPointerSize;
- static const int kContextOffset = 1 * kPointerSize;
- static const int kFPOffset = 2 * kPointerSize;
- static const int kStateOffset = 3 * kPointerSize;
- static const int kPCOffset = 4 * kPointerSize;
+ static const int kNextOffset = 0 * kPointerSize;
+ static const int kCodeOffset = 1 * kPointerSize;
+ static const int kStateOffset = 2 * kPointerSize;
+ static const int kContextOffset = 3 * kPointerSize;
+ static const int kFPOffset = 4 * kPointerSize;
- static const int kSize = kPCOffset + kPointerSize;
+ static const int kSize = kFPOffset + kPointerSize;
};
@@ -95,9 +95,11 @@ class ExitFrameConstants : public AllStatic {
class StandardFrameConstants : public AllStatic {
public:
+ // Fixed part of the frame consists of return address, caller fp,
+ // context and function.
// StandardFrame::IterateExpressions assumes that kContextOffset is the last
// object pointer.
- static const int kFixedFrameSize = 4; // Currently unused.
+ static const int kFixedFrameSize = 4 * kPointerSize;
static const int kExpressionsOffset = -3 * kPointerSize;
static const int kMarkerOffset = -2 * kPointerSize;
static const int kContextOffset = -1 * kPointerSize;
@@ -123,6 +125,8 @@ class JavaScriptFrameConstants : public AllStatic {
class ArgumentsAdaptorFrameConstants : public AllStatic {
public:
static const int kLengthOffset = StandardFrameConstants::kExpressionsOffset;
+ static const int kFrameSize =
+ StandardFrameConstants::kFixedFrameSize + kPointerSize;
};
diff --git a/src/3rdparty/v8/src/ia32/full-codegen-ia32.cc b/src/3rdparty/v8/src/ia32/full-codegen-ia32.cc
index 84149bd..376671d 100644
--- a/src/3rdparty/v8/src/ia32/full-codegen-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/full-codegen-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -34,6 +34,7 @@
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
+#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"
@@ -106,17 +107,19 @@ class JumpPatchSite BASE_EMBEDDED {
// formal parameter count expected by the function.
//
// The live registers are:
-// o edi: the JS function object being called (ie, ourselves)
+// o edi: the JS function object being called (i.e. ourselves)
// o esi: our context
// o ebp: our caller's frame pointer
// o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ia32.h for its layout.
-void FullCodeGenerator::Generate(CompilationInfo* info) {
- ASSERT(info_ == NULL);
- info_ = info;
- scope_ = info->scope();
+void FullCodeGenerator::Generate() {
+ CompilationInfo* info = info_;
+ handler_table_ =
+ isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
+ profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
+ Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
SetFunctionPosition(function());
Comment cmnt(masm_, "[ function compiled by full code generator");
@@ -131,12 +134,16 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// with undefined when called as functions (without an explicit
// receiver object). ecx is zero for method calls and non-zero for
// function calls.
- if (info->is_strict_mode() || info->is_native()) {
+ if (!info->is_classic_mode() || info->is_native()) {
Label ok;
__ test(ecx, ecx);
__ j(zero, &ok, Label::kNear);
// +1 for return address.
int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
+ __ mov(ecx, Operand(esp, receiver_offset));
+ __ JumpIfSmi(ecx, &ok);
+ __ CmpObjectType(ecx, JS_GLOBAL_PROXY_TYPE, ecx);
+ __ j(not_equal, &ok, Label::kNear);
__ mov(Operand(esp, receiver_offset),
Immediate(isolate()->factory()->undefined_value()));
__ bind(&ok);
@@ -168,13 +175,12 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// Possibly allocate a local context.
int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
- if (heap_slots > 0 ||
- (scope()->is_qml_mode() && scope()->is_global_scope())) {
+ if (heap_slots > 0) {
Comment cmnt(masm_, "[ Allocate local context");
// Argument to NewContext is the function, which is still in edi.
__ push(edi);
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub((heap_slots < 0)?0:heap_slots);
+ FastNewContextStub stub(heap_slots);
__ CallStub(&stub);
} else {
__ CallRuntime(Runtime::kNewFunctionContext, 1);
@@ -222,12 +228,12 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
__ push(edx);
__ SafePush(Immediate(Smi::FromInt(num_parameters)));
- // Arguments to ArgumentsAccessStub and/or New...:
+ // Arguments to ArgumentsAccessStub:
// function, receiver address, parameter count.
// The stub will rewrite receiver and parameter count if the previous
// stack frame was an arguments adapter frame.
ArgumentsAccessStub::Type type;
- if (is_strict_mode()) {
+ if (!is_classic_mode()) {
type = ArgumentsAccessStub::NEW_STRICT;
} else if (function()->has_duplicate_parameters()) {
type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
@@ -256,11 +262,11 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
- int ignored = 0;
VariableProxy* proxy = scope()->function();
ASSERT(proxy->var()->mode() == CONST ||
proxy->var()->mode() == CONST_HARMONY);
- EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
+ ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
+ EmitDeclaration(proxy, proxy->var()->mode(), NULL);
}
VisitDeclarations(scope()->declarations());
}
@@ -298,15 +304,62 @@ void FullCodeGenerator::ClearAccumulator() {
}
-void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
+void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
+ __ mov(ebx, Immediate(profiling_counter_));
+ __ sub(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
+ Immediate(Smi::FromInt(delta)));
+}
+
+
+void FullCodeGenerator::EmitProfilingCounterReset() {
+ int reset_value = FLAG_interrupt_budget;
+ if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
+ // Self-optimization is a one-off thing: if it fails, don't try again.
+ reset_value = Smi::kMaxValue;
+ }
+ if (isolate()->IsDebuggerActive()) {
+ // Detect debug break requests as soon as possible.
+ reset_value = 10;
+ }
+ __ mov(ebx, Immediate(profiling_counter_));
+ __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
+ Immediate(Smi::FromInt(reset_value)));
+}
+
+
+static const int kMaxBackEdgeWeight = 127;
+static const int kBackEdgeDistanceDivisor = 100;
+
+
+void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
+ Label* back_edge_target) {
Comment cmnt(masm_, "[ Stack check");
Label ok;
- ExternalReference stack_limit =
- ExternalReference::address_of_stack_limit(isolate());
- __ cmp(esp, Operand::StaticVariable(stack_limit));
- __ j(above_equal, &ok, Label::kNear);
- StackCheckStub stub;
- __ CallStub(&stub);
+
+ if (FLAG_count_based_interrupts) {
+ int weight = 1;
+ if (FLAG_weighted_back_edges) {
+ ASSERT(back_edge_target->is_bound());
+ int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
+ weight = Min(kMaxBackEdgeWeight,
+ Max(1, distance / kBackEdgeDistanceDivisor));
+ }
+ EmitProfilingCounterDecrement(weight);
+ __ j(positive, &ok, Label::kNear);
+ InterruptStub stub;
+ __ CallStub(&stub);
+ } else {
+ // Count based interrupts happen often enough when they are enabled
+ // that the additional stack checks are not necessary (they would
+ // only check for interrupts).
+ ExternalReference stack_limit =
+ ExternalReference::address_of_stack_limit(isolate());
+ __ cmp(esp, Operand::StaticVariable(stack_limit));
+ __ j(above_equal, &ok, Label::kNear);
+ StackCheckStub stub;
+ __ CallStub(&stub);
+ }
+
// Record a mapping of this PC offset to the OSR id. This is used to find
// the AST id from the unoptimized code in order to use it as a key into
// the deoptimization input data found in the optimized code.
@@ -319,6 +372,10 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
ASSERT(loop_depth() > 0);
__ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
+ if (FLAG_count_based_interrupts) {
+ EmitProfilingCounterReset();
+ }
+
__ bind(&ok);
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
// Record a mapping of the OSR id to this PC. This is used if the OSR
@@ -339,6 +396,31 @@ void FullCodeGenerator::EmitReturnSequence() {
__ push(eax);
__ CallRuntime(Runtime::kTraceExit, 1);
}
+ if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
+ // Pretend that the exit is a backwards jump to the entry.
+ int weight = 1;
+ if (info_->ShouldSelfOptimize()) {
+ weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+ } else if (FLAG_weighted_back_edges) {
+ int distance = masm_->pc_offset();
+ weight = Min(kMaxBackEdgeWeight,
+ Max(1, distance / kBackEdgeDistanceDivisor));
+ }
+ EmitProfilingCounterDecrement(weight);
+ Label ok;
+ __ j(positive, &ok, Label::kNear);
+ __ push(eax);
+ if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
+ __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+ __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
+ } else {
+ InterruptStub stub;
+ __ CallStub(&stub);
+ }
+ __ pop(eax);
+ EmitProfilingCounterReset();
+ __ bind(&ok);
+ }
#ifdef DEBUG
// Add a label for checking the size of the code used for returning.
Label check_exit_codesize;
@@ -676,17 +758,16 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
VariableMode mode,
- FunctionLiteral* function,
- int* global_count) {
+ FunctionLiteral* function) {
// If it was not possible to allocate the variable at compile time, we
// need to "declare" it at runtime to make sure it actually exists in the
// local context.
Variable* variable = proxy->var();
- bool binding_needs_init =
- mode == CONST || mode == CONST_HARMONY || mode == LET;
+ bool binding_needs_init = (function == NULL) &&
+ (mode == CONST || mode == CONST_HARMONY || mode == LET);
switch (variable->location()) {
case Variable::UNALLOCATED:
- ++(*global_count);
+ ++global_count_;
break;
case Variable::PARAMETER:
@@ -766,9 +847,6 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
}
-void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
-
-
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals.
__ push(esi); // The context is the first argument.
@@ -830,7 +908,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
- __ call(ic, RelocInfo::CODE_TARGET, clause->CompareId());
+ CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
patch_site.EmitPatchInfo();
__ test(eax, eax);
__ j(not_equal, &next_test);
@@ -879,6 +957,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ cmp(eax, isolate()->factory()->null_value());
__ j(equal, &exit);
+ PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
+
// Convert the object to a JS object.
Label convert, done_convert;
__ JumpIfSmi(eax, &convert, Label::kNear);
@@ -891,7 +971,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ push(eax);
// Check for proxies.
- Label call_runtime;
+ Label call_runtime, use_cache, fixed_array;
STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
__ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
__ j(below_equal, &call_runtime);
@@ -900,61 +980,19 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// the JSObject::IsSimpleEnum cache validity checks. If we cannot
// guarantee cache validity, call the runtime system to check cache
// validity or get the property names in a fixed array.
- Label next;
- __ mov(ecx, eax);
- __ bind(&next);
-
- // Check that there are no elements. Register ecx contains the
- // current JS object we've reached through the prototype chain.
- __ cmp(FieldOperand(ecx, JSObject::kElementsOffset),
- isolate()->factory()->empty_fixed_array());
- __ j(not_equal, &call_runtime);
-
- // Check that instance descriptors are not empty so that we can
- // check for an enum cache. Leave the map in ebx for the subsequent
- // prototype load.
- __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
- __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOrBitField3Offset));
- __ JumpIfSmi(edx, &call_runtime);
-
- // Check that there is an enum cache in the non-empty instance
- // descriptors (edx). This is the case if the next enumeration
- // index field does not contain a smi.
- __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
- __ JumpIfSmi(edx, &call_runtime);
-
- // For all objects but the receiver, check that the cache is empty.
- Label check_prototype;
- __ cmp(ecx, eax);
- __ j(equal, &check_prototype, Label::kNear);
- __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
- __ cmp(edx, isolate()->factory()->empty_fixed_array());
- __ j(not_equal, &call_runtime);
-
- // Load the prototype from the map and loop if non-null.
- __ bind(&check_prototype);
- __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
- __ cmp(ecx, isolate()->factory()->null_value());
- __ j(not_equal, &next);
+ __ CheckEnumCache(&call_runtime);
- // The enum cache is valid. Load the map of the object being
- // iterated over and use the cache for the iteration.
- Label use_cache;
__ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
__ jmp(&use_cache, Label::kNear);
// Get the set of properties to enumerate.
__ bind(&call_runtime);
- __ push(eax); // Duplicate the enumerable object on the stack.
+ __ push(eax);
__ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
-
- // If we got a map from the runtime call, we can do a fast
- // modification check. Otherwise, we got a fixed array, and we have
- // to do a slow check.
- Label fixed_array;
__ cmp(FieldOperand(eax, HeapObject::kMapOffset),
isolate()->factory()->meta_map());
- __ j(not_equal, &fixed_array, Label::kNear);
+ __ j(not_equal, &fixed_array);
+
// We got a map in register eax. Get the enumeration cache from it.
__ bind(&use_cache);
@@ -962,7 +1000,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
__ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));
- // Setup the four remaining stack slots.
+ // Set up the four remaining stack slots.
__ push(eax); // Map.
__ push(edx); // Enumeration cache.
__ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
@@ -973,6 +1011,16 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// We got a fixed array in register eax. Iterate through that.
Label non_proxy;
__ bind(&fixed_array);
+
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(
+ Handle<Object>(
+ Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
+ RecordTypeFeedbackCell(stmt->PrepareId(), cell);
+ __ LoadHeapObject(ebx, cell);
+ __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
+ Immediate(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
+
__ mov(ebx, Immediate(Smi::FromInt(1))); // Smi indicates slow check
__ mov(ecx, Operand(esp, 0 * kPointerSize)); // Get enumerated object
STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
@@ -987,6 +1035,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ push(Immediate(Smi::FromInt(0))); // Initial index.
// Generate code for doing the condition check.
+ PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
__ bind(&loop);
__ mov(eax, Operand(esp, 0 * kPointerSize)); // Get the current index.
__ cmp(eax, Operand(esp, 1 * kPointerSize)); // Compare to the array length.
@@ -1029,7 +1078,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ mov(result_register(), ebx);
// Perform the assignment as if via '='.
{ EffectContext context(this);
- EmitAssignment(stmt->each(), stmt->AssignmentId());
+ EmitAssignment(stmt->each());
}
// Generate code for the body of the loop.
@@ -1040,7 +1089,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ bind(loop_statement.continue_label());
__ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
- EmitStackCheck(stmt);
+ EmitStackCheck(stmt, &loop);
__ jmp(&loop);
// Remove the pointers stored on the stack.
@@ -1048,6 +1097,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ add(esp, Immediate(5 * kPointerSize));
// Exit and decrement the loop depth.
+ PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
__ bind(&exit);
decrement_loop_depth();
}
@@ -1066,7 +1116,7 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
- FastNewClosureStub stub(info->strict_mode_flag());
+ FastNewClosureStub stub(info->language_mode());
__ push(Immediate(info));
__ CallStub(&stub);
} else {
@@ -1137,13 +1187,13 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
// All extension objects were empty and it is safe to use a global
// load IC call.
- __ mov(eax, var->is_qml_global()?QmlGlobalObjectOperand():GlobalObjectOperand());
+ __ mov(eax, GlobalObjectOperand());
__ mov(ecx, var->name());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
? RelocInfo::CODE_TARGET
: RelocInfo::CODE_TARGET_CONTEXT;
- __ call(ic, mode);
+ CallIC(ic, mode);
}
@@ -1221,10 +1271,10 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
Comment cmnt(masm_, "Global variable");
// Use inline caching. Variable name is passed in ecx and the global
// object in eax.
- __ mov(eax, var->is_qml_global()?QmlGlobalObjectOperand():GlobalObjectOperand());
+ __ mov(eax, GlobalObjectOperand());
__ mov(ecx, var->name());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
context()->Plug(eax);
break;
}
@@ -1235,27 +1285,63 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
Comment cmnt(masm_, var->IsContextSlot()
? "Context variable"
: "Stack variable");
- if (!var->binding_needs_init()) {
- context()->Plug(var);
- } else {
- // Let and const need a read barrier.
- Label done;
- GetVar(eax, var);
- __ cmp(eax, isolate()->factory()->the_hole_value());
- __ j(not_equal, &done, Label::kNear);
- if (var->mode() == LET || var->mode() == CONST_HARMONY) {
- // Throw a reference error when using an uninitialized let/const
- // binding in harmony mode.
- __ push(Immediate(var->name()));
- __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ if (var->binding_needs_init()) {
+ // var->scope() may be NULL when the proxy is located in eval code and
+ // refers to a potential outside binding. Currently those bindings are
+ // always looked up dynamically, i.e. in that case
+ // var->location() == LOOKUP.
+ // always holds.
+ ASSERT(var->scope() != NULL);
+
+ // Check if the binding really needs an initialization check. The check
+ // can be skipped in the following situation: we have a LET or CONST
+ // binding in harmony mode, both the Variable and the VariableProxy have
+ // the same declaration scope (i.e. they are both in global code, in the
+ // same function or in the same eval code) and the VariableProxy is in
+ // the source physically located after the initializer of the variable.
+ //
+ // We cannot skip any initialization checks for CONST in non-harmony
+ // mode because const variables may be declared but never initialized:
+ // if (false) { const x; }; var y = x;
+ //
+ // The condition on the declaration scopes is a conservative check for
+ // nested functions that access a binding and are called before the
+ // binding is initialized:
+ // function() { f(); let x = 1; function f() { x = 2; } }
+ //
+ bool skip_init_check;
+ if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
+ skip_init_check = false;
} else {
- // Uninitalized const bindings outside of harmony mode are unholed.
- ASSERT(var->mode() == CONST);
- __ mov(eax, isolate()->factory()->undefined_value());
+ // Check that we always have valid source position.
+ ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
+ ASSERT(proxy->position() != RelocInfo::kNoPosition);
+ skip_init_check = var->mode() != CONST &&
+ var->initializer_position() < proxy->position();
+ }
+
+ if (!skip_init_check) {
+ // Let and const need a read barrier.
+ Label done;
+ GetVar(eax, var);
+ __ cmp(eax, isolate()->factory()->the_hole_value());
+ __ j(not_equal, &done, Label::kNear);
+ if (var->mode() == LET || var->mode() == CONST_HARMONY) {
+ // Throw a reference error when using an uninitialized let/const
+ // binding in harmony mode.
+ __ push(Immediate(var->name()));
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ } else {
+ // Uninitalized const bindings outside of harmony mode are unholed.
+ ASSERT(var->mode() == CONST);
+ __ mov(eax, isolate()->factory()->undefined_value());
+ }
+ __ bind(&done);
+ context()->Plug(eax);
+ break;
}
- __ bind(&done);
- context()->Plug(eax);
}
+ context()->Plug(var);
break;
}
@@ -1331,12 +1417,22 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
}
+void FullCodeGenerator::EmitAccessor(Expression* expression) {
+ if (expression == NULL) {
+ __ push(Immediate(isolate()->factory()->null_value()));
+ } else {
+ VisitForStackValue(expression);
+ }
+}
+
+
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Comment cmnt(masm_, "[ ObjectLiteral");
+ Handle<FixedArray> constant_properties = expr->constant_properties();
__ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
__ push(Immediate(Smi::FromInt(expr->literal_index())));
- __ push(Immediate(expr->constant_properties()));
+ __ push(Immediate(constant_properties));
int flags = expr->fast_elements()
? ObjectLiteral::kFastElements
: ObjectLiteral::kNoFlags;
@@ -1344,10 +1440,15 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
? ObjectLiteral::kHasFunction
: ObjectLiteral::kNoFlags;
__ push(Immediate(Smi::FromInt(flags)));
+ int properties_count = constant_properties->length() / 2;
if (expr->depth() > 1) {
__ CallRuntime(Runtime::kCreateObjectLiteral, 4);
- } else {
+ } else if (flags != ObjectLiteral::kFastElements ||
+ properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
__ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
+ } else {
+ FastCloneShallowObjectStub stub(properties_count);
+ __ CallStub(&stub);
}
// If result_saved is true the result is on top of the stack. If
@@ -1359,6 +1460,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// marked expressions, no store code is emitted.
expr->CalculateEmitStore();
+ AccessorTable accessor_table(isolate()->zone());
for (int i = 0; i < expr->properties()->length(); i++) {
ObjectLiteral::Property* property = expr->properties()->at(i);
if (property->IsCompileTimeValue()) continue;
@@ -1370,6 +1472,8 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
result_saved = true;
}
switch (property->kind()) {
+ case ObjectLiteral::Property::CONSTANT:
+ UNREACHABLE();
case ObjectLiteral::Property::MATERIALIZED_LITERAL:
ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
// Fall through.
@@ -1379,10 +1483,10 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
VisitForAccumulatorValue(value);
__ mov(ecx, Immediate(key->handle()));
__ mov(edx, Operand(esp, 0));
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, key->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, key->id());
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
@@ -1401,20 +1505,28 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ Drop(3);
}
break;
- case ObjectLiteral::Property::SETTER:
case ObjectLiteral::Property::GETTER:
- __ push(Operand(esp, 0)); // Duplicate receiver.
- VisitForStackValue(key);
- __ push(Immediate(property->kind() == ObjectLiteral::Property::SETTER ?
- Smi::FromInt(1) :
- Smi::FromInt(0)));
- VisitForStackValue(value);
- __ CallRuntime(Runtime::kDefineAccessor, 4);
+ accessor_table.lookup(key)->second->getter = value;
+ break;
+ case ObjectLiteral::Property::SETTER:
+ accessor_table.lookup(key)->second->setter = value;
break;
- default: UNREACHABLE();
}
}
+ // Emit code to define accessors, using only a single call to the runtime for
+ // each pair of corresponding getters and setters.
+ for (AccessorTable::Iterator it = accessor_table.begin();
+ it != accessor_table.end();
+ ++it) {
+ __ push(Operand(esp, 0)); // Duplicate receiver.
+ VisitForStackValue(it->first);
+ EmitAccessor(it->second->getter);
+ EmitAccessor(it->second->setter);
+ __ push(Immediate(Smi::FromInt(NONE)));
+ __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
+ }
+
if (expr->has_function()) {
ASSERT(result_saved);
__ push(Operand(esp, 0));
@@ -1438,6 +1550,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
ASSERT_EQ(2, constant_elements->length());
ElementsKind constant_elements_kind =
static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
+ bool has_constant_fast_elements = constant_elements_kind == FAST_ELEMENTS;
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
@@ -1445,13 +1558,16 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ push(Immediate(Smi::FromInt(expr->literal_index())));
__ push(Immediate(constant_elements));
- if (constant_elements_values->map() ==
- isolate()->heap()->fixed_cow_array_map()) {
- ASSERT(expr->depth() == 1);
+ Heap* heap = isolate()->heap();
+ if (has_constant_fast_elements &&
+ constant_elements_values->map() == heap->fixed_cow_array_map()) {
+ // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+ // change, so it's possible to specialize the stub in advance.
+ __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
FastCloneShallowArrayStub stub(
- FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
+ FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
+ length);
__ CallStub(&stub);
- __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
} else if (expr->depth() > 1) {
__ CallRuntime(Runtime::kCreateArrayLiteral, 3);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
@@ -1460,10 +1576,11 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
ASSERT(constant_elements_kind == FAST_ELEMENTS ||
constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
FLAG_smi_only_arrays);
- FastCloneShallowArrayStub::Mode mode =
- constant_elements_kind == FAST_DOUBLE_ELEMENTS
- ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
- : FastCloneShallowArrayStub::CLONE_ELEMENTS;
+ // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+ // change, so it's possible to specialize the stub in advance.
+ FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
+ ? FastCloneShallowArrayStub::CLONE_ELEMENTS
+ : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
FastCloneShallowArrayStub stub(mode, length);
__ CallStub(&stub);
}
@@ -1487,13 +1604,28 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
}
VisitForAccumulatorValue(subexpr);
- // Store the subexpression value in the array's elements.
- __ mov(ebx, Operand(esp, 0)); // Copy of array literal.
- __ mov(edi, FieldOperand(ebx, JSObject::kMapOffset));
- __ mov(ecx, Immediate(Smi::FromInt(i)));
- __ mov(edx, Immediate(Smi::FromInt(expr->literal_index())));
- StoreArrayLiteralElementStub stub;
- __ CallStub(&stub);
+ if (constant_elements_kind == FAST_ELEMENTS) {
+ // Fast-case array literal with ElementsKind of FAST_ELEMENTS, they cannot
+ // transition and don't need to call the runtime stub.
+ int offset = FixedArray::kHeaderSize + (i * kPointerSize);
+ __ mov(ebx, Operand(esp, 0)); // Copy of array literal.
+ __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
+ // Store the subexpression value in the array's elements.
+ __ mov(FieldOperand(ebx, offset), result_register());
+ // Update the write barrier for the array store.
+ __ RecordWriteField(ebx, offset, result_register(), ecx,
+ kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET,
+ INLINE_SMI_CHECK);
+ } else {
+ // Store the subexpression value in the array's elements.
+ __ mov(ebx, Operand(esp, 0)); // Copy of array literal.
+ __ mov(edi, FieldOperand(ebx, JSObject::kMapOffset));
+ __ mov(ecx, Immediate(Smi::FromInt(i)));
+ __ mov(edx, Immediate(Smi::FromInt(expr->literal_index())));
+ StoreArrayLiteralElementStub stub;
+ __ CallStub(&stub);
+ }
PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
}
@@ -1626,14 +1758,14 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
ASSERT(!key->handle()->IsSmi());
__ mov(ecx, Immediate(key->handle()));
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}
@@ -1654,7 +1786,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ bind(&stub_call);
__ mov(eax, ecx);
BinaryOpStub stub(op, mode);
- __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
__ jmp(&done, Label::kNear);
@@ -1739,13 +1871,13 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
__ pop(edx);
BinaryOpStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
- __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
context()->Plug(eax);
}
-void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
+void FullCodeGenerator::EmitAssignment(Expression* expr) {
// Invalid left-hand sides are rewritten to have a 'throw
// ReferenceError' on the left-hand side.
if (!expr->IsValidLeftHandSide()) {
@@ -1777,10 +1909,10 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
__ mov(edx, eax);
__ pop(eax); // Restore value.
__ mov(ecx, prop->key()->AsLiteral()->handle());
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ call(ic);
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic);
break;
}
case KEYED_PROPERTY: {
@@ -1790,14 +1922,13 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
__ mov(ecx, eax);
__ pop(edx);
__ pop(eax); // Restore value.
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
- : isolate()->builtins()->KeyedStoreIC_Initialize();
- __ call(ic);
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize()
+ : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
+ CallIC(ic);
break;
}
}
- PrepareForBailoutForId(bailout_ast_id, TOS_REG);
context()->Plug(eax);
}
@@ -1807,11 +1938,11 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
if (var->IsUnallocated()) {
// Global var, const, or let.
__ mov(ecx, var->name());
- __ mov(edx, var->is_qml_global()?QmlGlobalObjectOperand():GlobalObjectOperand());
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ __ mov(edx, GlobalObjectOperand());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (op == Token::INIT_CONST) {
// Const initializers need a write barrier.
@@ -1842,7 +1973,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
__ push(eax); // Value.
__ push(esi); // Context.
__ push(Immediate(var->name()));
- __ push(Immediate(Smi::FromInt(strict_mode_flag())));
+ __ push(Immediate(Smi::FromInt(language_mode())));
__ CallRuntime(Runtime::kStoreContextSlot, 4);
} else {
ASSERT(var->IsStackAllocated() || var->IsContextSlot());
@@ -1885,7 +2016,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
__ push(eax); // Value.
__ push(esi); // Context.
__ push(Immediate(var->name()));
- __ push(Immediate(Smi::FromInt(strict_mode_flag())));
+ __ push(Immediate(Smi::FromInt(language_mode())));
__ CallRuntime(Runtime::kStoreContextSlot, 4);
}
}
@@ -1917,10 +2048,10 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
} else {
__ pop(edx);
}
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, expr->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
@@ -1957,10 +2088,10 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
}
// Record source code position before IC call.
SetSourcePosition(expr->position());
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
- : isolate()->builtins()->KeyedStoreIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, expr->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize()
+ : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
@@ -1994,6 +2125,16 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
}
+void FullCodeGenerator::CallIC(Handle<Code> code,
+ RelocInfo::Mode rmode,
+ unsigned ast_id) {
+ ic_total_count_++;
+ __ call(code, rmode, ast_id);
+}
+
+
+
+
void FullCodeGenerator::EmitCallWithIC(Call* expr,
Handle<Object> name,
RelocInfo::Mode mode) {
@@ -2010,7 +2151,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
SetSourcePosition(expr->position());
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- __ call(ic, mode, expr->id());
+ CallIC(ic, mode, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
@@ -2042,7 +2183,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
Handle<Code> ic =
isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
__ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize)); // Key.
- __ call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
@@ -2063,29 +2204,20 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
SetSourcePosition(expr->position());
// Record call targets in unoptimized code, but not in the snapshot.
- // TODO(1789): Reenable temporarily disabled recording CallFunctionStub
- // when the issue is fixed.
- bool record_call_target = false && !Serializer::enabled();
- if (record_call_target) {
+ if (!Serializer::enabled()) {
flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
- }
- CallFunctionStub stub(arg_count, flags);
- __ CallStub(&stub, expr->id());
- if (record_call_target) {
- // There is a one element cache in the instruction stream.
-#ifdef DEBUG
- int return_site_offset = masm()->pc_offset();
-#endif
Handle<Object> uninitialized =
- CallFunctionStub::UninitializedSentinel(isolate());
+ TypeFeedbackCells::UninitializedSentinel(isolate());
Handle<JSGlobalPropertyCell> cell =
isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
- __ test(eax, Immediate(cell));
- // Patching code in the stub assumes the opcode is 1 byte and there is
- // word for a pointer in the operand.
- ASSERT(masm()->pc_offset() - return_site_offset >= 1 + kPointerSize);
+ RecordTypeFeedbackCell(expr->id(), cell);
+ __ mov(ebx, cell);
}
+ CallFunctionStub stub(arg_count, flags);
+ __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
+ __ CallStub(&stub, expr->id());
+
RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
@@ -2103,16 +2235,13 @@ void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
// Push the receiver of the enclosing function.
__ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
+ // Push the language mode.
+ __ push(Immediate(Smi::FromInt(language_mode())));
- // Push the strict mode flag. In harmony mode every eval call
- // is a strict mode eval call.
- StrictModeFlag strict_mode =
- FLAG_harmony_scoping ? kStrictMode : strict_mode_flag();
- __ push(Immediate(Smi::FromInt(strict_mode)));
-
- // Push the qml mode flag
- __ push(Immediate(Smi::FromInt(is_qml_mode())));
+ // Push the start position of the scope the calls resides in.
+ __ push(Immediate(Smi::FromInt(scope()->start_position())));
+ // Do the runtime call.
__ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
}
@@ -2157,6 +2286,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// Record source position for debugger.
SetSourcePosition(expr->position());
CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
+ __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
// Restore context register.
@@ -2165,7 +2295,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
} else if (proxy != NULL && proxy->var()->IsUnallocated()) {
// Push global object as receiver for the call IC.
- __ push(proxy->var()->is_qml_global()?QmlGlobalObjectOperand():GlobalObjectOperand());
+ __ push(GlobalObjectOperand());
EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
} else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
@@ -2261,9 +2391,23 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ SafeSet(eax, Immediate(arg_count));
__ mov(edi, Operand(esp, arg_count * kPointerSize));
- Handle<Code> construct_builtin =
- isolate()->builtins()->JSConstructCall();
- __ call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
+ // Record call targets in unoptimized code, but not in the snapshot.
+ CallFunctionFlags flags;
+ if (!Serializer::enabled()) {
+ flags = RECORD_CALL_TARGET;
+ Handle<Object> uninitialized =
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
+ RecordTypeFeedbackCell(expr->id(), cell);
+ __ mov(ebx, cell);
+ } else {
+ flags = NO_CALL_FUNCTION_FLAGS;
+ }
+
+ CallConstructStub stub(flags);
+ __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+ PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(eax);
}
@@ -2811,6 +2955,48 @@ void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
}
+void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT(args->length() == 2);
+ ASSERT_NE(NULL, args->at(1)->AsLiteral());
+ Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
+
+ VisitForAccumulatorValue(args->at(0)); // Load the object.
+
+ Label runtime, done;
+ Register object = eax;
+ Register result = eax;
+ Register scratch = ecx;
+
+#ifdef DEBUG
+ __ AbortIfSmi(object);
+ __ CmpObjectType(object, JS_DATE_TYPE, scratch);
+ __ Assert(equal, "Trying to get date field from non-date.");
+#endif
+
+ if (index->value() == 0) {
+ __ mov(result, FieldOperand(object, JSDate::kValueOffset));
+ } else {
+ if (index->value() < JSDate::kFirstUncachedField) {
+ ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
+ __ mov(scratch, Operand::StaticVariable(stamp));
+ __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
+ __ j(not_equal, &runtime, Label::kNear);
+ __ mov(result, FieldOperand(object, JSDate::kValueOffset +
+ kPointerSize * index->value()));
+ __ jmp(&done);
+ }
+ __ bind(&runtime);
+ __ PrepareCallCFunction(2, scratch);
+ __ mov(Operand(esp, 0), object);
+ __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
+ __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
+ __ bind(&done);
+ }
+ context()->Plug(result);
+}
+
+
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
// Load the arguments on the stack and call the runtime function.
ZoneList<Expression*>* args = expr->arguments();
@@ -2819,7 +3005,7 @@ void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
VisitForStackValue(args->at(1));
if (CpuFeatures::IsSupported(SSE2)) {
- MathPowStub stub;
+ MathPowStub stub(MathPowStub::ON_STACK);
__ CallStub(&stub);
} else {
__ CallRuntime(Runtime::kMath_pow, 2);
@@ -2898,7 +3084,6 @@ void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
Register object = ebx;
Register index = eax;
- Register scratch = ecx;
Register result = edx;
__ pop(object);
@@ -2908,7 +3093,6 @@ void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
Label done;
StringCharCodeAtGenerator generator(object,
index,
- scratch,
result,
&need_conversion,
&need_conversion,
@@ -2946,8 +3130,7 @@ void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
Register object = ebx;
Register index = eax;
- Register scratch1 = ecx;
- Register scratch2 = edx;
+ Register scratch = edx;
Register result = eax;
__ pop(object);
@@ -2957,8 +3140,7 @@ void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
Label done;
StringCharAtGenerator generator(object,
index,
- scratch1,
- scratch2,
+ scratch,
result,
&need_conversion,
&need_conversion,
@@ -3037,6 +3219,18 @@ void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
}
+void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
+ // Load the argument on the stack and call the stub.
+ TranscendentalCacheStub stub(TranscendentalCache::TAN,
+ TranscendentalCacheStub::TAGGED);
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT(args->length() == 1);
+ VisitForStackValue(args->at(0));
+ __ CallStub(&stub);
+ context()->Plug(eax);
+}
+
+
void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
// Load the argument on the stack and call the stub.
TranscendentalCacheStub stub(TranscendentalCache::LOG,
@@ -3069,12 +3263,24 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
}
VisitForAccumulatorValue(args->last()); // Function.
+ // Check for proxy.
+ Label proxy, done;
+ __ CmpObjectType(eax, JS_FUNCTION_PROXY_TYPE, ebx);
+ __ j(equal, &proxy);
+
// InvokeFunction requires the function in edi. Move it in there.
__ mov(edi, result_register());
ParameterCount count(arg_count);
__ InvokeFunction(edi, count, CALL_FUNCTION,
NullCallWrapper(), CALL_AS_METHOD);
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ __ jmp(&done);
+
+ __ bind(&proxy);
+ __ push(eax);
+ __ CallRuntime(Runtime::kCall, args->length());
+ __ bind(&done);
+
context()->Plug(eax);
}
@@ -3487,7 +3693,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// One-character separator case
__ bind(&one_char_separator);
- // Replace separator with its ascii character value.
+ // Replace separator with its ASCII character value.
__ mov_b(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
__ mov_b(separator_operand, scratch);
@@ -3607,7 +3813,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- __ call(ic, mode, expr->id());
+ CallIC(ic, mode, expr->id());
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
} else {
@@ -3628,16 +3834,18 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
if (property != NULL) {
VisitForStackValue(property->obj());
VisitForStackValue(property->key());
- __ push(Immediate(Smi::FromInt(strict_mode_flag())));
+ StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
+ ? kNonStrictMode : kStrictMode;
+ __ push(Immediate(Smi::FromInt(strict_mode_flag)));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(eax);
} else if (proxy != NULL) {
Variable* var = proxy->var();
// Delete of an unqualified identifier is disallowed in strict mode
// but "delete this" is allowed.
- ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
+ ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
if (var->IsUnallocated()) {
- __ push(var->is_qml_global() ? QmlGlobalObjectOperand() : GlobalObjectOperand());
+ __ push(GlobalObjectOperand());
__ push(Immediate(var->name()));
__ push(Immediate(Smi::FromInt(kNonStrictMode)));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
@@ -3701,7 +3909,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
if (context()->IsAccumulatorValue()) {
__ mov(eax, isolate()->factory()->true_value());
} else {
- __ push(isolate()->factory()->true_value());
+ __ Push(isolate()->factory()->true_value());
}
__ jmp(&done, Label::kNear);
__ bind(&materialize_false);
@@ -3709,7 +3917,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
if (context()->IsAccumulatorValue()) {
__ mov(eax, isolate()->factory()->false_value());
} else {
- __ push(isolate()->factory()->false_value());
+ __ Push(isolate()->factory()->false_value());
}
__ bind(&done);
}
@@ -3763,7 +3971,7 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
// accumulator register eax.
VisitForAccumulatorValue(expr->expression());
SetSourcePosition(expr->position());
- __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
context()->Plug(eax);
}
@@ -3883,7 +4091,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ mov(edx, eax);
__ mov(eax, Immediate(Smi::FromInt(1)));
BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
- __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
patch_site.EmitPatchInfo();
__ bind(&done);
@@ -3914,10 +4122,10 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
case NAMED_PROPERTY: {
__ mov(ecx, prop->key()->AsLiteral()->handle());
__ pop(edx);
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, expr->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -3931,10 +4139,10 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
case KEYED_PROPERTY: {
__ pop(ecx);
__ pop(edx);
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
- : isolate()->builtins()->KeyedStoreIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, expr->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize()
+ : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
// Result is on the stack
@@ -3957,12 +4165,12 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
if (proxy != NULL && proxy->var()->IsUnallocated()) {
Comment cmnt(masm_, "Global variable");
- __ mov(eax, proxy->var()->is_qml_global() ? QmlGlobalObjectOperand() : GlobalObjectOperand());
+ __ mov(eax, GlobalObjectOperand());
__ mov(ecx, Immediate(proxy->name()));
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
// Use a regular load, not a contextual load, to avoid a reference
// error.
- __ call(ic);
+ CallIC(ic);
PrepareForBailout(expr, TOS_REG);
context()->Plug(eax);
} else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
@@ -4142,7 +4350,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
- __ call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
diff --git a/src/3rdparty/v8/src/ia32/ic-ia32.cc b/src/3rdparty/v8/src/ia32/ic-ia32.cc
index 1168932..33f247a 100644
--- a/src/3rdparty/v8/src/ia32/ic-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/ic-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -473,7 +473,6 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
Counters* counters = isolate->counters();
__ IncrementCounter(counters->keyed_load_generic_smi(), 1);
__ ret(0);
-
__ bind(&check_number_dictionary);
__ mov(ebx, eax);
__ SmiUntag(ebx);
@@ -535,14 +534,34 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ mov(edi, FieldOperand(eax, String::kHashFieldOffset));
__ shr(edi, String::kHashShift);
__ xor_(ecx, edi);
- __ and_(ecx, KeyedLookupCache::kCapacityMask);
+ __ and_(ecx, KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
// Load the key (consisting of map and symbol) from the cache and
// check for match.
+ Label load_in_object_property;
+ static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
+ Label hit_on_nth_entry[kEntriesPerBucket];
ExternalReference cache_keys =
ExternalReference::keyed_lookup_cache_keys(masm->isolate());
- __ mov(edi, ecx);
+
+ for (int i = 0; i < kEntriesPerBucket - 1; i++) {
+ Label try_next_entry;
+ __ mov(edi, ecx);
+ __ shl(edi, kPointerSizeLog2 + 1);
+ if (i != 0) {
+ __ add(edi, Immediate(kPointerSize * i * 2));
+ }
+ __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
+ __ j(not_equal, &try_next_entry);
+ __ add(edi, Immediate(kPointerSize));
+ __ cmp(eax, Operand::StaticArray(edi, times_1, cache_keys));
+ __ j(equal, &hit_on_nth_entry[i]);
+ __ bind(&try_next_entry);
+ }
+
+ __ lea(edi, Operand(ecx, 1));
__ shl(edi, kPointerSizeLog2 + 1);
+ __ add(edi, Immediate(kPointerSize * (kEntriesPerBucket - 1) * 2));
__ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
__ j(not_equal, &slow);
__ add(edi, Immediate(kPointerSize));
@@ -556,13 +575,25 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// ecx : lookup cache index
ExternalReference cache_field_offsets =
ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
- __ mov(edi,
- Operand::StaticArray(ecx, times_pointer_size, cache_field_offsets));
- __ movzx_b(ecx, FieldOperand(ebx, Map::kInObjectPropertiesOffset));
- __ sub(edi, ecx);
- __ j(above_equal, &property_array_property);
+
+ // Hit on nth entry.
+ for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
+ __ bind(&hit_on_nth_entry[i]);
+ if (i != 0) {
+ __ add(ecx, Immediate(i));
+ }
+ __ mov(edi,
+ Operand::StaticArray(ecx, times_pointer_size, cache_field_offsets));
+ __ movzx_b(ecx, FieldOperand(ebx, Map::kInObjectPropertiesOffset));
+ __ sub(edi, ecx);
+ __ j(above_equal, &property_array_property);
+ if (i != 0) {
+ __ jmp(&load_in_object_property);
+ }
+ }
// Load in-object property.
+ __ bind(&load_in_object_property);
__ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceSizeOffset));
__ add(ecx, edi);
__ mov(eax, FieldOperand(edx, ecx, times_pointer_size, 0));
@@ -606,14 +637,12 @@ void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
Register receiver = edx;
Register index = eax;
- Register scratch1 = ebx;
- Register scratch2 = ecx;
+ Register scratch = ecx;
Register result = eax;
StringCharAtGenerator char_at_generator(receiver,
index,
- scratch1,
- scratch2,
+ scratch,
result,
&miss, // When not a string.
&miss, // When not a number.
@@ -736,7 +765,8 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
// -----------------------------------
Label slow, fast_object_with_map_check, fast_object_without_map_check;
Label fast_double_with_map_check, fast_double_without_map_check;
- Label check_if_double_array, array, extra;
+ Label check_if_double_array, array, extra, transition_smi_elements;
+ Label finish_object_store, non_double_value, transition_double_elements;
// Check that the object isn't a smi.
__ JumpIfSmi(edx, &slow);
@@ -833,11 +863,12 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ ret(0);
__ bind(&non_smi_value);
- // Escape to slow case when writing non-smi into smi-only array.
+ // Escape to elements kind transition case.
__ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
- __ CheckFastObjectElements(edi, &slow, Label::kNear);
+ __ CheckFastObjectElements(edi, &transition_smi_elements);
// Fast elements array, store the value to the elements backing store.
+ __ bind(&finish_object_store);
__ mov(CodeGenerator::FixedArrayElementOperand(ebx, ecx), eax);
// Update write barrier for the elements array address.
__ mov(edx, eax); // Preserve the value which is returned.
@@ -853,8 +884,54 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ bind(&fast_double_without_map_check);
// If the value is a number, store it as a double in the FastDoubleElements
// array.
- __ StoreNumberToDoubleElements(eax, ebx, ecx, edx, xmm0, &slow, false);
+ __ StoreNumberToDoubleElements(eax, ebx, ecx, edx, xmm0,
+ &transition_double_elements, false);
__ ret(0);
+
+ __ bind(&transition_smi_elements);
+ __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
+
+ // Transition the array appropriately depending on the value type.
+ __ CheckMap(eax,
+ masm->isolate()->factory()->heap_number_map(),
+ &non_double_value,
+ DONT_DO_SMI_CHECK);
+
+ // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
+ // FAST_DOUBLE_ELEMENTS and complete the store.
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_DOUBLE_ELEMENTS,
+ ebx,
+ edi,
+ &slow);
+ ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
+ __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
+ __ jmp(&fast_double_without_map_check);
+
+ __ bind(&non_double_value);
+ // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ ebx,
+ edi,
+ &slow);
+ ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
+ __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
+ __ jmp(&finish_object_store);
+
+ __ bind(&transition_double_elements);
+ // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
+ // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
+ // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
+ __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
+ __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
+ FAST_ELEMENTS,
+ ebx,
+ edi,
+ &slow);
+ ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
+ __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
+ __ jmp(&finish_object_store);
}
@@ -1376,10 +1453,10 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
// -- esp[0] : return address
// -----------------------------------
//
- // This accepts as a receiver anything JSObject::SetElementsLength accepts
+ // This accepts as a receiver anything JSArray::SetElementsLength accepts
// (currently anything except for external arrays which means anything with
- // elements of FixedArray type.), but currently is restricted to JSArray.
- // Value must be a number, but only smis are accepted as the most common case.
+ // elements of FixedArray type). Value must be a number, but only smis are
+ // accepted as the most common case.
Label miss;
@@ -1401,6 +1478,13 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
__ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
__ j(not_equal, &miss);
+ // Check that the array has fast properties, otherwise the length
+ // property might have been redefined.
+ __ mov(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset));
+ __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset),
+ Heap::kHashTableMapRootIndex);
+ __ j(equal, &miss);
+
// Check that value is a smi.
__ JumpIfNotSmi(value, &miss);
@@ -1555,6 +1639,9 @@ void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
__ pop(ebx);
__ push(edx);
__ push(ebx); // return address
+ // Leaving the code managed by the register allocator and return to the
+ // convention of using esi as context register.
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
}
@@ -1578,6 +1665,9 @@ void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
__ pop(ebx);
__ push(edx);
__ push(ebx); // return address
+ // Leaving the code managed by the register allocator and return to the
+ // convention of using esi as context register.
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
}
@@ -1627,6 +1717,9 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
rewritten = stub.GetCode();
} else {
ICCompareStub stub(op_, state);
+ if (state == KNOWN_OBJECTS) {
+ stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
+ }
rewritten = stub.GetCode();
}
set_target(*rewritten);
diff --git a/src/3rdparty/v8/src/ia32/lithium-codegen-ia32.cc b/src/3rdparty/v8/src/ia32/lithium-codegen-ia32.cc
index 2a23bf1..72f59d0 100644
--- a/src/3rdparty/v8/src/ia32/lithium-codegen-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/lithium-codegen-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -33,6 +33,7 @@
#include "code-stubs.h"
#include "deoptimizer.h"
#include "stub-cache.h"
+#include "codegen.h"
namespace v8 {
namespace internal {
@@ -44,29 +45,29 @@ class SafepointGenerator : public CallWrapper {
public:
SafepointGenerator(LCodeGen* codegen,
LPointerMap* pointers,
- int deoptimization_index)
+ Safepoint::DeoptMode mode)
: codegen_(codegen),
pointers_(pointers),
- deoptimization_index_(deoptimization_index) {}
+ deopt_mode_(mode) {}
virtual ~SafepointGenerator() { }
virtual void BeforeCall(int call_size) const {}
virtual void AfterCall() const {
- codegen_->RecordSafepoint(pointers_, deoptimization_index_);
+ codegen_->RecordSafepoint(pointers_, deopt_mode_);
}
private:
LCodeGen* codegen_;
LPointerMap* pointers_;
- int deoptimization_index_;
+ Safepoint::DeoptMode deopt_mode_;
};
#define __ masm()->
bool LCodeGen::GenerateCode() {
- HPhase phase("Code generation", chunk());
+ HPhase phase("Z_Code generation", chunk());
ASSERT(is_unused());
status_ = GENERATING;
CpuFeatures::Scope scope(SSE2);
@@ -78,9 +79,6 @@ bool LCodeGen::GenerateCode() {
// the frame (that is done in GeneratePrologue).
FrameScope frame_scope(masm_, StackFrame::MANUAL);
- dynamic_frame_alignment_ = chunk()->num_double_slots() > 2 ||
- info()->osr_ast_id() != AstNode::kNoNumber;
-
return GeneratePrologue() &&
GenerateBody() &&
GenerateDeferredCode() &&
@@ -144,7 +142,7 @@ bool LCodeGen::GeneratePrologue() {
// with undefined when called as functions (without an explicit
// receiver object). ecx is zero for method calls and non-zero for
// function calls.
- if (info_->is_strict_mode() || info_->is_native()) {
+ if (!info_->is_classic_mode() || info_->is_native()) {
Label ok;
__ test(ecx, Operand(ecx));
__ j(zero, &ok, Label::kNear);
@@ -155,29 +153,6 @@ bool LCodeGen::GeneratePrologue() {
__ bind(&ok);
}
- if (dynamic_frame_alignment_) {
- Label do_not_pad, align_loop;
- STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
- // Align esp to a multiple of 2 * kPointerSize.
- __ test(esp, Immediate(kPointerSize));
- __ j(zero, &do_not_pad, Label::kNear);
- __ push(Immediate(0));
- __ mov(ebx, esp);
- // Copy arguments, receiver, and return address.
- __ mov(ecx, Immediate(scope()->num_parameters() + 2));
-
- __ bind(&align_loop);
- __ mov(eax, Operand(ebx, 1 * kPointerSize));
- __ mov(Operand(ebx, 0), eax);
- __ add(Operand(ebx), Immediate(kPointerSize));
- __ dec(ecx);
- __ j(not_zero, &align_loop, Label::kNear);
- __ mov(Operand(ebx, 0),
- Immediate(isolate()->factory()->frame_alignment_marker()));
-
- __ bind(&do_not_pad);
- }
-
__ push(ebp); // Caller's frame pointer.
__ mov(ebp, esp);
__ push(esi); // Callee's context.
@@ -211,18 +186,17 @@ bool LCodeGen::GeneratePrologue() {
// Possibly allocate a local context.
int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
- if (heap_slots > 0 ||
- (scope()->is_qml_mode() && scope()->is_global_scope())) {
+ if (heap_slots > 0) {
Comment(";;; Allocate local context");
// Argument to NewContext is the function, which is still in edi.
__ push(edi);
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub((heap_slots < 0)?0:heap_slots);
+ FastNewContextStub stub(heap_slots);
__ CallStub(&stub);
} else {
__ CallRuntime(Runtime::kNewFunctionContext, 1);
}
- RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
+ RecordSafepoint(Safepoint::kNoLazyDeopt);
// Context is returned in both eax and esi. It replaces the context
// passed to us. It's saved in the stack and kept live in esi.
__ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
@@ -277,19 +251,11 @@ bool LCodeGen::GenerateBody() {
instr->CompileToNative(this);
}
}
+ EnsureSpaceForLazyDeopt();
return !is_aborted();
}
-LInstruction* LCodeGen::GetNextInstruction() {
- if (current_instruction_ < instructions_->length() - 1) {
- return instructions_->at(current_instruction_ + 1);
- } else {
- return NULL;
- }
-}
-
-
bool LCodeGen::GenerateDeferredCode() {
ASSERT(is_generating());
if (deferred_.length() > 0) {
@@ -302,13 +268,6 @@ bool LCodeGen::GenerateDeferredCode() {
code->Generate();
__ jmp(code->exit());
}
-
- // Pad code to ensure that the last piece of deferred code have
- // room for lazy bailout.
- while ((masm()->pc_offset() - LastSafepointEnd())
- < Deoptimizer::patch_size()) {
- __ nop();
- }
}
// Deferred code is the last part of the instruction sequence. Mark
@@ -356,24 +315,21 @@ int LCodeGen::ToInteger32(LConstantOperand* op) const {
}
+Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
+ Handle<Object> literal = chunk_->LookupLiteral(op);
+ ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
+ return literal;
+}
+
+
double LCodeGen::ToDouble(LConstantOperand* op) const {
Handle<Object> value = chunk_->LookupLiteral(op);
return value->Number();
}
-Immediate LCodeGen::ToImmediate(LOperand* op) {
- LConstantOperand* const_op = LConstantOperand::cast(op);
- Handle<Object> literal = chunk_->LookupLiteral(const_op);
- Representation r = chunk_->LookupLiteralRepresentation(const_op);
- if (r.IsInteger32()) {
- ASSERT(literal->IsNumber());
- return Immediate(static_cast<int32_t>(literal->Number()));
- } else if (r.IsDouble()) {
- Abort("unsupported double immediate");
- }
- ASSERT(r.IsTagged());
- return Immediate(literal);
+bool LCodeGen::IsInteger32(LConstantOperand* op) const {
+ return chunk_->LookupLiteralRepresentation(op).IsInteger32();
}
@@ -412,7 +368,19 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
WriteTranslation(environment->outer(), translation);
int closure_id = DefineDeoptimizationLiteral(environment->closure());
- translation->BeginFrame(environment->ast_id(), closure_id, height);
+ switch (environment->frame_type()) {
+ case JS_FUNCTION:
+ translation->BeginJSFrame(environment->ast_id(), closure_id, height);
+ break;
+ case JS_CONSTRUCT:
+ translation->BeginConstructStubFrame(closure_id, translation_size);
+ break;
+ case ARGUMENTS_ADAPTOR:
+ translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
+ break;
+ default:
+ UNREACHABLE();
+ }
for (int i = 0; i < translation_size; ++i) {
LOperand* value = environment->values()->at(i);
// spilled_registers_ and spilled_double_registers_ are either
@@ -487,10 +455,8 @@ void LCodeGen::CallCodeGeneric(Handle<Code> code,
ASSERT(instr != NULL);
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
-
__ call(code, mode);
-
- RegisterLazyDeoptimization(instr, safepoint_mode);
+ RecordSafepointWithLazyDeopt(instr, safepoint_mode);
// Signal that we don't inline smi code before these stubs in the
// optimizing code generator.
@@ -518,7 +484,7 @@ void LCodeGen::CallRuntime(const Runtime::Function* fun,
__ CallRuntime(fun, argc);
- RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
+ RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
}
@@ -535,44 +501,19 @@ void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
} else if (context->IsConstantOperand()) {
Handle<Object> literal =
chunk_->LookupLiteral(LConstantOperand::cast(context));
- LoadHeapObject(esi, Handle<Context>::cast(literal));
+ __ LoadHeapObject(esi, Handle<Context>::cast(literal));
} else {
UNREACHABLE();
}
__ CallRuntimeSaveDoubles(id);
RecordSafepointWithRegisters(
- instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
-}
-
-
-void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
- SafepointMode safepoint_mode) {
- // Create the environment to bailout to. If the call has side effects
- // execution has to continue after the call otherwise execution can continue
- // from a previous bailout point repeating the call.
- LEnvironment* deoptimization_environment;
- if (instr->HasDeoptimizationEnvironment()) {
- deoptimization_environment = instr->deoptimization_environment();
- } else {
- deoptimization_environment = instr->environment();
- }
-
- RegisterEnvironmentForDeoptimization(deoptimization_environment);
- if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
- RecordSafepoint(instr->pointer_map(),
- deoptimization_environment->deoptimization_index());
- } else {
- ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
- RecordSafepointWithRegisters(
- instr->pointer_map(),
- 0,
- deoptimization_environment->deoptimization_index());
- }
+ instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
}
-void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
+void LCodeGen::RegisterEnvironmentForDeoptimization(
+ LEnvironment* environment, Safepoint::DeoptMode mode) {
if (!environment->HasBeenRegistered()) {
// Physical stack frame layout:
// -x ............. -4 0 ..................................... y
@@ -588,24 +529,30 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
// |>------------ translation_size ------------<|
int frame_count = 0;
+ int jsframe_count = 0;
for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
++frame_count;
+ if (e->frame_type() == JS_FUNCTION) {
+ ++jsframe_count;
+ }
}
- Translation translation(&translations_, frame_count);
+ Translation translation(&translations_, frame_count, jsframe_count);
WriteTranslation(environment, &translation);
int deoptimization_index = deoptimizations_.length();
- environment->Register(deoptimization_index, translation.index());
+ int pc_offset = masm()->pc_offset();
+ environment->Register(deoptimization_index,
+ translation.index(),
+ (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
deoptimizations_.Add(environment);
}
}
void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
- RegisterEnvironmentForDeoptimization(environment);
+ RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
ASSERT(environment->HasBeenRegistered());
int id = environment->deoptimization_index();
Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
- ASSERT(entry != NULL);
if (entry == NULL) {
Abort("bailout was not prepared");
return;
@@ -656,7 +603,6 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
int length = deoptimizations_.length();
if (length == 0) return;
- ASSERT(FLAG_deopt);
Handle<DeoptimizationInputData> data =
factory()->NewDeoptimizationInputData(length, TENURED);
@@ -681,6 +627,7 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
data->SetArgumentsStackHeight(i,
Smi::FromInt(env->arguments_stack_height()));
+ data->SetPc(i, Smi::FromInt(env->pc_offset()));
}
code->set_deoptimization_data(*data);
}
@@ -712,15 +659,27 @@ void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
}
+void LCodeGen::RecordSafepointWithLazyDeopt(
+ LInstruction* instr, SafepointMode safepoint_mode) {
+ if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
+ RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
+ } else {
+ ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
+ RecordSafepointWithRegisters(
+ instr->pointer_map(), 0, Safepoint::kLazyDeopt);
+ }
+}
+
+
void LCodeGen::RecordSafepoint(
LPointerMap* pointers,
Safepoint::Kind kind,
int arguments,
- int deoptimization_index) {
+ Safepoint::DeoptMode deopt_mode) {
ASSERT(kind == expected_safepoint_kind_);
const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
- Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
- kind, arguments, deoptimization_index);
+ Safepoint safepoint =
+ safepoints_.DefineSafepoint(masm(), kind, arguments, deopt_mode);
for (int i = 0; i < operands->length(); i++) {
LOperand* pointer = operands->at(i);
if (pointer->IsStackSlot()) {
@@ -733,22 +692,21 @@ void LCodeGen::RecordSafepoint(
void LCodeGen::RecordSafepoint(LPointerMap* pointers,
- int deoptimization_index) {
- RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
+ Safepoint::DeoptMode mode) {
+ RecordSafepoint(pointers, Safepoint::kSimple, 0, mode);
}
-void LCodeGen::RecordSafepoint(int deoptimization_index) {
+void LCodeGen::RecordSafepoint(Safepoint::DeoptMode mode) {
LPointerMap empty_pointers(RelocInfo::kNoPosition);
- RecordSafepoint(&empty_pointers, deoptimization_index);
+ RecordSafepoint(&empty_pointers, mode);
}
void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
int arguments,
- int deoptimization_index) {
- RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
- deoptimization_index);
+ Safepoint::DeoptMode mode) {
+ RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, mode);
}
@@ -783,12 +741,6 @@ void LCodeGen::DoGap(LGap* gap) {
LParallelMove* move = gap->GetParallelMove(inner_pos);
if (move != NULL) DoParallelMove(move);
}
-
- LInstruction* next = GetNextInstruction();
- if (next != NULL && next->IsLazyBailout()) {
- int pc = masm()->pc_offset();
- safepoints_.SetPcAfterGap(pc);
- }
}
@@ -1193,7 +1145,7 @@ void LCodeGen::DoSubI(LSubI* instr) {
ASSERT(left->Equals(instr->result()));
if (right->IsConstantOperand()) {
- __ sub(ToOperand(left), ToImmediate(right));
+ __ sub(ToOperand(left), ToInteger32Immediate(right));
} else {
__ sub(ToRegister(left), ToOperand(right));
}
@@ -1252,7 +1204,7 @@ void LCodeGen::DoConstantT(LConstantT* instr) {
Register reg = ToRegister(instr->result());
Handle<Object> handle = instr->value();
if (handle->IsHeapObject()) {
- LoadHeapObject(reg, Handle<HeapObject>::cast(handle));
+ __ LoadHeapObject(reg, Handle<HeapObject>::cast(handle));
} else {
__ Set(reg, Immediate(handle));
}
@@ -1294,6 +1246,7 @@ void LCodeGen::DoValueOf(LValueOf* instr) {
Register result = ToRegister(instr->result());
Register map = ToRegister(instr->TempAt(0));
ASSERT(input.is(result));
+
Label done;
// If the object is a smi return the object.
__ JumpIfSmi(input, &done, Label::kNear);
@@ -1307,6 +1260,43 @@ void LCodeGen::DoValueOf(LValueOf* instr) {
}
+void LCodeGen::DoDateField(LDateField* instr) {
+ Register object = ToRegister(instr->InputAt(0));
+ Register result = ToRegister(instr->result());
+ Register scratch = ToRegister(instr->TempAt(0));
+ Smi* index = instr->index();
+ Label runtime, done;
+ ASSERT(object.is(result));
+ ASSERT(object.is(eax));
+
+#ifdef DEBUG
+ __ AbortIfSmi(object);
+ __ CmpObjectType(object, JS_DATE_TYPE, scratch);
+ __ Assert(equal, "Trying to get date field from non-date.");
+#endif
+
+ if (index->value() == 0) {
+ __ mov(result, FieldOperand(object, JSDate::kValueOffset));
+ } else {
+ if (index->value() < JSDate::kFirstUncachedField) {
+ ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
+ __ mov(scratch, Operand::StaticVariable(stamp));
+ __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
+ __ j(not_equal, &runtime, Label::kNear);
+ __ mov(result, FieldOperand(object, JSDate::kValueOffset +
+ kPointerSize * index->value()));
+ __ jmp(&done);
+ }
+ __ bind(&runtime);
+ __ PrepareCallCFunction(2, scratch);
+ __ mov(Operand(esp, 0), object);
+ __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
+ __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
+ __ bind(&done);
+ }
+}
+
+
void LCodeGen::DoBitNotI(LBitNotI* instr) {
LOperand* input = instr->InputAt(0);
ASSERT(input->Equals(instr->result()));
@@ -1332,7 +1322,7 @@ void LCodeGen::DoAddI(LAddI* instr) {
ASSERT(left->Equals(instr->result()));
if (right->IsConstantOperand()) {
- __ add(ToOperand(left), ToImmediate(right));
+ __ add(ToOperand(left), ToInteger32Immediate(right));
} else {
__ add(ToRegister(left), ToOperand(right));
}
@@ -1604,9 +1594,9 @@ void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
__ j(parity_even, chunk_->GetAssemblyLabel(false_block));
} else {
if (right->IsConstantOperand()) {
- __ cmp(ToRegister(left), ToImmediate(right));
+ __ cmp(ToRegister(left), ToInteger32Immediate(right));
} else if (left->IsConstantOperand()) {
- __ cmp(ToOperand(right), ToImmediate(left));
+ __ cmp(ToOperand(right), ToInteger32Immediate(left));
// We transposed the operands. Reverse the condition.
cc = ReverseCondition(cc);
} else {
@@ -1717,6 +1707,31 @@ void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
}
+Condition LCodeGen::EmitIsString(Register input,
+ Register temp1,
+ Label* is_not_string) {
+ __ JumpIfSmi(input, is_not_string);
+
+ Condition cond = masm_->IsObjectStringType(input, temp1, temp1);
+
+ return cond;
+}
+
+
+void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
+ Register reg = ToRegister(instr->InputAt(0));
+ Register temp = ToRegister(instr->TempAt(0));
+
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
+ Label* false_label = chunk_->GetAssemblyLabel(false_block);
+
+ Condition true_cond = EmitIsString(reg, temp, false_label);
+
+ EmitBranch(true_block, false_block, true_cond);
+}
+
+
void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
Operand input = ToOperand(instr->InputAt(0));
@@ -1744,6 +1759,41 @@ void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
}
+static Condition ComputeCompareCondition(Token::Value op) {
+ switch (op) {
+ case Token::EQ_STRICT:
+ case Token::EQ:
+ return equal;
+ case Token::LT:
+ return less;
+ case Token::GT:
+ return greater;
+ case Token::LTE:
+ return less_equal;
+ case Token::GTE:
+ return greater_equal;
+ default:
+ UNREACHABLE();
+ return no_condition;
+ }
+}
+
+
+void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
+ Token::Value op = instr->op();
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
+
+ Handle<Code> ic = CompareIC::GetUninitialized(op);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
+
+ Condition condition = ComputeCompareCondition(op);
+ __ test(eax, Operand(eax));
+
+ EmitBranch(true_block, false_block, condition);
+}
+
+
static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
InstanceType from = instr->from();
InstanceType to = instr->to();
@@ -1807,7 +1857,7 @@ void LCodeGen::DoHasCachedArrayIndexAndBranch(
// Branches to a label or falls through with the answer in the z flag. Trashes
-// the temp registers, but not the input. Only input and temp2 may alias.
+// the temp registers, but not the input.
void LCodeGen::EmitClassOfTest(Label* is_true,
Label* is_false,
Handle<String>class_name,
@@ -1815,7 +1865,8 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
Register temp,
Register temp2) {
ASSERT(!input.is(temp));
- ASSERT(!temp.is(temp2)); // But input and temp2 may be the same register.
+ ASSERT(!input.is(temp2));
+ ASSERT(!temp.is(temp2));
__ JumpIfSmi(input, is_false);
if (class_name->IsEqualTo(CStrVector("Function"))) {
@@ -1836,11 +1887,10 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
// Faster code path to avoid two compares: subtract lower bound from the
// actual type and do a signed compare with the width of the type range.
__ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
- __ mov(temp2, FieldOperand(temp, Map::kInstanceTypeOffset));
+ __ movzx_b(temp2, FieldOperand(temp, Map::kInstanceTypeOffset));
__ sub(Operand(temp2), Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
- __ cmpb(Operand(temp2),
- static_cast<int8_t>(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
- FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
+ __ cmp(Operand(temp2), Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
+ FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
__ j(above, is_false);
}
@@ -1875,12 +1925,7 @@ void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
Register input = ToRegister(instr->InputAt(0));
Register temp = ToRegister(instr->TempAt(0));
Register temp2 = ToRegister(instr->TempAt(1));
- if (input.is(temp)) {
- // Swap.
- Register swapper = temp;
- temp = temp2;
- temp2 = swapper;
- }
+
Handle<String> class_name = instr->hydrogen()->class_name();
int true_block = chunk_->LookupDestination(instr->true_block_id());
@@ -1929,7 +1974,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* instr)
: LDeferredCode(codegen), instr_(instr) { }
virtual void Generate() {
- codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
+ codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
}
virtual LInstruction* instr() { return instr_; }
Label* map_check() { return &map_check_; }
@@ -1955,7 +2000,9 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
Register map = ToRegister(instr->TempAt(0));
__ mov(map, FieldOperand(object, HeapObject::kMapOffset));
__ bind(deferred->map_check()); // Label for calculating code patching.
- __ cmp(map, factory()->the_hole_value()); // Patched to cached map.
+ Handle<JSGlobalPropertyCell> cache_cell =
+ factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+ __ cmp(map, Operand::Cell(cache_cell)); // Patched to cached map.
__ j(not_equal, &cache_miss, Label::kNear);
__ mov(eax, factory()->the_hole_value()); // Patched to either true or false.
__ jmp(&done);
@@ -1984,8 +2031,8 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
}
-void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
- Label* map_check) {
+void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
+ Label* map_check) {
PushSafepointRegistersScope scope(this);
InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
@@ -2003,7 +2050,7 @@ void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
// the stub.
Register temp = ToRegister(instr->TempAt(0));
ASSERT(MacroAssembler::SafepointRegisterStackIndex(temp) == 0);
- __ mov(InstanceofStub::right(), Immediate(instr->function()));
+ __ LoadHeapObject(InstanceofStub::right(), instr->function());
static const int kAdditionalDelta = 13;
int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
__ mov(temp, Immediate(delta));
@@ -2012,31 +2059,15 @@ void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
RelocInfo::CODE_TARGET,
instr,
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
+ ASSERT(instr->HasDeoptimizationEnvironment());
+ LEnvironment* env = instr->deoptimization_environment();
+ safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
+
// Put the result value into the eax slot and restore all registers.
__ StoreToSafepointRegisterSlot(eax, eax);
}
-static Condition ComputeCompareCondition(Token::Value op) {
- switch (op) {
- case Token::EQ_STRICT:
- case Token::EQ:
- return equal;
- case Token::LT:
- return less;
- case Token::GT:
- return greater;
- case Token::LTE:
- return less_equal;
- case Token::GTE:
- return greater_equal;
- default:
- UNREACHABLE();
- return no_condition;
- }
-}
-
-
void LCodeGen::DoCmpT(LCmpT* instr) {
Token::Value op = instr->op();
@@ -2067,17 +2098,6 @@ void LCodeGen::DoReturn(LReturn* instr) {
}
__ mov(esp, ebp);
__ pop(ebp);
- if (dynamic_frame_alignment_) {
- Label aligned;
- // Frame alignment marker (padding) is below arguments,
- // and receiver, so its return-address-relative offset is
- // (num_arguments + 2) words.
- __ cmp(Operand(esp, (GetParameterCount() + 2) * kPointerSize),
- Immediate(factory()->frame_alignment_marker()));
- __ j(not_equal, &aligned);
- __ Ret((GetParameterCount() + 2) * kPointerSize, ecx);
- __ bind(&aligned);
- }
__ Ret((GetParameterCount() + 1) * kPointerSize, ecx);
}
@@ -2106,40 +2126,21 @@ void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
- Register object = ToRegister(instr->TempAt(0));
- Register address = ToRegister(instr->TempAt(1));
- Register value = ToRegister(instr->InputAt(0));
- ASSERT(!value.is(object));
- Handle<JSGlobalPropertyCell> cell_handle(instr->hydrogen()->cell());
-
- int offset = JSGlobalPropertyCell::kValueOffset;
- __ mov(object, Immediate(cell_handle));
+ Register value = ToRegister(instr->value());
+ Handle<JSGlobalPropertyCell> cell_handle = instr->hydrogen()->cell();
// If the cell we are storing to contains the hole it could have
// been deleted from the property dictionary. In that case, we need
// to update the property details in the property dictionary to mark
// it as no longer deleted. We deoptimize in that case.
if (instr->hydrogen()->RequiresHoleCheck()) {
- __ cmp(FieldOperand(object, offset), factory()->the_hole_value());
+ __ cmp(Operand::Cell(cell_handle), factory()->the_hole_value());
DeoptimizeIf(equal, instr->environment());
}
// Store the value.
- __ mov(FieldOperand(object, offset), value);
-
- // Cells are always in the remembered set.
- if (instr->hydrogen()->NeedsWriteBarrier()) {
- HType type = instr->hydrogen()->value()->type();
- SmiCheck check_needed =
- type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
- __ RecordWriteField(object,
- offset,
- value,
- address,
- kSaveFPRegs,
- OMIT_REMEMBERED_SET,
- check_needed);
- }
+ __ mov(Operand::Cell(cell_handle), value);
+ // Cells are always rescanned, so no write barrier here.
}
@@ -2149,7 +2150,7 @@ void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
ASSERT(ToRegister(instr->value()).is(eax));
__ mov(ecx, instr->name());
- Handle<Code> ic = instr->strict_mode()
+ Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
@@ -2160,13 +2161,38 @@ void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Register context = ToRegister(instr->context());
Register result = ToRegister(instr->result());
__ mov(result, ContextOperand(context, instr->slot_index()));
+
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ __ cmp(result, factory()->the_hole_value());
+ if (instr->hydrogen()->DeoptimizesOnHole()) {
+ DeoptimizeIf(equal, instr->environment());
+ } else {
+ Label is_not_hole;
+ __ j(not_equal, &is_not_hole, Label::kNear);
+ __ mov(result, factory()->undefined_value());
+ __ bind(&is_not_hole);
+ }
+ }
}
void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
Register context = ToRegister(instr->context());
Register value = ToRegister(instr->value());
- __ mov(ContextOperand(context, instr->slot_index()), value);
+
+ Label skip_assignment;
+
+ Operand target = ContextOperand(context, instr->slot_index());
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ __ cmp(target, factory()->the_hole_value());
+ if (instr->hydrogen()->DeoptimizesOnHole()) {
+ DeoptimizeIf(equal, instr->environment());
+ } else {
+ __ j(not_equal, &skip_assignment, Label::kNear);
+ }
+ }
+
+ __ mov(target, value);
if (instr->hydrogen()->NeedsWriteBarrier()) {
HType type = instr->hydrogen()->value()->type();
SmiCheck check_needed =
@@ -2181,6 +2207,8 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
EMIT_REMEMBERED_SET,
check_needed);
}
+
+ __ bind(&skip_assignment);
}
@@ -2202,7 +2230,7 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
Handle<String> name) {
LookupResult lookup(isolate());
type->LookupInDescriptors(NULL, *name, &lookup);
- ASSERT(lookup.IsProperty() &&
+ ASSERT(lookup.IsFound() &&
(lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
if (lookup.type() == FIELD) {
int index = lookup.GetLocalFieldIndexFromMap(*type);
@@ -2218,7 +2246,24 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
}
} else {
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
- LoadHeapObject(result, Handle<HeapObject>::cast(function));
+ __ LoadHeapObject(result, function);
+ }
+}
+
+
+void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
+ ASSERT(!operand->IsDoubleRegister());
+ if (operand->IsConstantOperand()) {
+ Handle<Object> object = ToHandle(LConstantOperand::cast(operand));
+ if (object->IsSmi()) {
+ __ Push(Handle<Smi>::cast(object));
+ } else {
+ __ PushHeapObject(Handle<HeapObject>::cast(object));
+ }
+ } else if (operand->IsRegister()) {
+ __ push(ToRegister(operand));
+ } else {
+ __ push(ToOperand(operand));
}
}
@@ -2542,15 +2587,10 @@ void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
}
-void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
+void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
Register receiver = ToRegister(instr->receiver());
Register function = ToRegister(instr->function());
- Register length = ToRegister(instr->length());
- Register elements = ToRegister(instr->elements());
Register scratch = ToRegister(instr->TempAt(0));
- ASSERT(receiver.is(eax)); // Used for parameter count.
- ASSERT(function.is(edi)); // Required by InvokeFunction.
- ASSERT(ToRegister(instr->result()).is(eax));
// If the receiver is null or undefined, we have to pass the global
// object as a receiver to normal functions. Values have to be
@@ -2592,6 +2632,17 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
__ mov(receiver,
FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
__ bind(&receiver_ok);
+}
+
+
+void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
+ Register receiver = ToRegister(instr->receiver());
+ Register function = ToRegister(instr->function());
+ Register length = ToRegister(instr->length());
+ Register elements = ToRegister(instr->elements());
+ ASSERT(receiver.is(eax)); // Used for parameter count.
+ ASSERT(function.is(edi)); // Required by InvokeFunction.
+ ASSERT(ToRegister(instr->result()).is(eax));
// Copy the arguments to this function possibly from the
// adaptor frame below it.
@@ -2617,12 +2668,9 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
__ bind(&invoke);
ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
- SafepointGenerator safepoint_generator(this,
- pointers,
- env->deoptimization_index());
+ SafepointGenerator safepoint_generator(
+ this, pointers, Safepoint::kLazyDeopt);
ParameterCount actual(eax);
__ InvokeFunction(function, actual, CALL_FUNCTION,
safepoint_generator, CALL_AS_METHOD);
@@ -2631,17 +2679,13 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
void LCodeGen::DoPushArgument(LPushArgument* instr) {
LOperand* argument = instr->InputAt(0);
- if (argument->IsConstantOperand()) {
- __ push(ToImmediate(argument));
- } else {
- __ push(ToOperand(argument));
- }
+ EmitPushTaggedOperand(argument);
}
void LCodeGen::DoThisFunction(LThisFunction* instr) {
Register result = ToRegister(instr->result());
- LoadHeapObject(result, instr->hydrogen()->closure());
+ __ LoadHeapObject(result, instr->hydrogen()->closure());
}
@@ -2659,10 +2703,19 @@ void LCodeGen::DoOuterContext(LOuterContext* instr) {
}
+void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
+ ASSERT(ToRegister(instr->InputAt(0)).is(esi));
+ __ push(esi); // The context is the first argument.
+ __ push(Immediate(instr->hydrogen()->pairs()));
+ __ push(Immediate(Smi::FromInt(instr->hydrogen()->flags())));
+ CallRuntime(Runtime::kDeclareGlobals, 3, instr);
+}
+
+
void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
Register context = ToRegister(instr->context());
Register result = ToRegister(instr->result());
- __ mov(result, Operand(context, Context::SlotOffset(instr->qml_global()?Context::QML_GLOBAL_INDEX:Context::GLOBAL_INDEX)));
+ __ mov(result, Operand(context, Context::SlotOffset(Context::GLOBAL_INDEX)));
}
@@ -2677,42 +2730,53 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
int arity,
LInstruction* instr,
CallKind call_kind) {
- // Change context if needed.
- bool change_context =
- (info()->closure()->context() != function->context()) ||
- scope()->contains_with() ||
- (scope()->num_heap_slots() > 0);
- if (change_context) {
- __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
- } else {
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
- }
-
- // Set eax to arguments count if adaption is not needed. Assumes that eax
- // is available to write to at this point.
- if (!function->NeedsArgumentsAdaption()) {
- __ mov(eax, arity);
- }
+ bool can_invoke_directly = !function->NeedsArgumentsAdaption() ||
+ function->shared()->formal_parameter_count() == arity;
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
- // Invoke function.
- __ SetCallKind(ecx, call_kind);
- if (*function == *info()->closure()) {
- __ CallSelf();
+ if (can_invoke_directly) {
+ __ LoadHeapObject(edi, function);
+
+ // Change context if needed.
+ bool change_context =
+ (info()->closure()->context() != function->context()) ||
+ scope()->contains_with() ||
+ (scope()->num_heap_slots() > 0);
+
+ if (change_context) {
+ __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
+ } else {
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ }
+
+ // Set eax to arguments count if adaption is not needed. Assumes that eax
+ // is available to write to at this point.
+ if (!function->NeedsArgumentsAdaption()) {
+ __ mov(eax, arity);
+ }
+
+ // Invoke function directly.
+ __ SetCallKind(ecx, call_kind);
+ if (*function == *info()->closure()) {
+ __ CallSelf();
+ } else {
+ __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
+ }
+ RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
} else {
- __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
+ // We need to adapt arguments.
+ SafepointGenerator generator(
+ this, pointers, Safepoint::kLazyDeopt);
+ ParameterCount count(arity);
+ __ InvokeFunction(function, count, CALL_FUNCTION, generator, call_kind);
}
-
- // Setup deoptimization.
- RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
}
void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
ASSERT(ToRegister(instr->result()).is(eax));
- __ mov(edi, instr->function());
CallKnownFunction(instr->function(),
instr->arity(),
instr,
@@ -2886,12 +2950,12 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
__ movdbl(xmm_scratch, Operand::StaticVariable(one_half));
__ ucomisd(xmm_scratch, input_reg);
__ j(above, &below_half);
- // input = input + 0.5
- __ addsd(input_reg, xmm_scratch);
+ // xmm_scratch = input + 0.5
+ __ addsd(xmm_scratch, input_reg);
// Compute Math.floor(value + 0.5).
// Use truncating instruction (OK because input is positive).
- __ cvttsd2si(output_reg, Operand(input_reg));
+ __ cvttsd2si(output_reg, Operand(xmm_scratch));
// Overflow is signalled with minint.
__ cmp(output_reg, 0x80000000u);
@@ -2927,72 +2991,146 @@ void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
}
-void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
+void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) {
XMMRegister xmm_scratch = xmm0;
XMMRegister input_reg = ToDoubleRegister(instr->value());
+ Register scratch = ToRegister(instr->temp());
ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
+
+ // Note that according to ECMA-262 15.8.2.13:
+ // Math.pow(-Infinity, 0.5) == Infinity
+ // Math.sqrt(-Infinity) == NaN
+ Label done, sqrt;
+ // Check base for -Infinity. According to IEEE-754, single-precision
+ // -Infinity has the highest 9 bits set and the lowest 23 bits cleared.
+ __ mov(scratch, 0xFF800000);
+ __ movd(xmm_scratch, scratch);
+ __ cvtss2sd(xmm_scratch, xmm_scratch);
+ __ ucomisd(input_reg, xmm_scratch);
+ // Comparing -Infinity with NaN results in "unordered", which sets the
+ // zero flag as if both were equal. However, it also sets the carry flag.
+ __ j(not_equal, &sqrt, Label::kNear);
+ __ j(carry, &sqrt, Label::kNear);
+ // If input is -Infinity, return Infinity.
+ __ xorps(input_reg, input_reg);
+ __ subsd(input_reg, xmm_scratch);
+ __ jmp(&done, Label::kNear);
+
+ // Square root.
+ __ bind(&sqrt);
__ xorps(xmm_scratch, xmm_scratch);
__ addsd(input_reg, xmm_scratch); // Convert -0 to +0.
__ sqrtsd(input_reg, input_reg);
+ __ bind(&done);
}
void LCodeGen::DoPower(LPower* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
- DoubleRegister result_reg = ToDoubleRegister(instr->result());
Representation exponent_type = instr->hydrogen()->right()->representation();
-
- if (exponent_type.IsDouble()) {
- // It is safe to use ebx directly since the instruction is marked
- // as a call.
- __ PrepareCallCFunction(4, ebx);
- __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
- __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right));
- __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
- 4);
+ // Having marked this as a call, we can use any registers.
+ // Just make sure that the input/output registers are the expected ones.
+ ASSERT(!instr->InputAt(1)->IsDoubleRegister() ||
+ ToDoubleRegister(instr->InputAt(1)).is(xmm1));
+ ASSERT(!instr->InputAt(1)->IsRegister() ||
+ ToRegister(instr->InputAt(1)).is(eax));
+ ASSERT(ToDoubleRegister(instr->InputAt(0)).is(xmm2));
+ ASSERT(ToDoubleRegister(instr->result()).is(xmm3));
+
+ if (exponent_type.IsTagged()) {
+ Label no_deopt;
+ __ JumpIfSmi(eax, &no_deopt);
+ __ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx);
+ DeoptimizeIf(not_equal, instr->environment());
+ __ bind(&no_deopt);
+ MathPowStub stub(MathPowStub::TAGGED);
+ __ CallStub(&stub);
} else if (exponent_type.IsInteger32()) {
- // It is safe to use ebx directly since the instruction is marked
- // as a call.
- ASSERT(!ToRegister(right).is(ebx));
- __ PrepareCallCFunction(4, ebx);
- __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
- __ mov(Operand(esp, 1 * kDoubleSize), ToRegister(right));
- __ CallCFunction(ExternalReference::power_double_int_function(isolate()),
- 4);
+ MathPowStub stub(MathPowStub::INTEGER);
+ __ CallStub(&stub);
} else {
- ASSERT(exponent_type.IsTagged());
- CpuFeatures::Scope scope(SSE2);
- Register right_reg = ToRegister(right);
-
- Label non_smi, call;
- __ JumpIfNotSmi(right_reg, &non_smi);
- __ SmiUntag(right_reg);
- __ cvtsi2sd(result_reg, Operand(right_reg));
- __ jmp(&call);
-
- __ bind(&non_smi);
- // It is safe to use ebx directly since the instruction is marked
- // as a call.
- ASSERT(!right_reg.is(ebx));
- __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE , ebx);
- DeoptimizeIf(not_equal, instr->environment());
- __ movdbl(result_reg, FieldOperand(right_reg, HeapNumber::kValueOffset));
-
- __ bind(&call);
- __ PrepareCallCFunction(4, ebx);
- __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
- __ movdbl(Operand(esp, 1 * kDoubleSize), result_reg);
- __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
- 4);
+ ASSERT(exponent_type.IsDouble());
+ MathPowStub stub(MathPowStub::DOUBLE);
+ __ CallStub(&stub);
}
+}
- // Return value is in st(0) on ia32.
- // Store it into the (fixed) result register.
- __ sub(Operand(esp), Immediate(kDoubleSize));
- __ fstp_d(Operand(esp, 0));
- __ movdbl(result_reg, Operand(esp, 0));
- __ add(Operand(esp), Immediate(kDoubleSize));
+
+void LCodeGen::DoRandom(LRandom* instr) {
+ class DeferredDoRandom: public LDeferredCode {
+ public:
+ DeferredDoRandom(LCodeGen* codegen, LRandom* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() { codegen()->DoDeferredRandom(instr_); }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LRandom* instr_;
+ };
+
+ DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
+
+ // Having marked this instruction as a call we can use any
+ // registers.
+ ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
+ ASSERT(ToRegister(instr->InputAt(0)).is(eax));
+ // Assert that the register size is indeed the size of each seed.
+ static const int kSeedSize = sizeof(uint32_t);
+ STATIC_ASSERT(kPointerSize == kSeedSize);
+
+ __ mov(eax, FieldOperand(eax, GlobalObject::kGlobalContextOffset));
+ static const int kRandomSeedOffset =
+ FixedArray::kHeaderSize + Context::RANDOM_SEED_INDEX * kPointerSize;
+ __ mov(ebx, FieldOperand(eax, kRandomSeedOffset));
+ // ebx: FixedArray of the global context's random seeds
+
+ // Load state[0].
+ __ mov(ecx, FieldOperand(ebx, ByteArray::kHeaderSize));
+ // If state[0] == 0, call runtime to initialize seeds.
+ __ test(ecx, ecx);
+ __ j(zero, deferred->entry());
+ // Load state[1].
+ __ mov(eax, FieldOperand(ebx, ByteArray::kHeaderSize + kSeedSize));
+ // ecx: state[0]
+ // eax: state[1]
+
+ // state[0] = 18273 * (state[0] & 0xFFFF) + (state[0] >> 16)
+ __ movzx_w(edx, ecx);
+ __ imul(edx, edx, 18273);
+ __ shr(ecx, 16);
+ __ add(ecx, edx);
+ // Save state[0].
+ __ mov(FieldOperand(ebx, ByteArray::kHeaderSize), ecx);
+
+ // state[1] = 36969 * (state[1] & 0xFFFF) + (state[1] >> 16)
+ __ movzx_w(edx, eax);
+ __ imul(edx, edx, 36969);
+ __ shr(eax, 16);
+ __ add(eax, edx);
+ // Save state[1].
+ __ mov(FieldOperand(ebx, ByteArray::kHeaderSize + kSeedSize), eax);
+
+ // Random bit pattern = (state[0] << 14) + (state[1] & 0x3FFFF)
+ __ shl(ecx, 14);
+ __ and_(eax, Immediate(0x3FFFF));
+ __ add(eax, ecx);
+
+ __ bind(deferred->exit());
+ // Convert 32 random bits in eax to 0.(32 random bits) in a double
+ // by computing:
+ // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
+ __ mov(ebx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
+ __ movd(xmm2, ebx);
+ __ movd(xmm1, eax);
+ __ cvtss2sd(xmm2, xmm2);
+ __ xorps(xmm1, xmm2);
+ __ subsd(xmm1, xmm2);
+}
+
+
+void LCodeGen::DoDeferredRandom(LRandom* instr) {
+ __ PrepareCallCFunction(1, ebx);
+ __ mov(Operand(esp, 0), eax);
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
+ // Return value is in eax.
}
@@ -3027,6 +3165,14 @@ void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
}
+void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
+ ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
+ TranscendentalCacheStub stub(TranscendentalCache::TAN,
+ TranscendentalCacheStub::UNTAGGED);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+}
+
+
void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
TranscendentalCacheStub stub(TranscendentalCache::COS,
@@ -3057,15 +3203,15 @@ void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
case kMathSqrt:
DoMathSqrt(instr);
break;
- case kMathPowHalf:
- DoMathPowHalf(instr);
- break;
case kMathCos:
DoMathCos(instr);
break;
case kMathSin:
DoMathSin(instr);
break;
+ case kMathTan:
+ DoMathTan(instr);
+ break;
case kMathLog:
DoMathLog(instr);
break;
@@ -3082,10 +3228,9 @@ void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
ASSERT(instr->HasPointerMap());
ASSERT(instr->HasDeoptimizationEnvironment());
LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
- SafepointGenerator generator(this, pointers, env->deoptimization_index());
+ SafepointGenerator generator(
+ this, pointers, Safepoint::kLazyDeopt);
ParameterCount count(instr->arity());
__ InvokeFunction(edi, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
}
@@ -3118,12 +3263,12 @@ void LCodeGen::DoCallNamed(LCallNamed* instr) {
void LCodeGen::DoCallFunction(LCallFunction* instr) {
ASSERT(ToRegister(instr->context()).is(esi));
+ ASSERT(ToRegister(instr->function()).is(edi));
ASSERT(ToRegister(instr->result()).is(eax));
int arity = instr->arity();
CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
- __ Drop(1);
}
@@ -3142,7 +3287,6 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
ASSERT(ToRegister(instr->result()).is(eax));
- __ mov(edi, instr->target());
CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
}
@@ -3152,9 +3296,9 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
ASSERT(ToRegister(instr->constructor()).is(edi));
ASSERT(ToRegister(instr->result()).is(eax));
- Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
+ CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
__ Set(eax, Immediate(instr->arity()));
- CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
+ CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
}
@@ -3214,7 +3358,7 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
ASSERT(ToRegister(instr->value()).is(eax));
__ mov(ecx, instr->name());
- Handle<Code> ic = instr->strict_mode()
+ Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
CallCode(ic, RelocInfo::CODE_TARGET, instr);
@@ -3224,7 +3368,7 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
if (instr->index()->IsConstantOperand()) {
__ cmp(ToOperand(instr->length()),
- ToImmediate(LConstantOperand::cast(instr->index())));
+ Immediate(ToInteger32(LConstantOperand::cast(instr->index()))));
DeoptimizeIf(below_equal, instr->environment());
} else {
__ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
@@ -3278,13 +3422,6 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
Register elements = ToRegister(instr->object());
Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
- // This instruction cannot handle the FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
- // conversion, so it deopts in that case.
- if (instr->hydrogen()->ValueNeedsSmiCheck()) {
- __ test(value, Immediate(kSmiTagMask));
- DeoptimizeIf(not_zero, instr->environment());
- }
-
// Do the store.
if (instr->key()->IsConstantOperand()) {
ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
@@ -3346,7 +3483,7 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
ASSERT(ToRegister(instr->key()).is(ecx));
ASSERT(ToRegister(instr->value()).is(eax));
- Handle<Code> ic = instr->strict_mode()
+ Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
CallCode(ic, RelocInfo::CODE_TARGET, instr);
@@ -3406,82 +3543,15 @@ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
LStringCharCodeAt* instr_;
};
- Register string = ToRegister(instr->string());
- Register index = ToRegister(instr->index());
- Register result = ToRegister(instr->result());
-
DeferredStringCharCodeAt* deferred =
new DeferredStringCharCodeAt(this, instr);
- // Fetch the instance type of the receiver into result register.
- __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
- __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
-
- // We need special handling for indirect strings.
- Label check_sequential;
- __ test(result, Immediate(kIsIndirectStringMask));
- __ j(zero, &check_sequential, Label::kNear);
-
- // Dispatch on the indirect string shape: slice or cons.
- Label cons_string;
- __ test(result, Immediate(kSlicedNotConsMask));
- __ j(zero, &cons_string, Label::kNear);
-
- // Handle slices.
- Label indirect_string_loaded;
- __ mov(result, FieldOperand(string, SlicedString::kOffsetOffset));
- __ SmiUntag(result);
- __ add(index, Operand(result));
- __ mov(string, FieldOperand(string, SlicedString::kParentOffset));
- __ jmp(&indirect_string_loaded, Label::kNear);
-
- // Handle conses.
- // Check whether the right hand side is the empty string (i.e. if
- // this is really a flat string in a cons string). If that is not
- // the case we would rather go to the runtime system now to flatten
- // the string.
- __ bind(&cons_string);
- __ cmp(FieldOperand(string, ConsString::kSecondOffset),
- Immediate(factory()->empty_string()));
- __ j(not_equal, deferred->entry());
- __ mov(string, FieldOperand(string, ConsString::kFirstOffset));
-
- __ bind(&indirect_string_loaded);
- __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
- __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
-
- // Check whether the string is sequential. The only non-sequential
- // shapes we support have just been unwrapped above.
- __ bind(&check_sequential);
- STATIC_ASSERT(kSeqStringTag == 0);
- __ test(result, Immediate(kStringRepresentationMask));
- __ j(not_zero, deferred->entry());
-
- // Dispatch on the encoding: ASCII or two-byte.
- Label ascii_string;
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
- STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
- __ test(result, Immediate(kStringEncodingMask));
- __ j(not_zero, &ascii_string, Label::kNear);
-
- // Two-byte string.
- // Load the two-byte character code into the result register.
- Label done;
- STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
- __ movzx_w(result, FieldOperand(string,
- index,
- times_2,
- SeqTwoByteString::kHeaderSize));
- __ jmp(&done, Label::kNear);
-
- // ASCII string.
- // Load the byte into the result register.
- __ bind(&ascii_string);
- __ movzx_b(result, FieldOperand(string,
- index,
- times_1,
- SeqAsciiString::kHeaderSize));
- __ bind(&done);
+ StringCharLoadGenerator::Generate(masm(),
+ factory(),
+ ToRegister(instr->string()),
+ ToRegister(instr->index()),
+ ToRegister(instr->result()),
+ deferred->entry());
__ bind(deferred->exit());
}
@@ -3574,16 +3644,8 @@ void LCodeGen::DoStringLength(LStringLength* instr) {
void LCodeGen::DoStringAdd(LStringAdd* instr) {
- if (instr->left()->IsConstantOperand()) {
- __ push(ToImmediate(instr->left()));
- } else {
- __ push(ToOperand(instr->left()));
- }
- if (instr->right()->IsConstantOperand()) {
- __ push(ToImmediate(instr->right()));
- } else {
- __ push(ToOperand(instr->right()));
- }
+ EmitPushTaggedOperand(instr->left());
+ EmitPushTaggedOperand(instr->right());
StringAddStub stub(NO_STRING_CHECK_IN_STUB);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
@@ -3655,7 +3717,7 @@ void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
RecordSafepointWithRegisters(
- instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
+ instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
if (!reg.is(eax)) __ mov(reg, eax);
// Done. Put the value in xmm0 into the value of the allocated heap
@@ -3707,8 +3769,8 @@ void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
// not have easy access to the local context.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
- RecordSafepointWithRegisters(instr->pointer_map(), 0,
- Safepoint::kNoDeoptimizationIndex);
+ RecordSafepointWithRegisters(
+ instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
__ StoreToSafepointRegisterSlot(reg, eax);
}
@@ -3733,8 +3795,10 @@ void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
void LCodeGen::EmitNumberUntagD(Register input_reg,
+ Register temp_reg,
XMMRegister result_reg,
bool deoptimize_on_undefined,
+ bool deoptimize_on_minus_zero,
LEnvironment* env) {
Label load_smi, done;
@@ -3763,6 +3827,15 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
}
// Heap number to XMM conversion.
__ movdbl(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
+ if (deoptimize_on_minus_zero) {
+ XMMRegister xmm_scratch = xmm0;
+ __ xorps(xmm_scratch, xmm_scratch);
+ __ ucomisd(result_reg, xmm_scratch);
+ __ j(not_zero, &done, Label::kNear);
+ __ movmskpd(temp_reg, result_reg);
+ __ test_b(temp_reg, 1);
+ DeoptimizeIf(not_zero, env);
+ }
__ jmp(&done, Label::kNear);
// Smi to XMM conversion
@@ -3806,8 +3879,7 @@ void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
__ cmp(Operand(input_reg), Immediate(kTooBigExponent));
__ j(less, &convert, Label::kNear);
// Pop FPU stack before deoptimizing.
- __ ffree(0);
- __ fincstp();
+ __ fstp(0);
DeoptimizeIf(no_condition, instr->environment());
// Reserve space for 64 bit answer.
@@ -3886,14 +3958,23 @@ void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
LOperand* input = instr->InputAt(0);
ASSERT(input->IsRegister());
+ LOperand* temp = instr->TempAt(0);
+ ASSERT(temp == NULL || temp->IsRegister());
LOperand* result = instr->result();
ASSERT(result->IsDoubleRegister());
Register input_reg = ToRegister(input);
XMMRegister result_reg = ToDoubleRegister(result);
- EmitNumberUntagD(input_reg, result_reg,
+ bool deoptimize_on_minus_zero =
+ instr->hydrogen()->deoptimize_on_minus_zero();
+ Register temp_reg = deoptimize_on_minus_zero ? ToRegister(temp) : no_reg;
+
+ EmitNumberUntagD(input_reg,
+ temp_reg,
+ result_reg,
instr->hydrogen()->deoptimize_on_undefined(),
+ deoptimize_on_minus_zero,
instr->environment());
}
@@ -4067,7 +4148,7 @@ void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
} else {
__ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
__ and_(temp, mask);
- __ cmpb(Operand(temp), tag);
+ __ cmp(temp, tag);
DeoptimizeIf(not_equal, instr->environment());
}
}
@@ -4083,19 +4164,38 @@ void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
__ cmp(reg, Operand::Cell(cell));
} else {
Operand operand = ToOperand(instr->value());
- __ cmp(operand, instr->hydrogen()->target());
+ __ cmp(operand, target);
}
DeoptimizeIf(not_equal, instr->environment());
}
-void LCodeGen::DoCheckMap(LCheckMap* instr) {
+void LCodeGen::DoCheckMapCommon(Register reg,
+ Handle<Map> map,
+ CompareMapMode mode,
+ LEnvironment* env) {
+ Label success;
+ __ CompareMap(reg, map, &success, mode);
+ DeoptimizeIf(not_equal, env);
+ __ bind(&success);
+}
+
+
+void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
LOperand* input = instr->InputAt(0);
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
- instr->hydrogen()->map());
- DeoptimizeIf(not_equal, instr->environment());
+
+ Label success;
+ SmallMapList* map_set = instr->hydrogen()->map_set();
+ for (int i = 0; i < map_set->length() - 1; i++) {
+ Handle<Map> map = map_set->at(i);
+ __ CompareMap(reg, map, &success, REQUIRE_EXACT_MAP);
+ __ j(equal, &success);
+ }
+ Handle<Map> map = map_set->last();
+ DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr->environment());
+ __ bind(&success);
}
@@ -4147,17 +4247,6 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
}
-void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) {
- if (isolate()->heap()->InNewSpace(*object)) {
- Handle<JSGlobalPropertyCell> cell =
- isolate()->factory()->NewJSGlobalPropertyCell(object);
- __ mov(result, Operand::Cell(cell));
- } else {
- __ mov(result, object);
- }
-}
-
-
void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
Register reg = ToRegister(instr->TempAt(0));
@@ -4165,39 +4254,150 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
Handle<JSObject> current_prototype = instr->prototype();
// Load prototype object.
- LoadHeapObject(reg, current_prototype);
+ __ LoadHeapObject(reg, current_prototype);
// Check prototype maps up to the holder.
while (!current_prototype.is_identical_to(holder)) {
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
- Handle<Map>(current_prototype->map()));
- DeoptimizeIf(not_equal, instr->environment());
+ DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
+ ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
+
current_prototype =
Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
// Load next prototype object.
- LoadHeapObject(reg, current_prototype);
+ __ LoadHeapObject(reg, current_prototype);
}
// Check the holder map.
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
- Handle<Map>(current_prototype->map()));
- DeoptimizeIf(not_equal, instr->environment());
+ DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
+ ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
+}
+
+
+void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
+ class DeferredAllocateObject: public LDeferredCode {
+ public:
+ DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LAllocateObject* instr_;
+ };
+
+ DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+
+ Register result = ToRegister(instr->result());
+ Register scratch = ToRegister(instr->TempAt(0));
+ Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+ Handle<Map> initial_map(constructor->initial_map());
+ int instance_size = initial_map->instance_size();
+ ASSERT(initial_map->pre_allocated_property_fields() +
+ initial_map->unused_property_fields() -
+ initial_map->inobject_properties() == 0);
+
+ // Allocate memory for the object. The initial map might change when
+ // the constructor's prototype changes, but instance size and property
+ // counts remain unchanged (if slack tracking finished).
+ ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
+ __ AllocateInNewSpace(instance_size,
+ result,
+ no_reg,
+ scratch,
+ deferred->entry(),
+ TAG_OBJECT);
+
+ __ bind(deferred->exit());
+ if (FLAG_debug_code) {
+ Label is_in_new_space;
+ __ JumpIfInNewSpace(result, scratch, &is_in_new_space);
+ __ Abort("Allocated object is not in new-space");
+ __ bind(&is_in_new_space);
+ }
+
+ // Load the initial map.
+ Register map = scratch;
+ __ LoadHeapObject(scratch, constructor);
+ __ mov(map, FieldOperand(scratch, JSFunction::kPrototypeOrInitialMapOffset));
+
+ if (FLAG_debug_code) {
+ __ AbortIfSmi(map);
+ __ cmpb(FieldOperand(map, Map::kInstanceSizeOffset),
+ instance_size >> kPointerSizeLog2);
+ __ Assert(equal, "Unexpected instance size");
+ __ cmpb(FieldOperand(map, Map::kPreAllocatedPropertyFieldsOffset),
+ initial_map->pre_allocated_property_fields());
+ __ Assert(equal, "Unexpected pre-allocated property fields count");
+ __ cmpb(FieldOperand(map, Map::kUnusedPropertyFieldsOffset),
+ initial_map->unused_property_fields());
+ __ Assert(equal, "Unexpected unused property fields count");
+ __ cmpb(FieldOperand(map, Map::kInObjectPropertiesOffset),
+ initial_map->inobject_properties());
+ __ Assert(equal, "Unexpected in-object property fields count");
+ }
+
+ // Initialize map and fields of the newly allocated object.
+ ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
+ __ mov(FieldOperand(result, JSObject::kMapOffset), map);
+ __ mov(scratch, factory()->empty_fixed_array());
+ __ mov(FieldOperand(result, JSObject::kElementsOffset), scratch);
+ __ mov(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
+ if (initial_map->inobject_properties() != 0) {
+ __ mov(scratch, factory()->undefined_value());
+ for (int i = 0; i < initial_map->inobject_properties(); i++) {
+ int property_offset = JSObject::kHeaderSize + i * kPointerSize;
+ __ mov(FieldOperand(result, property_offset), scratch);
+ }
+ }
+}
+
+
+void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
+ Register result = ToRegister(instr->result());
+ Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+ Handle<Map> initial_map(constructor->initial_map());
+ int instance_size = initial_map->instance_size();
+
+ // TODO(3095996): Get rid of this. For now, we need to make the
+ // result register contain a valid pointer because it is already
+ // contained in the register pointer map.
+ __ Set(result, Immediate(0));
+
+ PushSafepointRegistersScope scope(this);
+ __ push(Immediate(Smi::FromInt(instance_size)));
+ CallRuntimeFromDeferred(
+ Runtime::kAllocateInNewSpace, 1, instr, instr->context());
+ __ StoreToSafepointRegisterSlot(result, eax);
}
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
ASSERT(ToRegister(instr->context()).is(esi));
+ Heap* heap = isolate()->heap();
+ ElementsKind boilerplate_elements_kind =
+ instr->hydrogen()->boilerplate_elements_kind();
+
+ // Deopt if the array literal boilerplate ElementsKind is of a type different
+ // than the expected one. The check isn't necessary if the boilerplate has
+ // already been converted to FAST_ELEMENTS.
+ if (boilerplate_elements_kind != FAST_ELEMENTS) {
+ __ LoadHeapObject(eax, instr->hydrogen()->boilerplate_object());
+ __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
+ // Load the map's "bit field 2". We only need the first byte,
+ // but the following masking takes care of that anyway.
+ __ mov(ebx, FieldOperand(ebx, Map::kBitField2Offset));
+ // Retrieve elements_kind from bit field 2.
+ __ and_(ebx, Map::kElementsKindMask);
+ __ cmp(ebx, boilerplate_elements_kind << Map::kElementsKindShift);
+ DeoptimizeIf(not_equal, instr->environment());
+ }
- Handle<FixedArray> constant_elements = instr->hydrogen()->constant_elements();
- ASSERT_EQ(2, constant_elements->length());
- ElementsKind constant_elements_kind =
- static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
-
- // Setup the parameters to the stub/runtime call.
+ // Set up the parameters to the stub/runtime call.
__ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
__ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
- __ push(Immediate(constant_elements));
+ // Boilerplate already exists, constant elements are never accessed.
+ // Pass an empty fixed array.
+ __ push(Immediate(Handle<FixedArray>(heap->empty_fixed_array())));
// Pick the right runtime function or stub to call.
int length = instr->hydrogen()->length();
@@ -4213,22 +4413,150 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
} else {
FastCloneShallowArrayStub::Mode mode =
- constant_elements_kind == FAST_DOUBLE_ELEMENTS
- ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
- : FastCloneShallowArrayStub::CLONE_ELEMENTS;
+ boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
+ ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+ : FastCloneShallowArrayStub::CLONE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, length);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
}
+void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
+ Register result,
+ Register source,
+ int* offset) {
+ ASSERT(!source.is(ecx));
+ ASSERT(!result.is(ecx));
+
+ if (FLAG_debug_code) {
+ __ LoadHeapObject(ecx, object);
+ __ cmp(source, ecx);
+ __ Assert(equal, "Unexpected object literal boilerplate");
+ }
+
+ // Only elements backing stores for non-COW arrays need to be copied.
+ Handle<FixedArrayBase> elements(object->elements());
+ bool has_elements = elements->length() > 0 &&
+ elements->map() != isolate()->heap()->fixed_cow_array_map();
+
+ // Increase the offset so that subsequent objects end up right after
+ // this object and its backing store.
+ int object_offset = *offset;
+ int object_size = object->map()->instance_size();
+ int elements_offset = *offset + object_size;
+ int elements_size = has_elements ? elements->Size() : 0;
+ *offset += object_size + elements_size;
+
+ // Copy object header.
+ ASSERT(object->properties()->length() == 0);
+ int inobject_properties = object->map()->inobject_properties();
+ int header_size = object_size - inobject_properties * kPointerSize;
+ for (int i = 0; i < header_size; i += kPointerSize) {
+ if (has_elements && i == JSObject::kElementsOffset) {
+ __ lea(ecx, Operand(result, elements_offset));
+ } else {
+ __ mov(ecx, FieldOperand(source, i));
+ }
+ __ mov(FieldOperand(result, object_offset + i), ecx);
+ }
+
+ // Copy in-object properties.
+ for (int i = 0; i < inobject_properties; i++) {
+ int total_offset = object_offset + object->GetInObjectPropertyOffset(i);
+ Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i));
+ if (value->IsJSObject()) {
+ Handle<JSObject> value_object = Handle<JSObject>::cast(value);
+ __ lea(ecx, Operand(result, *offset));
+ __ mov(FieldOperand(result, total_offset), ecx);
+ __ LoadHeapObject(source, value_object);
+ EmitDeepCopy(value_object, result, source, offset);
+ } else if (value->IsHeapObject()) {
+ __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value));
+ __ mov(FieldOperand(result, total_offset), ecx);
+ } else {
+ __ mov(FieldOperand(result, total_offset), Immediate(value));
+ }
+ }
+
+ if (has_elements) {
+ // Copy elements backing store header.
+ __ LoadHeapObject(source, elements);
+ for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) {
+ __ mov(ecx, FieldOperand(source, i));
+ __ mov(FieldOperand(result, elements_offset + i), ecx);
+ }
+
+ // Copy elements backing store content.
+ int elements_length = elements->length();
+ if (elements->IsFixedDoubleArray()) {
+ Handle<FixedDoubleArray> double_array =
+ Handle<FixedDoubleArray>::cast(elements);
+ for (int i = 0; i < elements_length; i++) {
+ int64_t value = double_array->get_representation(i);
+ int32_t value_low = value & 0xFFFFFFFF;
+ int32_t value_high = value >> 32;
+ int total_offset =
+ elements_offset + FixedDoubleArray::OffsetOfElementAt(i);
+ __ mov(FieldOperand(result, total_offset), Immediate(value_low));
+ __ mov(FieldOperand(result, total_offset + 4), Immediate(value_high));
+ }
+ } else if (elements->IsFixedArray()) {
+ for (int i = 0; i < elements_length; i++) {
+ int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
+ Handle<Object> value = JSObject::GetElement(object, i);
+ if (value->IsJSObject()) {
+ Handle<JSObject> value_object = Handle<JSObject>::cast(value);
+ __ lea(ecx, Operand(result, *offset));
+ __ mov(FieldOperand(result, total_offset), ecx);
+ __ LoadHeapObject(source, value_object);
+ EmitDeepCopy(value_object, result, source, offset);
+ } else if (value->IsHeapObject()) {
+ __ LoadHeapObject(ecx, Handle<HeapObject>::cast(value));
+ __ mov(FieldOperand(result, total_offset), ecx);
+ } else {
+ __ mov(FieldOperand(result, total_offset), Immediate(value));
+ }
+ }
+ } else {
+ UNREACHABLE();
+ }
+ }
+}
+
+
+void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
+ ASSERT(ToRegister(instr->context()).is(esi));
+ int size = instr->hydrogen()->total_size();
+
+ // Allocate all objects that are part of the literal in one big
+ // allocation. This avoids multiple limit checks.
+ Label allocated, runtime_allocate;
+ __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
+ __ jmp(&allocated);
+
+ __ bind(&runtime_allocate);
+ __ push(Immediate(Smi::FromInt(size)));
+ CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
+
+ __ bind(&allocated);
+ int offset = 0;
+ __ LoadHeapObject(ebx, instr->hydrogen()->boilerplate());
+ EmitDeepCopy(instr->hydrogen()->boilerplate(), eax, ebx, &offset);
+ ASSERT_EQ(size, offset);
+}
+
+
void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
ASSERT(ToRegister(instr->context()).is(esi));
- // Setup the parameters to the stub/runtime call.
- __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
- __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
+ Handle<FixedArray> literals(instr->environment()->closure()->literals());
+ Handle<FixedArray> constant_properties =
+ instr->hydrogen()->constant_properties();
+
+ // Set up the parameters to the stub/runtime call.
+ __ PushHeapObject(literals);
__ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
- __ push(Immediate(instr->hydrogen()->constant_properties()));
+ __ push(Immediate(constant_properties));
int flags = instr->hydrogen()->fast_elements()
? ObjectLiteral::kFastElements
: ObjectLiteral::kNoFlags;
@@ -4237,11 +4565,16 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
: ObjectLiteral::kNoFlags;
__ push(Immediate(Smi::FromInt(flags)));
- // Pick the right runtime function to call.
+ // Pick the right runtime function or stub to call.
+ int properties_count = constant_properties->length() / 2;
if (instr->hydrogen()->depth() > 1) {
CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
- } else {
+ } else if (flags != ObjectLiteral::kFastElements ||
+ properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
+ } else {
+ FastCloneShallowObjectStub stub(properties_count);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
}
@@ -4314,11 +4647,11 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
Handle<SharedFunctionInfo> shared_info = instr->shared_info();
bool pretenure = instr->hydrogen()->pretenure();
if (!pretenure && shared_info->num_literals() == 0) {
- FastNewClosureStub stub(shared_info->strict_mode_flag());
+ FastNewClosureStub stub(shared_info->language_mode());
__ push(Immediate(shared_info));
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
} else {
- __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
+ __ push(esi);
__ push(Immediate(shared_info));
__ push(Immediate(pretenure
? factory()->true_value()
@@ -4330,11 +4663,7 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
void LCodeGen::DoTypeof(LTypeof* instr) {
LOperand* input = instr->InputAt(1);
- if (input->IsConstantOperand()) {
- __ push(ToImmediate(input));
- } else {
- __ push(ToOperand(input));
- }
+ EmitPushTaggedOperand(input);
CallRuntime(Runtime::kTypeof, 1, instr);
}
@@ -4451,9 +4780,25 @@ void LCodeGen::EmitIsConstructCall(Register temp) {
}
+void LCodeGen::EnsureSpaceForLazyDeopt() {
+ // Ensure that we have enough space after the previous lazy-bailout
+ // instruction for patching the code here.
+ int current_pc = masm()->pc_offset();
+ int patch_size = Deoptimizer::patch_size();
+ if (current_pc < last_lazy_deopt_pc_ + patch_size) {
+ int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
+ __ Nop(padding_size);
+ }
+ last_lazy_deopt_pc_ = masm()->pc_offset();
+}
+
+
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
- // No code for lazy bailout instruction. Used to capture environment after a
- // call for populating the safepoint data with deoptimization data.
+ EnsureSpaceForLazyDeopt();
+ ASSERT(instr->HasEnvironment());
+ LEnvironment* env = instr->environment();
+ RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
+ safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
@@ -4466,39 +4811,29 @@ void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
LOperand* obj = instr->object();
LOperand* key = instr->key();
__ push(ToOperand(obj));
- if (key->IsConstantOperand()) {
- __ push(ToImmediate(key));
- } else {
- __ push(ToOperand(key));
- }
+ EmitPushTaggedOperand(key);
ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
// Create safepoint generator that will also ensure enough space in the
// reloc info for patching in deoptimization (since this is invoking a
// builtin)
- SafepointGenerator safepoint_generator(this,
- pointers,
- env->deoptimization_index());
+ SafepointGenerator safepoint_generator(
+ this, pointers, Safepoint::kLazyDeopt);
__ push(Immediate(Smi::FromInt(strict_mode_flag())));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
}
void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
- {
- PushSafepointRegistersScope scope(this);
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
- __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
- RegisterLazyDeoptimization(
- instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
- }
-
- // The gap code includes the restoring of the safepoint registers.
- int pc = masm()->pc_offset();
- safepoints_.SetPcAfterGap(pc);
+ PushSafepointRegistersScope scope(this);
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
+ RecordSafepointWithLazyDeopt(
+ instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
+ ASSERT(instr->HasEnvironment());
+ LEnvironment* env = instr->environment();
+ safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
@@ -4513,6 +4848,10 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
LStackCheck* instr_;
};
+ ASSERT(instr->HasEnvironment());
+ LEnvironment* env = instr->environment();
+ // There is no LLazyBailout instruction for stack-checks. We have to
+ // prepare for lazy deoptimization explicitly here.
if (instr->hydrogen()->is_function_entry()) {
// Perform stack overflow check.
Label done;
@@ -4525,7 +4864,10 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
ASSERT(ToRegister(instr->context()).is(esi));
StackCheckStub stub;
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ EnsureSpaceForLazyDeopt();
__ bind(&done);
+ RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
+ safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
} else {
ASSERT(instr->hydrogen()->is_backwards_branch());
// Perform stack overflow check if this goto needs it before jumping.
@@ -4535,8 +4877,13 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
ExternalReference::address_of_stack_limit(isolate());
__ cmp(esp, Operand::StaticVariable(stack_limit));
__ j(below, deferred_stack_check->entry());
+ EnsureSpaceForLazyDeopt();
__ bind(instr->done_label());
deferred_stack_check->SetExit(instr->done_label());
+ RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
+ // Don't record a deoptimization index for the safepoint here.
+ // This will be done explicitly when emitting call and the safepoint in
+ // the deferred code.
}
}
@@ -4552,7 +4899,7 @@ void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
// If the environment were already registered, we would have no way of
// backpatching it with the spill slot operands.
ASSERT(!environment->HasBeenRegistered());
- RegisterEnvironmentForDeoptimization(environment);
+ RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
ASSERT(osr_pc_offset_ == -1);
osr_pc_offset_ = masm()->pc_offset();
}
@@ -4561,31 +4908,95 @@ void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
void LCodeGen::DoIn(LIn* instr) {
LOperand* obj = instr->object();
LOperand* key = instr->key();
- if (key->IsConstantOperand()) {
- __ push(ToImmediate(key));
- } else {
- __ push(ToOperand(key));
- }
- if (obj->IsConstantOperand()) {
- __ push(ToImmediate(obj));
- } else {
- __ push(ToOperand(obj));
- }
+ EmitPushTaggedOperand(key);
+ EmitPushTaggedOperand(obj);
ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
- // Create safepoint generator that will also ensure enough space in the
- // reloc info for patching in deoptimization (since this is invoking a
- // builtin)
- SafepointGenerator safepoint_generator(this,
- pointers,
- env->deoptimization_index());
+ SafepointGenerator safepoint_generator(
+ this, pointers, Safepoint::kLazyDeopt);
__ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
}
+void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
+ __ cmp(eax, isolate()->factory()->undefined_value());
+ DeoptimizeIf(equal, instr->environment());
+
+ __ cmp(eax, isolate()->factory()->null_value());
+ DeoptimizeIf(equal, instr->environment());
+
+ __ test(eax, Immediate(kSmiTagMask));
+ DeoptimizeIf(zero, instr->environment());
+
+ STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
+ __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
+ DeoptimizeIf(below_equal, instr->environment());
+
+ Label use_cache, call_runtime;
+ __ CheckEnumCache(&call_runtime);
+
+ __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
+ __ jmp(&use_cache, Label::kNear);
+
+ // Get the set of properties to enumerate.
+ __ bind(&call_runtime);
+ __ push(eax);
+ CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
+
+ __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
+ isolate()->factory()->meta_map());
+ DeoptimizeIf(not_equal, instr->environment());
+ __ bind(&use_cache);
+}
+
+
+void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
+ Register map = ToRegister(instr->map());
+ Register result = ToRegister(instr->result());
+ __ LoadInstanceDescriptors(map, result);
+ __ mov(result,
+ FieldOperand(result, DescriptorArray::kEnumerationIndexOffset));
+ __ mov(result,
+ FieldOperand(result, FixedArray::SizeFor(instr->idx())));
+ __ test(result, result);
+ DeoptimizeIf(equal, instr->environment());
+}
+
+
+void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
+ Register object = ToRegister(instr->value());
+ __ cmp(ToRegister(instr->map()),
+ FieldOperand(object, HeapObject::kMapOffset));
+ DeoptimizeIf(not_equal, instr->environment());
+}
+
+
+void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
+ Register object = ToRegister(instr->object());
+ Register index = ToRegister(instr->index());
+
+ Label out_of_object, done;
+ __ cmp(index, Immediate(0));
+ __ j(less, &out_of_object);
+ __ mov(object, FieldOperand(object,
+ index,
+ times_half_pointer_size,
+ JSObject::kHeaderSize));
+ __ jmp(&done, Label::kNear);
+
+ __ bind(&out_of_object);
+ __ mov(object, FieldOperand(object, JSObject::kPropertiesOffset));
+ __ neg(index);
+ // Index is now equal to out of object property index plus 1.
+ __ mov(object, FieldOperand(object,
+ index,
+ times_half_pointer_size,
+ FixedArray::kHeaderSize - kPointerSize));
+ __ bind(&done);
+}
+
+
#undef __
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/ia32/lithium-codegen-ia32.h b/src/3rdparty/v8/src/ia32/lithium-codegen-ia32.h
index 412e418..52befc6 100644
--- a/src/3rdparty/v8/src/ia32/lithium-codegen-ia32.h
+++ b/src/3rdparty/v8/src/ia32/lithium-codegen-ia32.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -58,10 +58,9 @@ class LCodeGen BASE_EMBEDDED {
inlined_function_count_(0),
scope_(info->scope()),
status_(UNUSED),
- dynamic_frame_alignment_(false),
deferred_(8),
osr_pc_offset_(-1),
- deoptimization_reloc_size(),
+ last_lazy_deopt_pc_(0),
resolver_(this),
expected_safepoint_kind_(Safepoint::kSimple) {
PopulateDeoptimizationLiteralsWithInlinedFunctions();
@@ -78,7 +77,13 @@ class LCodeGen BASE_EMBEDDED {
Operand ToOperand(LOperand* op) const;
Register ToRegister(LOperand* op) const;
XMMRegister ToDoubleRegister(LOperand* op) const;
- Immediate ToImmediate(LOperand* op);
+
+ bool IsInteger32(LConstantOperand* op) const;
+ Immediate ToInteger32Immediate(LOperand* op) const {
+ return Immediate(ToInteger32(LConstantOperand::cast(op)));
+ }
+
+ Handle<Object> ToHandle(LConstantOperand* op) const;
// The operand denoting the second word (the one with a higher address) of
// a double stack slot.
@@ -99,10 +104,15 @@ class LCodeGen BASE_EMBEDDED {
void DoDeferredTaggedToI(LTaggedToI* instr);
void DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr);
void DoDeferredStackCheck(LStackCheck* instr);
+ void DoDeferredRandom(LRandom* instr);
void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
- void DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
- Label* map_check);
+ void DoDeferredAllocateObject(LAllocateObject* instr);
+ void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
+ Label* map_check);
+
+ void DoCheckMapCommon(Register reg, Handle<Map> map,
+ CompareMapMode mode, LEnvironment* env);
// Parallel move support.
void DoParallelMove(LParallelMove* move);
@@ -132,11 +142,7 @@ class LCodeGen BASE_EMBEDDED {
bool is_aborted() const { return status_ == ABORTED; }
StrictModeFlag strict_mode_flag() const {
- return info()->strict_mode_flag();
- }
- bool dynamic_frame_alignment() const { return dynamic_frame_alignment_; }
- void set_dynamic_frame_alignment(bool value) {
- dynamic_frame_alignment_ = value;
+ return info()->is_classic_mode() ? kNonStrictMode : kStrictMode;
}
LChunk* chunk() const { return chunk_; }
@@ -144,7 +150,6 @@ class LCodeGen BASE_EMBEDDED {
HGraph* graph() const { return chunk_->graph(); }
int GetNextEmittedBlock(int block);
- LInstruction* GetNextInstruction();
void EmitClassOfTest(Label* if_true,
Label* if_false,
@@ -208,12 +213,11 @@ class LCodeGen BASE_EMBEDDED {
LInstruction* instr,
CallKind call_kind);
- void LoadHeapObject(Register result, Handle<HeapObject> object);
+ void RecordSafepointWithLazyDeopt(LInstruction* instr,
+ SafepointMode safepoint_mode);
- void RegisterLazyDeoptimization(LInstruction* instr,
- SafepointMode safepoint_mode);
-
- void RegisterEnvironmentForDeoptimization(LEnvironment* environment);
+ void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
+ Safepoint::DeoptMode mode);
void DeoptimizeIf(Condition cc, LEnvironment* environment);
void AddToTranslation(Translation* translation,
@@ -227,6 +231,7 @@ class LCodeGen BASE_EMBEDDED {
Register ToRegister(int index) const;
XMMRegister ToDoubleRegister(int index) const;
int ToInteger32(LConstantOperand* op) const;
+
double ToDouble(LConstantOperand* op) const;
Operand BuildFastArrayOperand(LOperand* elements_pointer,
LOperand* key,
@@ -239,8 +244,8 @@ class LCodeGen BASE_EMBEDDED {
void DoMathFloor(LUnaryMathOperation* instr);
void DoMathRound(LUnaryMathOperation* instr);
void DoMathSqrt(LUnaryMathOperation* instr);
- void DoMathPowHalf(LUnaryMathOperation* instr);
void DoMathLog(LUnaryMathOperation* instr);
+ void DoMathTan(LUnaryMathOperation* instr);
void DoMathCos(LUnaryMathOperation* instr);
void DoMathSin(LUnaryMathOperation* instr);
@@ -248,23 +253,22 @@ class LCodeGen BASE_EMBEDDED {
void RecordSafepoint(LPointerMap* pointers,
Safepoint::Kind kind,
int arguments,
- int deoptimization_index);
- void RecordSafepoint(LPointerMap* pointers, int deoptimization_index);
- void RecordSafepoint(int deoptimization_index);
+ Safepoint::DeoptMode mode);
+ void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
+ void RecordSafepoint(Safepoint::DeoptMode mode);
void RecordSafepointWithRegisters(LPointerMap* pointers,
int arguments,
- int deoptimization_index);
+ Safepoint::DeoptMode mode);
void RecordPosition(int position);
- int LastSafepointEnd() {
- return static_cast<int>(safepoints_.GetPcAfterGap());
- }
static Condition TokenToCondition(Token::Value op, bool is_unsigned);
void EmitGoto(int block);
void EmitBranch(int left_block, int right_block, Condition cc);
void EmitNumberUntagD(Register input,
+ Register temp,
XMMRegister result,
bool deoptimize_on_undefined,
+ bool deoptimize_on_minus_zero,
LEnvironment* env);
// Emits optimized code for typeof x == "y". Modifies input register.
@@ -283,6 +287,13 @@ class LCodeGen BASE_EMBEDDED {
Label* is_not_object,
Label* is_object);
+ // Emits optimized code for %_IsString(x). Preserves input register.
+ // Returns the condition on which a final split to
+ // true and false label should be made, to optimize fallthrough.
+ Condition EmitIsString(Register input,
+ Register temp1,
+ Label* is_not_string);
+
// Emits optimized code for %_IsConstructCall().
// Caller should branch on equal condition.
void EmitIsConstructCall(Register temp);
@@ -292,6 +303,19 @@ class LCodeGen BASE_EMBEDDED {
Handle<Map> type,
Handle<String> name);
+ // Emits optimized code to deep-copy the contents of statically known
+ // object graphs (e.g. object literal boilerplate).
+ void EmitDeepCopy(Handle<JSObject> object,
+ Register result,
+ Register source,
+ int* offset);
+
+ void EnsureSpaceForLazyDeopt();
+
+ // Emits code for pushing either a tagged constant, a (non-double)
+ // register, or a stack slot operand.
+ void EmitPushTaggedOperand(LOperand* operand);
+
LChunk* const chunk_;
MacroAssembler* const masm_;
CompilationInfo* const info_;
@@ -304,17 +328,10 @@ class LCodeGen BASE_EMBEDDED {
int inlined_function_count_;
Scope* const scope_;
Status status_;
- bool dynamic_frame_alignment_;
TranslationBuffer translations_;
ZoneList<LDeferredCode*> deferred_;
int osr_pc_offset_;
-
- struct DeoptimizationRelocSize {
- int min_size;
- int last_pc_offset;
- };
-
- DeoptimizationRelocSize deoptimization_reloc_size;
+ int last_lazy_deopt_pc_;
// Builder that keeps track of safepoints in the code. The table
// itself is emitted at the end of the generated code.
@@ -364,7 +381,7 @@ class LDeferredCode: public ZoneObject {
virtual void Generate() = 0;
virtual LInstruction* instr() = 0;
- void SetExit(Label *exit) { external_exit_ = exit; }
+ void SetExit(Label* exit) { external_exit_ = exit; }
Label* entry() { return &entry_; }
Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
int instruction_index() const { return instruction_index_; }
diff --git a/src/3rdparty/v8/src/ia32/lithium-gap-resolver-ia32.cc b/src/3rdparty/v8/src/ia32/lithium-gap-resolver-ia32.cc
index fcf1f91..510d9f1 100644
--- a/src/3rdparty/v8/src/ia32/lithium-gap-resolver-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/lithium-gap-resolver-ia32.cc
@@ -303,14 +303,24 @@ void LGapResolver::EmitMove(int index) {
}
} else if (source->IsConstantOperand()) {
- ASSERT(destination->IsRegister() || destination->IsStackSlot());
- Immediate src = cgen_->ToImmediate(source);
+ LConstantOperand* constant_source = LConstantOperand::cast(source);
if (destination->IsRegister()) {
Register dst = cgen_->ToRegister(destination);
- __ Set(dst, src);
+ if (cgen_->IsInteger32(constant_source)) {
+ __ Set(dst, cgen_->ToInteger32Immediate(constant_source));
+ } else {
+ __ LoadObject(dst, cgen_->ToHandle(constant_source));
+ }
} else {
+ ASSERT(destination->IsStackSlot());
Operand dst = cgen_->ToOperand(destination);
- __ Set(dst, src);
+ if (cgen_->IsInteger32(constant_source)) {
+ __ Set(dst, cgen_->ToInteger32Immediate(constant_source));
+ } else {
+ Register tmp = EnsureTempRegister();
+ __ LoadObject(tmp, cgen_->ToHandle(constant_source));
+ __ mov(dst, tmp);
+ }
}
} else if (source->IsDoubleRegister()) {
diff --git a/src/3rdparty/v8/src/ia32/lithium-ia32.cc b/src/3rdparty/v8/src/ia32/lithium-ia32.cc
index bce3fc8..7bb864f 100644
--- a/src/3rdparty/v8/src/ia32/lithium-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/lithium-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -225,6 +225,13 @@ void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
}
+void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
+ stream->Add("if is_string(");
+ InputAt(0)->PrintTo(stream);
+ stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
+}
+
+
void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_smi(");
InputAt(0)->PrintTo(stream);
@@ -239,6 +246,14 @@ void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
}
+void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
+ stream->Add("if string_compare(");
+ InputAt(1)->PrintTo(stream);
+ InputAt(2)->PrintTo(stream);
+ stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
+}
+
+
void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if has_instance_type(");
InputAt(0)->PrintTo(stream);
@@ -283,6 +298,12 @@ void LUnaryMathOperation::PrintDataTo(StringStream* stream) {
}
+void LMathPowHalf::PrintDataTo(StringStream* stream) {
+ stream->Add("/pow_half ");
+ InputAt(0)->PrintTo(stream);
+}
+
+
void LLoadContextSlot::PrintDataTo(StringStream* stream) {
InputAt(0)->PrintTo(stream);
stream->Add("[%d]", slot_index());
@@ -347,11 +368,7 @@ void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
int LChunk::GetNextSpillIndex(bool is_double) {
// Skip a slot if for a double-width slot.
- if (is_double) {
- spill_slot_count_ |= 1; // Make it odd, so incrementing makes it even.
- spill_slot_count_++;
- num_double_slots_++;
- }
+ if (is_double) spill_slot_count_++;
return spill_slot_count_++;
}
@@ -367,7 +384,7 @@ LOperand* LChunk::GetNextSpillSlot(bool is_double) {
void LChunk::MarkEmptyBlocks() {
- HPhase phase("Mark empty blocks", this);
+ HPhase phase("L_Mark empty blocks", this);
for (int i = 0; i < graph()->blocks()->length(); ++i) {
HBasicBlock* block = graph()->blocks()->at(i);
int first = block->first_instruction_index();
@@ -530,7 +547,7 @@ Representation LChunk::LookupLiteralRepresentation(
LChunk* LChunkBuilder::Build() {
ASSERT(is_unused());
chunk_ = new(zone()) LChunk(info(), graph());
- HPhase phase("Building chunk", chunk_);
+ HPhase phase("L_Building chunk", chunk_);
status_ = BUILDING;
const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
for (int i = 0; i < blocks->length(); i++) {
@@ -559,11 +576,6 @@ void LChunkBuilder::Abort(const char* format, ...) {
}
-LRegister* LChunkBuilder::ToOperand(Register reg) {
- return LRegister::Create(Register::ToAllocationIndex(reg));
-}
-
-
LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
Register::ToAllocationIndex(reg));
@@ -654,7 +666,7 @@ LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
HInstruction* instr = HInstruction::cast(value);
VisitInstruction(instr);
}
- allocator_->RecordUse(value, operand);
+ operand->set_virtual_register(value->id());
return operand;
}
@@ -662,19 +674,13 @@ LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
template<int I, int T>
LInstruction* LChunkBuilder::Define(LTemplateInstruction<1, I, T>* instr,
LUnallocated* result) {
- allocator_->RecordDefinition(current_instruction_, result);
+ result->set_virtual_register(current_instruction_->id());
instr->set_result(result);
return instr;
}
template<int I, int T>
-LInstruction* LChunkBuilder::Define(LTemplateInstruction<1, I, T>* instr) {
- return Define(instr, new(zone()) LUnallocated(LUnallocated::NONE));
-}
-
-
-template<int I, int T>
LInstruction* LChunkBuilder::DefineAsRegister(
LTemplateInstruction<1, I, T>* instr) {
return Define(instr,
@@ -786,21 +792,24 @@ LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
LUnallocated* LChunkBuilder::TempRegister() {
LUnallocated* operand =
new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
- allocator_->RecordTemporary(operand);
+ operand->set_virtual_register(allocator_->GetVirtualRegister());
+ if (!allocator_->AllocationOk()) {
+ Abort("Not enough virtual registers (temps).");
+ }
return operand;
}
LOperand* LChunkBuilder::FixedTemp(Register reg) {
LUnallocated* operand = ToUnallocated(reg);
- allocator_->RecordTemporary(operand);
+ ASSERT(operand->HasFixedPolicy());
return operand;
}
LOperand* LChunkBuilder::FixedTemp(XMMRegister reg) {
LUnallocated* operand = ToUnallocated(reg);
- allocator_->RecordTemporary(operand);
+ ASSERT(operand->HasFixedPolicy());
return operand;
}
@@ -992,15 +1001,18 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
LEnvironment* outer =
CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
int ast_id = hydrogen_env->ast_id();
- ASSERT(ast_id != AstNode::kNoNumber);
+ ASSERT(ast_id != AstNode::kNoNumber ||
+ hydrogen_env->frame_type() != JS_FUNCTION);
int value_count = hydrogen_env->length();
LEnvironment* result =
new(zone()) LEnvironment(hydrogen_env->closure(),
+ hydrogen_env->frame_type(),
ast_id,
hydrogen_env->parameter_count(),
argument_count_,
value_count,
outer);
+ int argument_index = *argument_index_accumulator;
for (int i = 0; i < value_count; ++i) {
if (hydrogen_env->is_special_index(i)) continue;
@@ -1009,13 +1021,17 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
if (value->IsArgumentsObject()) {
op = NULL;
} else if (value->IsPushArgument()) {
- op = new(zone()) LArgument((*argument_index_accumulator)++);
+ op = new(zone()) LArgument(argument_index++);
} else {
op = UseAny(value);
}
result->AddValue(op, value->representation());
}
+ if (hydrogen_env->frame_type() == JS_FUNCTION) {
+ *argument_index_accumulator = argument_index;
+ }
+
return result;
}
@@ -1026,22 +1042,31 @@ LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
- HValue* v = instr->value();
- if (v->EmitAtUses()) {
- ASSERT(v->IsConstant());
- ASSERT(!v->representation().IsDouble());
- HBasicBlock* successor = HConstant::cast(v)->ToBoolean()
+ HValue* value = instr->value();
+ if (value->EmitAtUses()) {
+ ASSERT(value->IsConstant());
+ ASSERT(!value->representation().IsDouble());
+ HBasicBlock* successor = HConstant::cast(value)->ToBoolean()
? instr->FirstSuccessor()
: instr->SecondSuccessor();
return new(zone()) LGoto(successor->block_id());
}
+
+ // Untagged integers or doubles, smis and booleans don't require a
+ // deoptimization environment nor a temp register.
+ Representation rep = value->representation();
+ HType type = value->type();
+ if (!rep.IsTagged() || type.IsSmi() || type.IsBoolean()) {
+ return new(zone()) LBranch(UseRegister(value), NULL);
+ }
+
ToBooleanStub::Types expected = instr->expected_input_types();
// We need a temporary register when we have to access the map *or* we have
// no type info yet, in which case we handle all cases (including the ones
// involving maps).
bool needs_temp = expected.NeedsMap() || expected.IsEmpty();
LOperand* temp = needs_temp ? TempRegister() : NULL;
- return AssignEnvironment(new(zone()) LBranch(UseRegister(v), temp));
+ return AssignEnvironment(new(zone()) LBranch(UseRegister(value), temp));
}
@@ -1082,17 +1107,25 @@ LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
}
+LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
+ LOperand* receiver = UseRegister(instr->receiver());
+ LOperand* function = UseRegisterAtStart(instr->function());
+ LOperand* temp = TempRegister();
+ LWrapReceiver* result =
+ new(zone()) LWrapReceiver(receiver, function, temp);
+ return AssignEnvironment(DefineSameAsFirst(result));
+}
+
+
LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
LOperand* function = UseFixed(instr->function(), edi);
LOperand* receiver = UseFixed(instr->receiver(), eax);
LOperand* length = UseFixed(instr->length(), ebx);
LOperand* elements = UseFixed(instr->elements(), ecx);
- LOperand* temp = FixedTemp(edx);
LApplyArguments* result = new(zone()) LApplyArguments(function,
receiver,
length,
- elements,
- temp);
+ elements);
return MarkAsCall(DefineFixed(result, eax), instr, CAN_DEOPTIMIZE_EAGERLY);
}
@@ -1122,9 +1155,15 @@ LInstruction* LChunkBuilder::DoOuterContext(HOuterContext* instr) {
}
+LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
+ LOperand* context = UseFixed(instr->context(), esi);
+ return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
+}
+
+
LInstruction* LChunkBuilder::DoGlobalObject(HGlobalObject* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new(zone()) LGlobalObject(context, instr->qml_global()));
+ return DefineAsRegister(new(zone()) LGlobalObject(context));
}
@@ -1160,7 +1199,7 @@ LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(context,
input);
return DefineSameAsFirst(result);
- } else if (op == kMathSin || op == kMathCos) {
+ } else if (op == kMathSin || op == kMathCos || op == kMathTan) {
LOperand* context = UseFixed(instr->context(), esi);
LOperand* input = UseFixedDouble(instr->value(), xmm1);
LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(context,
@@ -1169,6 +1208,11 @@ LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
} else {
LOperand* input = UseRegisterAtStart(instr->value());
LOperand* context = UseAny(instr->context()); // Deferred use by MathAbs.
+ if (op == kMathPowHalf) {
+ LOperand* temp = TempRegister();
+ LMathPowHalf* result = new(zone()) LMathPowHalf(context, input, temp);
+ return DefineSameAsFirst(result);
+ }
LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(context,
input);
switch (op) {
@@ -1180,8 +1224,6 @@ LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
return AssignEnvironment(DefineAsRegister(result));
case kMathSqrt:
return DefineSameAsFirst(result);
- case kMathPowHalf:
- return DefineSameAsFirst(result);
default:
UNREACHABLE();
return NULL;
@@ -1211,7 +1253,7 @@ LInstruction* LChunkBuilder::DoCallNamed(HCallNamed* instr) {
LInstruction* LChunkBuilder::DoCallGlobal(HCallGlobal* instr) {
LOperand* context = UseFixed(instr->context(), esi);
argument_count_ -= instr->argument_count();
- LCallGlobal* result = new(zone()) LCallGlobal(context, instr->qml_global());
+ LCallGlobal* result = new(zone()) LCallGlobal(context);
return MarkAsCall(DefineFixed(result, eax), instr);
}
@@ -1233,8 +1275,9 @@ LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
LOperand* context = UseFixed(instr->context(), esi);
+ LOperand* function = UseFixed(instr->function(), edi);
argument_count_ -= instr->argument_count();
- LCallFunction* result = new(zone()) LCallFunction(context);
+ LCallFunction* result = new(zone()) LCallFunction(context, function);
return MarkAsCall(DefineFixed(result, eax), instr);
}
@@ -1363,7 +1406,11 @@ LInstruction* LChunkBuilder::DoMul(HMul* instr) {
temp = TempRegister();
}
LMulI* mul = new(zone()) LMulI(left, right, temp);
- return AssignEnvironment(DefineSameAsFirst(mul));
+ if (instr->CheckFlag(HValue::kCanOverflow) ||
+ instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ AssignEnvironment(mul);
+ }
+ return DefineSameAsFirst(mul);
} else if (instr->representation().IsDouble()) {
return DoArithmeticD(Token::MUL, instr);
} else {
@@ -1421,9 +1468,9 @@ LInstruction* LChunkBuilder::DoPower(HPower* instr) {
// We need to use fixed result register for the call.
Representation exponent_type = instr->right()->representation();
ASSERT(instr->left()->representation().IsDouble());
- LOperand* left = UseFixedDouble(instr->left(), xmm1);
+ LOperand* left = UseFixedDouble(instr->left(), xmm2);
LOperand* right = exponent_type.IsDouble() ?
- UseFixedDouble(instr->right(), xmm2) :
+ UseFixedDouble(instr->right(), xmm1) :
UseFixed(instr->right(), eax);
LPower* result = new(zone()) LPower(left, right);
return MarkAsCall(DefineFixedDouble(result, xmm3), instr,
@@ -1431,6 +1478,15 @@ LInstruction* LChunkBuilder::DoPower(HPower* instr) {
}
+LInstruction* LChunkBuilder::DoRandom(HRandom* instr) {
+ ASSERT(instr->representation().IsDouble());
+ ASSERT(instr->global_object()->representation().IsTagged());
+ LOperand* global_object = UseFixed(instr->global_object(), eax);
+ LRandom* result = new(zone()) LRandom(global_object);
+ return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
+}
+
+
LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
ASSERT(instr->left()->representation().IsTagged());
ASSERT(instr->right()->representation().IsTagged());
@@ -1498,6 +1554,13 @@ LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
}
+LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
+ ASSERT(instr->value()->representation().IsTagged());
+ LOperand* temp = TempRegister();
+ return new LIsStringAndBranch(UseRegister(instr->value()), temp);
+}
+
+
LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
return new(zone()) LIsSmiAndBranch(Use(instr->value()));
@@ -1512,6 +1575,21 @@ LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
}
+LInstruction* LChunkBuilder::DoStringCompareAndBranch(
+ HStringCompareAndBranch* instr) {
+ ASSERT(instr->left()->representation().IsTagged());
+ ASSERT(instr->right()->representation().IsTagged());
+ LOperand* context = UseFixed(instr->context(), esi);
+ LOperand* left = UseFixed(instr->left(), edx);
+ LOperand* right = UseFixed(instr->right(), eax);
+
+ LStringCompareAndBranch* result = new
+ LStringCompareAndBranch(context, left, right);
+
+ return MarkAsCall(result, instr);
+}
+
+
LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
HHasInstanceTypeAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
@@ -1541,9 +1619,9 @@ LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
HClassOfTestAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new(zone()) LClassOfTestAndBranch(UseTempRegister(instr->value()),
- TempRegister(),
- TempRegister());
+ return new(zone()) LClassOfTestAndBranch(UseRegister(instr->value()),
+ TempRegister(),
+ TempRegister());
}
@@ -1569,7 +1647,15 @@ LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
LOperand* object = UseRegister(instr->value());
LValueOf* result = new(zone()) LValueOf(object, TempRegister());
- return AssignEnvironment(DefineSameAsFirst(result));
+ return DefineSameAsFirst(result);
+}
+
+
+LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
+ LOperand* date = UseFixed(instr->value(), eax);
+ LDateField* result =
+ new(zone()) LDateField(date, FixedTemp(ecx), instr->index());
+ return MarkAsCall(DefineFixed(result, eax), instr);
}
@@ -1613,7 +1699,11 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
if (from.IsTagged()) {
if (to.IsDouble()) {
LOperand* value = UseRegister(instr->value());
- LNumberUntagD* res = new(zone()) LNumberUntagD(value);
+ // Temp register only necessary for minus zero check.
+ LOperand* temp = instr->deoptimize_on_minus_zero()
+ ? TempRegister()
+ : NULL;
+ LNumberUntagD* res = new(zone()) LNumberUntagD(value, temp);
return AssignEnvironment(DefineAsRegister(res));
} else {
ASSERT(to.IsInteger32());
@@ -1710,9 +1800,9 @@ LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
}
-LInstruction* LChunkBuilder::DoCheckMap(HCheckMap* instr) {
+LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- LCheckMap* result = new(zone()) LCheckMap(value);
+ LCheckMaps* result = new(zone()) LCheckMaps(value);
return AssignEnvironment(result);
}
@@ -1738,34 +1828,6 @@ LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
}
-LInstruction* LChunkBuilder::DoToInt32(HToInt32* instr) {
- HValue* value = instr->value();
- Representation input_rep = value->representation();
-
- LInstruction* result;
- if (input_rep.IsDouble()) {
- LOperand* reg = UseRegister(value);
- LOperand* temp_reg =
- CpuFeatures::IsSupported(SSE3) ? NULL : TempRegister();
- result = DefineAsRegister(new(zone()) LDoubleToI(reg, temp_reg));
- } else if (input_rep.IsInteger32()) {
- // Canonicalization should already have removed the hydrogen instruction in
- // this case, since it is a noop.
- UNREACHABLE();
- return NULL;
- } else {
- ASSERT(input_rep.IsTagged());
- LOperand* reg = UseRegister(value);
- // Register allocator doesn't (yet) support allocation of double
- // temps. Reserve xmm1 explicitly.
- LOperand* xmm_temp =
- CpuFeatures::IsSupported(SSE3) ? NULL : FixedTemp(xmm1);
- result = DefineSameAsFirst(new(zone()) LTaggedToI(reg, xmm_temp));
- }
- return AssignEnvironment(result);
-}
-
-
LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
return new(zone()) LReturn(UseFixed(instr->value(), eax));
}
@@ -1809,9 +1871,7 @@ LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
LStoreGlobalCell* result =
- new(zone()) LStoreGlobalCell(UseTempRegister(instr->value()),
- TempRegister(),
- TempRegister());
+ new(zone()) LStoreGlobalCell(UseRegister(instr->value()));
return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
@@ -1828,7 +1888,9 @@ LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new(zone()) LLoadContextSlot(context));
+ LInstruction* result =
+ DefineAsRegister(new(zone()) LLoadContextSlot(context));
+ return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
@@ -1843,7 +1905,8 @@ LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
value = UseRegister(instr->value());
temp = NULL;
}
- return new(zone()) LStoreContextSlot(context, value, temp);
+ LInstruction* result = new(zone()) LStoreContextSlot(context, value, temp);
+ return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
@@ -1857,13 +1920,14 @@ LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
HLoadNamedFieldPolymorphic* instr) {
ASSERT(instr->representation().IsTagged());
- LOperand* context = UseFixed(instr->context(), esi);
if (instr->need_generic()) {
+ LOperand* context = UseFixed(instr->context(), esi);
LOperand* obj = UseFixed(instr->object(), eax);
LLoadNamedFieldPolymorphic* result =
new(zone()) LLoadNamedFieldPolymorphic(context, obj);
return MarkAsCall(DefineFixed(result, eax), instr);
} else {
+ LOperand* context = UseAny(instr->context()); // Not actually used.
LOperand* obj = UseRegisterAtStart(instr->object());
LLoadNamedFieldPolymorphic* result =
new(zone()) LLoadNamedFieldPolymorphic(context, obj);
@@ -1908,7 +1972,8 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
LOperand* obj = UseRegisterAtStart(instr->object());
LOperand* key = UseRegisterOrConstantAtStart(instr->key());
LLoadKeyedFastElement* result = new(zone()) LLoadKeyedFastElement(obj, key);
- return AssignEnvironment(DefineAsRegister(result));
+ if (instr->RequiresHoleCheck()) AssignEnvironment(result);
+ return DefineAsRegister(result);
}
@@ -1927,12 +1992,11 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
HLoadKeyedSpecializedArrayElement* instr) {
ElementsKind elements_kind = instr->elements_kind();
- Representation representation(instr->representation());
ASSERT(
- (representation.IsInteger32() &&
+ (instr->representation().IsInteger32() &&
(elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
(elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
- (representation.IsDouble() &&
+ (instr->representation().IsDouble() &&
((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
(elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
ASSERT(instr->key()->representation().IsInteger32());
@@ -1975,8 +2039,7 @@ LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
LOperand* key = needs_write_barrier
? UseTempRegister(instr->key())
: UseRegisterOrConstantAtStart(instr->key());
-
- return AssignEnvironment(new(zone()) LStoreKeyedFastElement(obj, key, val));
+ return new(zone()) LStoreKeyedFastElement(obj, key, val);
}
@@ -1996,13 +2059,12 @@ LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
HStoreKeyedSpecializedArrayElement* instr) {
- Representation representation(instr->value()->representation());
ElementsKind elements_kind = instr->elements_kind();
ASSERT(
- (representation.IsInteger32() &&
+ (instr->value()->representation().IsInteger32() &&
(elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
(elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
- (representation.IsDouble() &&
+ (instr->value()->representation().IsDouble() &&
((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
(elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
ASSERT(instr->external_pointer()->representation().IsExternal());
@@ -2136,6 +2198,21 @@ LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
}
+LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
+ LOperand* context = UseFixed(instr->context(), esi);
+ LOperand* temp = TempRegister();
+ LAllocateObject* result = new(zone()) LAllocateObject(context, temp);
+ return AssignPointerMap(DefineAsRegister(result));
+}
+
+
+LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
+ LOperand* context = UseFixed(instr->context(), esi);
+ return MarkAsCall(
+ DefineFixed(new(zone()) LFastLiteral(context), eax), instr);
+}
+
+
LInstruction* LChunkBuilder::DoArrayLiteral(HArrayLiteral* instr) {
LOperand* context = UseFixed(instr->context(), esi);
return MarkAsCall(
@@ -2298,9 +2375,14 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
HEnvironment* outer = current_block_->last_environment();
HConstant* undefined = graph()->GetConstantUndefined();
HEnvironment* inner = outer->CopyForInlining(instr->closure(),
+ instr->arguments_count(),
instr->function(),
undefined,
- instr->call_kind());
+ instr->call_kind(),
+ instr->is_construct());
+ if (instr->arguments() != NULL) {
+ inner->Bind(instr->arguments(), graph()->GetArgumentsObject());
+ }
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
return NULL;
@@ -2308,7 +2390,8 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
- HEnvironment* outer = current_block_->last_environment()->outer();
+ HEnvironment* outer = current_block_->last_environment()->
+ DiscardInlined(false);
current_block_->UpdateEnvironment(outer);
return NULL;
}
@@ -2323,6 +2406,35 @@ LInstruction* LChunkBuilder::DoIn(HIn* instr) {
}
+LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
+ LOperand* context = UseFixed(instr->context(), esi);
+ LOperand* object = UseFixed(instr->enumerable(), eax);
+ LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
+ return MarkAsCall(DefineFixed(result, eax), instr, CAN_DEOPTIMIZE_EAGERLY);
+}
+
+
+LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
+ LOperand* map = UseRegister(instr->map());
+ return AssignEnvironment(DefineAsRegister(
+ new(zone()) LForInCacheArray(map)));
+}
+
+
+LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
+ LOperand* value = UseRegisterAtStart(instr->value());
+ LOperand* map = UseRegisterAtStart(instr->map());
+ return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
+}
+
+
+LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
+ LOperand* object = UseRegister(instr->object());
+ LOperand* index = UseTempRegister(instr->index());
+ return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index));
+}
+
+
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_IA32
diff --git a/src/3rdparty/v8/src/ia32/lithium-ia32.h b/src/3rdparty/v8/src/ia32/lithium-ia32.h
index 820d188..0cfed12 100644
--- a/src/3rdparty/v8/src/ia32/lithium-ia32.h
+++ b/src/3rdparty/v8/src/ia32/lithium-ia32.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -43,6 +43,7 @@ class LCodeGen;
#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V) \
V(AccessArgumentsAt) \
V(AddI) \
+ V(AllocateObject) \
V(ApplyArguments) \
V(ArgumentsElements) \
V(ArgumentsLength) \
@@ -64,7 +65,7 @@ class LCodeGen;
V(CallStub) \
V(CheckFunction) \
V(CheckInstanceType) \
- V(CheckMap) \
+ V(CheckMaps) \
V(CheckNonSmi) \
V(CheckPrototypeMaps) \
V(CheckSmi) \
@@ -81,11 +82,13 @@ class LCodeGen;
V(ConstantI) \
V(ConstantT) \
V(Context) \
+ V(DeclareGlobals) \
V(DeleteProperty) \
V(Deoptimize) \
V(DivI) \
V(DoubleToI) \
V(ElementsKind) \
+ V(FastLiteral) \
V(FixedArrayBaseLength) \
V(FunctionLiteral) \
V(GetCachedArrayIndex) \
@@ -103,8 +106,10 @@ class LCodeGen;
V(IsConstructCallAndBranch) \
V(IsNilAndBranch) \
V(IsObjectAndBranch) \
+ V(IsStringAndBranch) \
V(IsSmiAndBranch) \
V(IsUndetectableAndBranch) \
+ V(StringCompareAndBranch) \
V(JSArrayLength) \
V(Label) \
V(LazyBailout) \
@@ -121,6 +126,7 @@ class LCodeGen;
V(LoadNamedField) \
V(LoadNamedFieldPolymorphic) \
V(LoadNamedGeneric) \
+ V(MathPowHalf) \
V(ModI) \
V(MulI) \
V(NumberTagD) \
@@ -131,6 +137,7 @@ class LCodeGen;
V(OuterContext) \
V(Parameter) \
V(Power) \
+ V(Random) \
V(PushArgument) \
V(RegExpLiteral) \
V(Return) \
@@ -161,7 +168,13 @@ class LCodeGen;
V(TypeofIsAndBranch) \
V(UnaryMathOperation) \
V(UnknownOSRValue) \
- V(ValueOf)
+ V(ValueOf) \
+ V(ForInPrepareMap) \
+ V(ForInCacheArray) \
+ V(CheckMapValue) \
+ V(LoadFieldByIndex) \
+ V(DateField) \
+ V(WrapReceiver)
#define DECLARE_CONCRETE_INSTRUCTION(type, mnemonic) \
@@ -366,17 +379,7 @@ class LGoto: public LTemplateInstruction<0, 0, 0> {
class LLazyBailout: public LTemplateInstruction<0, 0, 0> {
public:
- LLazyBailout() : gap_instructions_size_(0) { }
-
DECLARE_CONCRETE_INSTRUCTION(LazyBailout, "lazy-bailout")
-
- void set_gap_instructions_size(int gap_instructions_size) {
- gap_instructions_size_ = gap_instructions_size;
- }
- int gap_instructions_size() { return gap_instructions_size_; }
-
- private:
- int gap_instructions_size_;
};
@@ -454,18 +457,33 @@ class LControlInstruction: public LTemplateInstruction<0, I, T> {
};
-class LApplyArguments: public LTemplateInstruction<1, 4, 1> {
+class LWrapReceiver: public LTemplateInstruction<1, 2, 1> {
+ public:
+ LWrapReceiver(LOperand* receiver,
+ LOperand* function,
+ LOperand* temp) {
+ inputs_[0] = receiver;
+ inputs_[1] = function;
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(WrapReceiver, "wrap-receiver")
+
+ LOperand* receiver() { return inputs_[0]; }
+ LOperand* function() { return inputs_[1]; }
+};
+
+
+class LApplyArguments: public LTemplateInstruction<1, 4, 0> {
public:
LApplyArguments(LOperand* function,
LOperand* receiver,
LOperand* length,
- LOperand* elements,
- LOperand* temp) {
+ LOperand* elements) {
inputs_[0] = function;
inputs_[1] = receiver;
inputs_[2] = length;
inputs_[3] = elements;
- temps_[0] = temp;
}
DECLARE_CONCRETE_INSTRUCTION(ApplyArguments, "apply-arguments")
@@ -589,6 +607,24 @@ class LUnaryMathOperation: public LTemplateInstruction<1, 2, 0> {
};
+class LMathPowHalf: public LTemplateInstruction<1, 2, 1> {
+ public:
+ LMathPowHalf(LOperand* context, LOperand* value, LOperand* temp) {
+ inputs_[1] = context;
+ inputs_[0] = value;
+ temps_[0] = temp;
+ }
+
+ LOperand* context() { return inputs_[1]; }
+ LOperand* value() { return inputs_[0]; }
+ LOperand* temp() { return temps_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(MathPowHalf, "math-pow-half")
+
+ virtual void PrintDataTo(StringStream* stream);
+};
+
+
class LCmpObjectEqAndBranch: public LControlInstruction<2, 0> {
public:
LCmpObjectEqAndBranch(LOperand* left, LOperand* right) {
@@ -643,6 +679,19 @@ class LIsObjectAndBranch: public LControlInstruction<1, 1> {
};
+class LIsStringAndBranch: public LControlInstruction<1, 1> {
+ public:
+ LIsStringAndBranch(LOperand* value, LOperand* temp) {
+ inputs_[0] = value;
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(IsStringAndBranch, "is-string-and-branch")
+
+ virtual void PrintDataTo(StringStream* stream);
+};
+
+
class LIsSmiAndBranch: public LControlInstruction<1, 0> {
public:
explicit LIsSmiAndBranch(LOperand* value) {
@@ -670,6 +719,24 @@ class LIsUndetectableAndBranch: public LControlInstruction<1, 1> {
};
+class LStringCompareAndBranch: public LControlInstruction<3, 0> {
+ public:
+ LStringCompareAndBranch(LOperand* context, LOperand* left, LOperand* right) {
+ inputs_[0] = context;
+ inputs_[1] = left;
+ inputs_[2] = right;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(StringCompareAndBranch,
+ "string-compare-and-branch")
+ DECLARE_HYDROGEN_ACCESSOR(StringCompareAndBranch)
+
+ virtual void PrintDataTo(StringStream* stream);
+
+ Token::Value op() const { return hydrogen()->token(); }
+};
+
+
class LHasInstanceTypeAndBranch: public LControlInstruction<1, 1> {
public:
LHasInstanceTypeAndBranch(LOperand* value, LOperand* temp) {
@@ -952,6 +1019,24 @@ class LValueOf: public LTemplateInstruction<1, 1, 1> {
};
+class LDateField: public LTemplateInstruction<1, 1, 1> {
+ public:
+ LDateField(LOperand* date, LOperand* temp, Smi* index)
+ : index_(index) {
+ inputs_[0] = date;
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(DateField, "date-field")
+ DECLARE_HYDROGEN_ACCESSOR(DateField)
+
+ Smi* index() const { return index_; }
+
+ private:
+ Smi* index_;
+};
+
+
class LThrow: public LTemplateInstruction<0, 2, 0> {
public:
LThrow(LOperand* context, LOperand* value) {
@@ -1000,6 +1085,17 @@ class LPower: public LTemplateInstruction<1, 2, 0> {
};
+class LRandom: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LRandom(LOperand* global_object) {
+ inputs_[0] = global_object;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(Random, "random")
+ DECLARE_HYDROGEN_ACCESSOR(Random)
+};
+
+
class LArithmeticD: public LTemplateInstruction<1, 2, 0> {
public:
LArithmeticD(Token::Value op, LOperand* left, LOperand* right)
@@ -1226,16 +1322,16 @@ class LLoadGlobalGeneric: public LTemplateInstruction<1, 2, 0> {
};
-class LStoreGlobalCell: public LTemplateInstruction<0, 1, 2> {
+class LStoreGlobalCell: public LTemplateInstruction<0, 1, 0> {
public:
- explicit LStoreGlobalCell(LOperand* value, LOperand* temp1, LOperand* temp2) {
+ explicit LStoreGlobalCell(LOperand* value) {
inputs_[0] = value;
- temps_[0] = temp1;
- temps_[1] = temp2;
}
DECLARE_CONCRETE_INSTRUCTION(StoreGlobalCell, "store-global-cell")
DECLARE_HYDROGEN_ACCESSOR(StoreGlobalCell)
+
+ LOperand* value() { return inputs_[0]; }
};
@@ -1256,7 +1352,7 @@ class LStoreGlobalGeneric: public LTemplateInstruction<0, 3, 0> {
LOperand* global_object() { return InputAt(1); }
Handle<Object> name() const { return hydrogen()->name(); }
LOperand* value() { return InputAt(2); }
- bool strict_mode() { return hydrogen()->strict_mode(); }
+ StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
@@ -1330,19 +1426,26 @@ class LOuterContext: public LTemplateInstruction<1, 1, 0> {
};
+class LDeclareGlobals: public LTemplateInstruction<0, 1, 0> {
+ public:
+ explicit LDeclareGlobals(LOperand* context) {
+ inputs_[0] = context;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(DeclareGlobals, "declare-globals")
+ DECLARE_HYDROGEN_ACCESSOR(DeclareGlobals)
+};
+
+
class LGlobalObject: public LTemplateInstruction<1, 1, 0> {
public:
- explicit LGlobalObject(LOperand* context, bool qml_global) {
+ explicit LGlobalObject(LOperand* context) {
inputs_[0] = context;
- qml_global_ = qml_global;
}
DECLARE_CONCRETE_INSTRUCTION(GlobalObject, "global-object")
LOperand* context() { return InputAt(0); }
- bool qml_global() { return qml_global_; }
- private:
- bool qml_global_;
};
@@ -1425,23 +1528,25 @@ class LCallNamed: public LTemplateInstruction<1, 1, 0> {
};
-class LCallFunction: public LTemplateInstruction<1, 1, 0> {
+class LCallFunction: public LTemplateInstruction<1, 2, 0> {
public:
- explicit LCallFunction(LOperand* context) {
+ explicit LCallFunction(LOperand* context, LOperand* function) {
inputs_[0] = context;
+ inputs_[1] = function;
}
DECLARE_CONCRETE_INSTRUCTION(CallFunction, "call-function")
DECLARE_HYDROGEN_ACCESSOR(CallFunction)
LOperand* context() { return inputs_[0]; }
- int arity() const { return hydrogen()->argument_count() - 2; }
+ LOperand* function() { return inputs_[1]; }
+ int arity() const { return hydrogen()->argument_count() - 1; }
};
class LCallGlobal: public LTemplateInstruction<1, 1, 0> {
public:
- explicit LCallGlobal(LOperand* context, bool qml_global) : qml_global_(qml_global) {
+ explicit LCallGlobal(LOperand* context) {
inputs_[0] = context;
}
@@ -1453,10 +1558,6 @@ class LCallGlobal: public LTemplateInstruction<1, 1, 0> {
LOperand* context() { return inputs_[0]; }
Handle<String> name() const {return hydrogen()->name(); }
int arity() const { return hydrogen()->argument_count() - 1; }
-
- bool qml_global() { return qml_global_; }
- private:
- bool qml_global_;
};
@@ -1575,10 +1676,11 @@ class LSmiTag: public LTemplateInstruction<1, 1, 0> {
};
-class LNumberUntagD: public LTemplateInstruction<1, 1, 0> {
+class LNumberUntagD: public LTemplateInstruction<1, 1, 1> {
public:
- explicit LNumberUntagD(LOperand* value) {
+ explicit LNumberUntagD(LOperand* value, LOperand* temp) {
inputs_[0] = value;
+ temps_[0] = temp;
}
DECLARE_CONCRETE_INSTRUCTION(NumberUntagD, "double-untag")
@@ -1643,7 +1745,6 @@ class LStoreNamedGeneric: public LTemplateInstruction<0, 3, 0> {
LOperand* value() { return inputs_[2]; }
Handle<Object> name() const { return hydrogen()->name(); }
StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
- bool strict_mode() { return strict_mode_flag() == kStrictMode; }
};
@@ -1733,7 +1834,7 @@ class LStoreKeyedGeneric: public LTemplateInstruction<0, 4, 0> {
LOperand* object() { return inputs_[1]; }
LOperand* key() { return inputs_[2]; }
LOperand* value() { return inputs_[3]; }
- bool strict_mode() { return hydrogen()->strict_mode(); }
+ StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
@@ -1848,14 +1949,14 @@ class LCheckInstanceType: public LTemplateInstruction<0, 1, 1> {
};
-class LCheckMap: public LTemplateInstruction<0, 1, 0> {
+class LCheckMaps: public LTemplateInstruction<0, 1, 0> {
public:
- explicit LCheckMap(LOperand* value) {
+ explicit LCheckMaps(LOperand* value) {
inputs_[0] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(CheckMap, "check-map")
- DECLARE_HYDROGEN_ACCESSOR(CheckMap)
+ DECLARE_CONCRETE_INSTRUCTION(CheckMaps, "check-maps")
+ DECLARE_HYDROGEN_ACCESSOR(CheckMaps)
};
@@ -1930,6 +2031,33 @@ class LCheckNonSmi: public LTemplateInstruction<0, 1, 0> {
};
+class LAllocateObject: public LTemplateInstruction<1, 1, 1> {
+ public:
+ LAllocateObject(LOperand* context, LOperand* temp) {
+ inputs_[0] = context;
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(AllocateObject, "allocate-object")
+ DECLARE_HYDROGEN_ACCESSOR(AllocateObject)
+
+ LOperand* context() { return inputs_[0]; }
+};
+
+
+class LFastLiteral: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LFastLiteral(LOperand* context) {
+ inputs_[0] = context;
+ }
+
+ LOperand* context() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(FastLiteral, "fast-literal")
+ DECLARE_HYDROGEN_ACCESSOR(FastLiteral)
+};
+
+
class LArrayLiteral: public LTemplateInstruction<1, 1, 0> {
public:
explicit LArrayLiteral(LOperand* context) {
@@ -2094,6 +2222,64 @@ class LIn: public LTemplateInstruction<1, 3, 0> {
};
+class LForInPrepareMap: public LTemplateInstruction<1, 2, 0> {
+ public:
+ LForInPrepareMap(LOperand* context, LOperand* object) {
+ inputs_[0] = context;
+ inputs_[1] = object;
+ }
+
+ LOperand* context() { return inputs_[0]; }
+ LOperand* object() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(ForInPrepareMap, "for-in-prepare-map")
+};
+
+
+class LForInCacheArray: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LForInCacheArray(LOperand* map) {
+ inputs_[0] = map;
+ }
+
+ LOperand* map() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(ForInCacheArray, "for-in-cache-array")
+
+ int idx() {
+ return HForInCacheArray::cast(this->hydrogen_value())->idx();
+ }
+};
+
+
+class LCheckMapValue: public LTemplateInstruction<0, 2, 0> {
+ public:
+ LCheckMapValue(LOperand* value, LOperand* map) {
+ inputs_[0] = value;
+ inputs_[1] = map;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+ LOperand* map() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(CheckMapValue, "check-map-value")
+};
+
+
+class LLoadFieldByIndex: public LTemplateInstruction<1, 2, 0> {
+ public:
+ LLoadFieldByIndex(LOperand* object, LOperand* index) {
+ inputs_[0] = object;
+ inputs_[1] = index;
+ }
+
+ LOperand* object() { return inputs_[0]; }
+ LOperand* index() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadFieldByIndex, "load-field-by-index")
+};
+
+
class LChunkBuilder;
class LChunk: public ZoneObject {
public:
@@ -2103,7 +2289,6 @@ class LChunk: public ZoneObject {
graph_(graph),
instructions_(32),
pointer_maps_(8),
- num_double_slots_(0),
inlined_closures_(1) { }
void AddInstruction(LInstruction* instruction, HBasicBlock* block);
@@ -2117,8 +2302,6 @@ class LChunk: public ZoneObject {
int ParameterAt(int index);
int GetParameterStackSlot(int index) const;
int spill_slot_count() const { return spill_slot_count_; }
- int num_double_slots() const { return num_double_slots_; }
-
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
@@ -2160,7 +2343,6 @@ class LChunk: public ZoneObject {
HGraph* const graph_;
ZoneList<LInstruction*> instructions_;
ZoneList<LPointerMap*> pointer_maps_;
- int num_double_slots_;
ZoneList<Handle<JSFunction> > inlined_closures_;
};
@@ -2171,7 +2353,7 @@ class LChunkBuilder BASE_EMBEDDED {
: chunk_(NULL),
info_(info),
graph_(graph),
- isolate_(graph->isolate()),
+ zone_(graph->isolate()->zone()),
status_(UNUSED),
current_instruction_(NULL),
current_block_(NULL),
@@ -2201,7 +2383,7 @@ class LChunkBuilder BASE_EMBEDDED {
LChunk* chunk() const { return chunk_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
- Zone* zone() { return isolate_->zone(); }
+ Zone* zone() { return zone_; }
bool is_unused() const { return status_ == UNUSED; }
bool is_building() const { return status_ == BUILDING; }
@@ -2211,7 +2393,6 @@ class LChunkBuilder BASE_EMBEDDED {
void Abort(const char* format, ...);
// Methods for getting operands for Use / Define / Temp.
- LRegister* ToOperand(Register reg);
LUnallocated* ToUnallocated(Register reg);
LUnallocated* ToUnallocated(XMMRegister reg);
@@ -2262,8 +2443,6 @@ class LChunkBuilder BASE_EMBEDDED {
LInstruction* Define(LTemplateInstruction<1, I, T>* instr,
LUnallocated* result);
template<int I, int T>
- LInstruction* Define(LTemplateInstruction<1, I, T>* instr);
- template<int I, int T>
LInstruction* DefineAsRegister(LTemplateInstruction<1, I, T>* instr);
template<int I, int T>
LInstruction* DefineAsSpilled(LTemplateInstruction<1, I, T>* instr,
@@ -2313,7 +2492,7 @@ class LChunkBuilder BASE_EMBEDDED {
LChunk* chunk_;
CompilationInfo* info_;
HGraph* const graph_;
- Isolate* isolate_;
+ Zone* zone_;
Status status_;
HInstruction* current_instruction_;
HBasicBlock* current_block_;
diff --git a/src/3rdparty/v8/src/ia32/macro-assembler-ia32.cc b/src/3rdparty/v8/src/ia32/macro-assembler-ia32.cc
index 1676a70..60e38a6 100644
--- a/src/3rdparty/v8/src/ia32/macro-assembler-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/macro-assembler-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -357,6 +357,14 @@ void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
}
+void MacroAssembler::CompareRoot(const Operand& with,
+ Heap::RootListIndex index) {
+ // see ROOT_ACCESSOR macro in factory.h
+ Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
+ cmp(with, value);
+}
+
+
void MacroAssembler::CmpObjectType(Register heap_object,
InstanceType type,
Register map) {
@@ -479,15 +487,48 @@ void MacroAssembler::StoreNumberToDoubleElements(
}
+void MacroAssembler::CompareMap(Register obj,
+ Handle<Map> map,
+ Label* early_success,
+ CompareMapMode mode) {
+ cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
+ if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
+ Map* transitioned_fast_element_map(
+ map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL));
+ ASSERT(transitioned_fast_element_map == NULL ||
+ map->elements_kind() != FAST_ELEMENTS);
+ if (transitioned_fast_element_map != NULL) {
+ j(equal, early_success, Label::kNear);
+ cmp(FieldOperand(obj, HeapObject::kMapOffset),
+ Handle<Map>(transitioned_fast_element_map));
+ }
+
+ Map* transitioned_double_map(
+ map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL));
+ ASSERT(transitioned_double_map == NULL ||
+ map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
+ if (transitioned_double_map != NULL) {
+ j(equal, early_success, Label::kNear);
+ cmp(FieldOperand(obj, HeapObject::kMapOffset),
+ Handle<Map>(transitioned_double_map));
+ }
+ }
+}
+
+
void MacroAssembler::CheckMap(Register obj,
Handle<Map> map,
Label* fail,
- SmiCheckType smi_check_type) {
+ SmiCheckType smi_check_type,
+ CompareMapMode mode) {
if (smi_check_type == DO_SMI_CHECK) {
JumpIfSmi(obj, fail);
}
- cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
+
+ Label success;
+ CompareMap(obj, map, &success, mode);
j(not_equal, fail);
+ bind(&success);
}
@@ -540,8 +581,7 @@ void MacroAssembler::IsInstanceJSObjectType(Register map,
void MacroAssembler::FCmp() {
if (CpuFeatures::IsSupported(CMOV)) {
fucomip();
- ffree(0);
- fincstp();
+ fstp(0);
} else {
fucompp();
push(eax);
@@ -609,7 +649,7 @@ void MacroAssembler::LeaveFrame(StackFrame::Type type) {
void MacroAssembler::EnterExitFramePrologue() {
- // Setup the frame structure on the stack.
+ // Set up the frame structure on the stack.
ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
@@ -661,7 +701,7 @@ void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
void MacroAssembler::EnterExitFrame(bool save_doubles) {
EnterExitFramePrologue();
- // Setup argc and argv in callee-saved registers.
+ // Set up argc and argv in callee-saved registers.
int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
mov(edi, eax);
lea(esi, Operand(ebp, eax, times_4, offset));
@@ -724,148 +764,145 @@ void MacroAssembler::LeaveApiExitFrame() {
}
-void MacroAssembler::PushTryHandler(CodeLocation try_location,
- HandlerType type) {
+void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
+ int handler_index) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
- STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
- // The pc (return address) is already on TOS.
- if (try_location == IN_JAVASCRIPT) {
- if (type == TRY_CATCH_HANDLER) {
- push(Immediate(StackHandler::TRY_CATCH));
- } else {
- push(Immediate(StackHandler::TRY_FINALLY));
- }
- push(ebp);
- push(esi);
- } else {
- ASSERT(try_location == IN_JS_ENTRY);
- // The frame pointer does not point to a JS frame so we save NULL
- // for ebp. We expect the code throwing an exception to check ebp
- // before dereferencing it to restore the context.
- push(Immediate(StackHandler::ENTRY));
+ STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
+
+ // We will build up the handler from the bottom by pushing on the stack.
+ // First push the frame pointer and context.
+ if (kind == StackHandler::JS_ENTRY) {
+ // The frame pointer does not point to a JS frame so we save NULL for
+ // ebp. We expect the code throwing an exception to check ebp before
+ // dereferencing it to restore the context.
push(Immediate(0)); // NULL frame pointer.
push(Immediate(Smi::FromInt(0))); // No context.
+ } else {
+ push(ebp);
+ push(esi);
}
- // Save the current handler as the next handler.
- push(Operand::StaticVariable(ExternalReference(Isolate::kHandlerAddress,
- isolate())));
- // Link this handler as the new current one.
- mov(Operand::StaticVariable(ExternalReference(Isolate::kHandlerAddress,
- isolate())),
- esp);
+ // Push the state and the code object.
+ unsigned state =
+ StackHandler::IndexField::encode(handler_index) |
+ StackHandler::KindField::encode(kind);
+ push(Immediate(state));
+ Push(CodeObject());
+
+ // Link the current handler as the next handler.
+ ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
+ push(Operand::StaticVariable(handler_address));
+ // Set this new handler as the current one.
+ mov(Operand::StaticVariable(handler_address), esp);
}
void MacroAssembler::PopTryHandler() {
STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
- pop(Operand::StaticVariable(ExternalReference(Isolate::kHandlerAddress,
- isolate())));
+ ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
+ pop(Operand::StaticVariable(handler_address));
add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
+void MacroAssembler::JumpToHandlerEntry() {
+ // Compute the handler entry address and jump to it. The handler table is
+ // a fixed array of (smi-tagged) code offsets.
+ // eax = exception, edi = code object, edx = state.
+ mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
+ shr(edx, StackHandler::kKindWidth);
+ mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
+ SmiUntag(edx);
+ lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
+ jmp(edi);
+}
+
+
void MacroAssembler::Throw(Register value) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
- STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
- // eax must hold the exception.
+ STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
+
+ // The exception is expected in eax.
if (!value.is(eax)) {
mov(eax, value);
}
-
- // Drop the sp to the top of the handler.
- ExternalReference handler_address(Isolate::kHandlerAddress,
- isolate());
+ // Drop the stack pointer to the top of the top handler.
+ ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
mov(esp, Operand::StaticVariable(handler_address));
-
- // Restore next handler, context, and frame pointer; discard handler state.
+ // Restore the next handler.
pop(Operand::StaticVariable(handler_address));
+
+ // Remove the code object and state, compute the handler address in edi.
+ pop(edi); // Code object.
+ pop(edx); // Index and state.
+
+ // Restore the context and frame pointer.
pop(esi); // Context.
pop(ebp); // Frame pointer.
- pop(edx); // State.
// If the handler is a JS frame, restore the context to the frame.
- // (edx == ENTRY) == (ebp == 0) == (esi == 0), so we could test any
- // of them.
+ // (kind == ENTRY) == (ebp == 0) == (esi == 0), so we could test either
+ // ebp or esi.
Label skip;
- cmp(edx, Immediate(StackHandler::ENTRY));
- j(equal, &skip, Label::kNear);
+ test(esi, esi);
+ j(zero, &skip, Label::kNear);
mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
bind(&skip);
- ret(0);
+ JumpToHandlerEntry();
}
-void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
- Register value) {
+void MacroAssembler::ThrowUncatchable(Register value) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
- STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
- // eax must hold the exception.
+ // The exception is expected in eax.
if (!value.is(eax)) {
mov(eax, value);
}
-
- // Drop sp to the top stack handler.
- ExternalReference handler_address(Isolate::kHandlerAddress,
- isolate());
+ // Drop the stack pointer to the top of the top stack handler.
+ ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
mov(esp, Operand::StaticVariable(handler_address));
- // Unwind the handlers until the ENTRY handler is found.
- Label loop, done;
- bind(&loop);
- // Load the type of the current stack handler.
- const int kStateOffset = StackHandlerConstants::kStateOffset;
- cmp(Operand(esp, kStateOffset), Immediate(StackHandler::ENTRY));
- j(equal, &done, Label::kNear);
- // Fetch the next handler in the list.
- const int kNextOffset = StackHandlerConstants::kNextOffset;
- mov(esp, Operand(esp, kNextOffset));
- jmp(&loop);
- bind(&done);
-
- // Set the top handler address to next handler past the current ENTRY handler.
- pop(Operand::StaticVariable(handler_address));
+ // Unwind the handlers until the top ENTRY handler is found.
+ Label fetch_next, check_kind;
+ jmp(&check_kind, Label::kNear);
+ bind(&fetch_next);
+ mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));
- if (type == OUT_OF_MEMORY) {
- // Set external caught exception to false.
- ExternalReference external_caught(
- Isolate::kExternalCaughtExceptionAddress,
- isolate());
- mov(eax, false);
- mov(Operand::StaticVariable(external_caught), eax);
+ bind(&check_kind);
+ STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
+ test(Operand(esp, StackHandlerConstants::kStateOffset),
+ Immediate(StackHandler::KindField::kMask));
+ j(not_zero, &fetch_next);
- // Set pending exception and eax to out of memory exception.
- ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
- isolate());
- mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
- mov(Operand::StaticVariable(pending_exception), eax);
- }
+ // Set the top handler address to next handler past the top ENTRY handler.
+ pop(Operand::StaticVariable(handler_address));
- // Discard the context saved in the handler and clear the context pointer.
- pop(edx);
- Set(esi, Immediate(0));
+ // Remove the code object and state, compute the handler address in edi.
+ pop(edi); // Code object.
+ pop(edx); // Index and state.
- // Restore fp from handler and discard handler state.
+ // Clear the context pointer and frame pointer (0 was saved in the handler).
+ pop(esi);
pop(ebp);
- pop(edx); // State.
- ret(0);
+ JumpToHandlerEntry();
}
@@ -937,6 +974,50 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
}
+// Compute the hash code from the untagged key. This must be kept in sync
+// with ComputeIntegerHash in utils.h.
+//
+// Note: r0 will contain hash code
+void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
+ // Xor original key with a seed.
+ if (Serializer::enabled()) {
+ ExternalReference roots_array_start =
+ ExternalReference::roots_array_start(isolate());
+ mov(scratch, Immediate(Heap::kHashSeedRootIndex));
+ mov(scratch,
+ Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
+ SmiUntag(scratch);
+ xor_(r0, scratch);
+ } else {
+ int32_t seed = isolate()->heap()->HashSeed();
+ xor_(r0, Immediate(seed));
+ }
+
+ // hash = ~hash + (hash << 15);
+ mov(scratch, r0);
+ not_(r0);
+ shl(scratch, 15);
+ add(r0, scratch);
+ // hash = hash ^ (hash >> 12);
+ mov(scratch, r0);
+ shr(scratch, 12);
+ xor_(r0, scratch);
+ // hash = hash + (hash << 2);
+ lea(r0, Operand(r0, r0, times_4, 0));
+ // hash = hash ^ (hash >> 4);
+ mov(scratch, r0);
+ shr(scratch, 4);
+ xor_(r0, scratch);
+ // hash = hash * 2057;
+ imul(r0, r0, 2057);
+ // hash = hash ^ (hash >> 16);
+ mov(scratch, r0);
+ shr(scratch, 16);
+ xor_(r0, scratch);
+}
+
+
+
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
Register elements,
Register key,
@@ -962,33 +1043,10 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
Label done;
- // Compute the hash code from the untagged key. This must be kept in sync
- // with ComputeIntegerHash in utils.h.
- //
- // hash = ~hash + (hash << 15);
- mov(r1, r0);
- not_(r0);
- shl(r1, 15);
- add(r0, r1);
- // hash = hash ^ (hash >> 12);
- mov(r1, r0);
- shr(r1, 12);
- xor_(r0, r1);
- // hash = hash + (hash << 2);
- lea(r0, Operand(r0, r0, times_4, 0));
- // hash = hash ^ (hash >> 4);
- mov(r1, r0);
- shr(r1, 4);
- xor_(r0, r1);
- // hash = hash * 2057;
- imul(r0, r0, 2057);
- // hash = hash ^ (hash >> 16);
- mov(r1, r0);
- shr(r1, 16);
- xor_(r0, r1);
+ GetNumberHash(r0, r1);
// Compute capacity mask.
- mov(r1, FieldOperand(elements, NumberDictionary::kCapacityOffset));
+ mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
shr(r1, kSmiTagSize); // convert smi to int
dec(r1);
@@ -999,19 +1057,19 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
mov(r2, r0);
// Compute the masked index: (hash + i + i * i) & mask.
if (i > 0) {
- add(r2, Immediate(NumberDictionary::GetProbeOffset(i)));
+ add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
}
and_(r2, r1);
// Scale the index by multiplying by the entry size.
- ASSERT(NumberDictionary::kEntrySize == 3);
+ ASSERT(SeededNumberDictionary::kEntrySize == 3);
lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
// Check if the key matches.
cmp(key, FieldOperand(elements,
r2,
times_pointer_size,
- NumberDictionary::kElementsStartOffset));
+ SeededNumberDictionary::kElementsStartOffset));
if (i != (kProbes - 1)) {
j(equal, &done);
} else {
@@ -1022,7 +1080,7 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
bind(&done);
// Check that the value is a normal propety.
const int kDetailsOffset =
- NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
+ SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
ASSERT_EQ(NORMAL, 0);
test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
@@ -1030,7 +1088,7 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
// Get the value at the masked, scaled index.
const int kValueOffset =
- NumberDictionary::kElementsStartOffset + kPointerSize;
+ SeededNumberDictionary::kElementsStartOffset + kPointerSize;
mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
@@ -1311,7 +1369,7 @@ void MacroAssembler::AllocateAsciiString(Register result,
add(scratch1, Immediate(kObjectAlignmentMask));
and_(scratch1, Immediate(~kObjectAlignmentMask));
- // Allocate ascii string in new space.
+ // Allocate ASCII string in new space.
AllocateInNewSpace(SeqAsciiString::kHeaderSize,
times_1,
scratch1,
@@ -1339,7 +1397,7 @@ void MacroAssembler::AllocateAsciiString(Register result,
Label* gc_required) {
ASSERT(length > 0);
- // Allocate ascii string in new space.
+ // Allocate ASCII string in new space.
AllocateInNewSpace(SeqAsciiString::SizeFor(length),
result,
scratch1,
@@ -1857,11 +1915,13 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
Handle<Code> code_constant,
const Operand& code_operand,
Label* done,
+ bool* definitely_mismatches,
InvokeFlag flag,
Label::Distance done_near,
const CallWrapper& call_wrapper,
CallKind call_kind) {
bool definitely_matches = false;
+ *definitely_mismatches = false;
Label invoke;
if (expected.is_immediate()) {
ASSERT(actual.is_immediate());
@@ -1877,6 +1937,7 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
// arguments.
definitely_matches = true;
} else {
+ *definitely_mismatches = true;
mov(ebx, expected.immediate());
}
}
@@ -1914,7 +1975,9 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
SetCallKind(ecx, call_kind);
call(adaptor, RelocInfo::CODE_TARGET);
call_wrapper.AfterCall();
- jmp(done, done_near);
+ if (!*definitely_mismatches) {
+ jmp(done, done_near);
+ }
} else {
SetCallKind(ecx, call_kind);
jmp(adaptor, RelocInfo::CODE_TARGET);
@@ -1934,20 +1997,23 @@ void MacroAssembler::InvokeCode(const Operand& code,
ASSERT(flag == JUMP_FUNCTION || has_frame());
Label done;
+ bool definitely_mismatches = false;
InvokePrologue(expected, actual, Handle<Code>::null(), code,
- &done, flag, Label::kNear, call_wrapper,
- call_kind);
- if (flag == CALL_FUNCTION) {
- call_wrapper.BeforeCall(CallSize(code));
- SetCallKind(ecx, call_kind);
- call(code);
- call_wrapper.AfterCall();
- } else {
- ASSERT(flag == JUMP_FUNCTION);
- SetCallKind(ecx, call_kind);
- jmp(code);
+ &done, &definitely_mismatches, flag, Label::kNear,
+ call_wrapper, call_kind);
+ if (!definitely_mismatches) {
+ if (flag == CALL_FUNCTION) {
+ call_wrapper.BeforeCall(CallSize(code));
+ SetCallKind(ecx, call_kind);
+ call(code);
+ call_wrapper.AfterCall();
+ } else {
+ ASSERT(flag == JUMP_FUNCTION);
+ SetCallKind(ecx, call_kind);
+ jmp(code);
+ }
+ bind(&done);
}
- bind(&done);
}
@@ -1963,19 +2029,22 @@ void MacroAssembler::InvokeCode(Handle<Code> code,
Label done;
Operand dummy(eax, 0);
- InvokePrologue(expected, actual, code, dummy, &done, flag, Label::kNear,
- call_wrapper, call_kind);
- if (flag == CALL_FUNCTION) {
- call_wrapper.BeforeCall(CallSize(code, rmode));
- SetCallKind(ecx, call_kind);
- call(code, rmode);
- call_wrapper.AfterCall();
- } else {
- ASSERT(flag == JUMP_FUNCTION);
- SetCallKind(ecx, call_kind);
- jmp(code, rmode);
+ bool definitely_mismatches = false;
+ InvokePrologue(expected, actual, code, dummy, &done, &definitely_mismatches,
+ flag, Label::kNear, call_wrapper, call_kind);
+ if (!definitely_mismatches) {
+ if (flag == CALL_FUNCTION) {
+ call_wrapper.BeforeCall(CallSize(code, rmode));
+ SetCallKind(ecx, call_kind);
+ call(code, rmode);
+ call_wrapper.AfterCall();
+ } else {
+ ASSERT(flag == JUMP_FUNCTION);
+ SetCallKind(ecx, call_kind);
+ jmp(code, rmode);
+ }
+ bind(&done);
}
- bind(&done);
}
@@ -2008,7 +2077,7 @@ void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
ASSERT(flag == JUMP_FUNCTION || has_frame());
// Get the function and setup the context.
- mov(edi, Immediate(function));
+ LoadHeapObject(edi, function);
mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
ParameterCount expected(function->shared()->formal_parameter_count());
@@ -2081,6 +2150,46 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
}
+void MacroAssembler::LoadTransitionedArrayMapConditional(
+ ElementsKind expected_kind,
+ ElementsKind transitioned_kind,
+ Register map_in_out,
+ Register scratch,
+ Label* no_map_match) {
+ // Load the global or builtins object from the current context.
+ mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ mov(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
+
+ // Check that the function's map is the same as the expected cached map.
+ int expected_index =
+ Context::GetContextMapIndexFromElementsKind(expected_kind);
+ cmp(map_in_out, Operand(scratch, Context::SlotOffset(expected_index)));
+ j(not_equal, no_map_match);
+
+ // Use the transitioned cached map.
+ int trans_index =
+ Context::GetContextMapIndexFromElementsKind(transitioned_kind);
+ mov(map_in_out, Operand(scratch, Context::SlotOffset(trans_index)));
+}
+
+
+void MacroAssembler::LoadInitialArrayMap(
+ Register function_in, Register scratch, Register map_out) {
+ ASSERT(!function_in.is(map_out));
+ Label done;
+ mov(map_out, FieldOperand(function_in,
+ JSFunction::kPrototypeOrInitialMapOffset));
+ if (!FLAG_smi_only_arrays) {
+ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ map_out,
+ scratch,
+ &done);
+ }
+ bind(&done);
+}
+
+
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
// Load the global or builtins object from the current context.
mov(function, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
@@ -2137,6 +2246,29 @@ int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
}
+void MacroAssembler::LoadHeapObject(Register result,
+ Handle<HeapObject> object) {
+ if (isolate()->heap()->InNewSpace(*object)) {
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(object);
+ mov(result, Operand::Cell(cell));
+ } else {
+ mov(result, object);
+ }
+}
+
+
+void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
+ if (isolate()->heap()->InNewSpace(*object)) {
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(object);
+ push(Operand::Cell(cell));
+ } else {
+ Push(object);
+ }
+}
+
+
void MacroAssembler::Ret() {
ret(0);
}
@@ -2154,8 +2286,6 @@ void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
}
-
-
void MacroAssembler::Drop(int stack_elements) {
if (stack_elements > 0) {
add(esp, Immediate(stack_elements * kPointerSize));
@@ -2170,11 +2300,6 @@ void MacroAssembler::Move(Register dst, Register src) {
}
-void MacroAssembler::Move(Register dst, Handle<Object> value) {
- mov(dst, value);
-}
-
-
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
if (FLAG_native_code_counters && counter->Enabled()) {
mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
@@ -2372,7 +2497,7 @@ void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
- // Check that both are flat ascii strings.
+ // Check that both are flat ASCII strings.
const int kFlatAsciiStringMask =
kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
@@ -2651,6 +2776,46 @@ void MacroAssembler::EnsureNotWhite(
bind(&done);
}
+
+void MacroAssembler::CheckEnumCache(Label* call_runtime) {
+ Label next;
+ mov(ecx, eax);
+ bind(&next);
+
+ // Check that there are no elements. Register ecx contains the
+ // current JS object we've reached through the prototype chain.
+ cmp(FieldOperand(ecx, JSObject::kElementsOffset),
+ isolate()->factory()->empty_fixed_array());
+ j(not_equal, call_runtime);
+
+ // Check that instance descriptors are not empty so that we can
+ // check for an enum cache. Leave the map in ebx for the subsequent
+ // prototype load.
+ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
+ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOrBitField3Offset));
+ JumpIfSmi(edx, call_runtime);
+
+ // Check that there is an enum cache in the non-empty instance
+ // descriptors (edx). This is the case if the next enumeration
+ // index field does not contain a smi.
+ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
+ JumpIfSmi(edx, call_runtime);
+
+ // For all objects but the receiver, check that the cache is empty.
+ Label check_prototype;
+ cmp(ecx, eax);
+ j(equal, &check_prototype, Label::kNear);
+ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
+ cmp(edx, isolate()->factory()->empty_fixed_array());
+ j(not_equal, call_runtime);
+
+ // Load the prototype from the map and loop if non-null.
+ bind(&check_prototype);
+ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
+ cmp(ecx, isolate()->factory()->null_value());
+ j(not_equal, &next);
+}
+
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_IA32
diff --git a/src/3rdparty/v8/src/ia32/macro-assembler-ia32.h b/src/3rdparty/v8/src/ia32/macro-assembler-ia32.h
index 92378e4..66d1ce7 100644
--- a/src/3rdparty/v8/src/ia32/macro-assembler-ia32.h
+++ b/src/3rdparty/v8/src/ia32/macro-assembler-ia32.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -221,6 +221,22 @@ class MacroAssembler: public Assembler {
// Find the function context up the context chain.
void LoadContext(Register dst, int context_chain_length);
+ // Conditionally load the cached Array transitioned map of type
+ // transitioned_kind from the global context if the map in register
+ // map_in_out is the cached Array map in the global context of
+ // expected_kind.
+ void LoadTransitionedArrayMapConditional(
+ ElementsKind expected_kind,
+ ElementsKind transitioned_kind,
+ Register map_in_out,
+ Register scratch,
+ Label* no_map_match);
+
+ // Load the initial map for new Arrays from a JSFunction.
+ void LoadInitialArrayMap(Register function_in,
+ Register scratch,
+ Register map_out);
+
// Load the global function with the given index.
void LoadGlobalFunction(int index, Register function);
@@ -237,10 +253,21 @@ class MacroAssembler: public Assembler {
void StoreToSafepointRegisterSlot(Register dst, Immediate src);
void LoadFromSafepointRegisterSlot(Register dst, Register src);
+ void LoadHeapObject(Register result, Handle<HeapObject> object);
+ void PushHeapObject(Handle<HeapObject> object);
+
+ void LoadObject(Register result, Handle<Object> object) {
+ if (object->IsHeapObject()) {
+ LoadHeapObject(result, Handle<HeapObject>::cast(object));
+ } else {
+ Set(result, Immediate(object));
+ }
+ }
+
// ---------------------------------------------------------------------------
// JavaScript invokes
- // Setup call kind marking in ecx. The method takes ecx as an
+ // Set up call kind marking in ecx. The method takes ecx as an
// explicit first parameter to make the code more readable at the
// call sites.
void SetCallKind(Register dst, CallKind kind);
@@ -305,8 +332,9 @@ class MacroAssembler: public Assembler {
void SafeSet(Register dst, const Immediate& x);
void SafePush(const Immediate& x);
- // Compare a register against a known root, e.g. undefined, null, true, ...
+ // Compare against a known root, e.g. undefined, null, true, ...
void CompareRoot(Register with, Heap::RootListIndex index);
+ void CompareRoot(const Operand& with, Heap::RootListIndex index);
// Compare object type for heap object.
// Incoming register is heap_object and outgoing register is map.
@@ -344,13 +372,24 @@ class MacroAssembler: public Assembler {
Label* fail,
bool specialize_for_processor);
+ // Compare an object's map with the specified map and its transitioned
+ // elements maps if mode is ALLOW_ELEMENT_TRANSITION_MAPS. FLAGS are set with
+ // result of map compare. If multiple map compares are required, the compare
+ // sequences branches to early_success.
+ void CompareMap(Register obj,
+ Handle<Map> map,
+ Label* early_success,
+ CompareMapMode mode = REQUIRE_EXACT_MAP);
+
// Check if the map of an object is equal to a specified map and branch to
// label if not. Skip the smi check if not required (object is known to be a
- // heap object)
+ // heap object). If mode is ALLOW_ELEMENT_TRANSITION_MAPS, then also match
+ // against maps that are ElementsKind transition maps of the specified map.
void CheckMap(Register obj,
Handle<Map> map,
Label* fail,
- SmiCheckType smi_check_type);
+ SmiCheckType smi_check_type,
+ CompareMapMode mode = REQUIRE_EXACT_MAP);
// Check if the map of an object is equal to a specified map and branch to a
// specified target if equal. Skip the smi check if not required (object is
@@ -451,17 +490,17 @@ class MacroAssembler: public Assembler {
// ---------------------------------------------------------------------------
// Exception handling
- // Push a new try handler and link into try handler chain. The return
- // address must be pushed before calling this helper.
- void PushTryHandler(CodeLocation try_location, HandlerType type);
+ // Push a new try handler and link it into try handler chain.
+ void PushTryHandler(StackHandler::Kind kind, int handler_index);
// Unlink the stack handler on top of the stack from the try handler chain.
void PopTryHandler();
- // Activate the top handler in the try hander chain.
+ // Throw to the top handler in the try hander chain.
void Throw(Register value);
- void ThrowUncatchable(UncatchableExceptionType type, Register value);
+ // Throw past all JS frames to the top JS entry frame.
+ void ThrowUncatchable(Register value);
// ---------------------------------------------------------------------------
// Inline caching support
@@ -473,6 +512,7 @@ class MacroAssembler: public Assembler {
Register scratch,
Label* miss);
+ void GetNumberHash(Register r0, Register scratch);
void LoadFromNumberDictionary(Label* miss,
Register elements,
@@ -717,10 +757,8 @@ class MacroAssembler: public Assembler {
// Move if the registers are not identical.
void Move(Register target, Register source);
- void Move(Register target, Handle<Object> value);
-
// Push a handle value.
- void Push(Handle<Object> handle) { push(handle); }
+ void Push(Handle<Object> handle) { push(Immediate(handle)); }
Handle<Object> CodeObject() {
ASSERT(!code_object_.is_null());
@@ -768,7 +806,7 @@ class MacroAssembler: public Assembler {
// ---------------------------------------------------------------------------
// String utilities.
- // Check whether the instance type represents a flat ascii string. Jump to the
+ // Check whether the instance type represents a flat ASCII string. Jump to the
// label if not. If the instance type can be scratched specify same register
// for both instance type and scratch.
void JumpIfInstanceTypeIsNotSequentialAscii(Register instance_type,
@@ -791,6 +829,10 @@ class MacroAssembler: public Assembler {
void EnterFrame(StackFrame::Type type);
void LeaveFrame(StackFrame::Type type);
+ // Expects object in eax and returns map with validated enum cache
+ // in eax. Assumes that any other register can be used as a scratch.
+ void CheckEnumCache(Label* call_runtime);
+
private:
bool generating_stub_;
bool allow_stub_calls_;
@@ -804,6 +846,7 @@ class MacroAssembler: public Assembler {
Handle<Code> code_constant,
const Operand& code_operand,
Label* done,
+ bool* definitely_mismatches,
InvokeFlag flag,
Label::Distance done_distance,
const CallWrapper& call_wrapper = NullCallWrapper(),
@@ -842,6 +885,10 @@ class MacroAssembler: public Assembler {
Register bitmap_reg,
Register mask_reg);
+ // Helper for throwing exceptions. Compute a handler address and jump to
+ // it. See the implementation for register usage.
+ void JumpToHandlerEntry();
+
// Compute memory operands for safepoint stack slots.
Operand SafepointRegisterSlot(Register reg);
static int SafepointRegisterStackIndex(int reg_code);
@@ -876,32 +923,29 @@ class CodePatcher {
// Static helper functions.
// Generate an Operand for loading a field from an object.
-static inline Operand FieldOperand(Register object, int offset) {
+inline Operand FieldOperand(Register object, int offset) {
return Operand(object, offset - kHeapObjectTag);
}
// Generate an Operand for loading an indexed field from an object.
-static inline Operand FieldOperand(Register object,
- Register index,
- ScaleFactor scale,
- int offset) {
+inline Operand FieldOperand(Register object,
+ Register index,
+ ScaleFactor scale,
+ int offset) {
return Operand(object, index, scale, offset - kHeapObjectTag);
}
-static inline Operand ContextOperand(Register context, int index) {
+inline Operand ContextOperand(Register context, int index) {
return Operand(context, Context::SlotOffset(index));
}
-static inline Operand GlobalObjectOperand() {
+inline Operand GlobalObjectOperand() {
return ContextOperand(esi, Context::GLOBAL_INDEX);
}
-static inline Operand QmlGlobalObjectOperand() {
- return ContextOperand(esi, Context::QML_GLOBAL_INDEX);
-}
// Generates an Operand for saving parameters after PrepareCallApiFunction.
Operand ApiParameterOperand(int index);
diff --git a/src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.cc b/src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.cc
index dbf01ab..0029f33 100644
--- a/src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.cc
@@ -210,7 +210,7 @@ void RegExpMacroAssemblerIA32::CheckCharacters(Vector<const uc16> str,
bool check_end_of_string) {
#ifdef DEBUG
// If input is ASCII, don't even bother calling here if the string to
- // match contains a non-ascii character.
+ // match contains a non-ASCII character.
if (mode_ == ASCII) {
ASSERT(String::IsAscii(str.start(), str.length()));
}
@@ -501,9 +501,13 @@ void RegExpMacroAssemblerIA32::CheckNotCharacter(uint32_t c,
void RegExpMacroAssemblerIA32::CheckCharacterAfterAnd(uint32_t c,
uint32_t mask,
Label* on_equal) {
- __ mov(eax, current_character());
- __ and_(eax, mask);
- __ cmp(eax, c);
+ if (c == 0) {
+ __ test(current_character(), Immediate(mask));
+ } else {
+ __ mov(eax, mask);
+ __ and_(eax, current_character());
+ __ cmp(eax, c);
+ }
BranchOrBacktrack(equal, on_equal);
}
@@ -511,9 +515,13 @@ void RegExpMacroAssemblerIA32::CheckCharacterAfterAnd(uint32_t c,
void RegExpMacroAssemblerIA32::CheckNotCharacterAfterAnd(uint32_t c,
uint32_t mask,
Label* on_not_equal) {
- __ mov(eax, current_character());
- __ and_(eax, mask);
- __ cmp(eax, c);
+ if (c == 0) {
+ __ test(current_character(), Immediate(mask));
+ } else {
+ __ mov(eax, mask);
+ __ and_(eax, current_character());
+ __ cmp(eax, c);
+ }
BranchOrBacktrack(not_equal, on_not_equal);
}
@@ -523,14 +531,53 @@ void RegExpMacroAssemblerIA32::CheckNotCharacterAfterMinusAnd(
uc16 minus,
uc16 mask,
Label* on_not_equal) {
- ASSERT(minus < String::kMaxUC16CharCode);
+ ASSERT(minus < String::kMaxUtf16CodeUnit);
__ lea(eax, Operand(current_character(), -minus));
- __ and_(eax, mask);
- __ cmp(eax, c);
+ if (c == 0) {
+ __ test(eax, Immediate(mask));
+ } else {
+ __ and_(eax, mask);
+ __ cmp(eax, c);
+ }
BranchOrBacktrack(not_equal, on_not_equal);
}
+void RegExpMacroAssemblerIA32::CheckCharacterInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_in_range) {
+ __ lea(eax, Operand(current_character(), -from));
+ __ cmp(eax, to - from);
+ BranchOrBacktrack(below_equal, on_in_range);
+}
+
+
+void RegExpMacroAssemblerIA32::CheckCharacterNotInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_not_in_range) {
+ __ lea(eax, Operand(current_character(), -from));
+ __ cmp(eax, to - from);
+ BranchOrBacktrack(above, on_not_in_range);
+}
+
+
+void RegExpMacroAssemblerIA32::CheckBitInTable(
+ Handle<ByteArray> table,
+ Label* on_bit_set) {
+ __ mov(eax, Immediate(table));
+ Register index = current_character();
+ if (mode_ != ASCII || kTableMask != String::kMaxAsciiCharCode) {
+ __ mov(ebx, kTableSize - 1);
+ __ and_(ebx, current_character());
+ index = ebx;
+ }
+ __ cmpb(FieldOperand(eax, index, times_1, ByteArray::kHeaderSize), 0);
+ BranchOrBacktrack(not_equal, on_bit_set);
+}
+
+
bool RegExpMacroAssemblerIA32::CheckSpecialCharacterClass(uc16 type,
Label* on_no_match) {
// Range checks (c in min..max) are generally implemented by an unsigned
@@ -1085,7 +1132,7 @@ int RegExpMacroAssemblerIA32::CheckStackGuardState(Address* return_address,
ASSERT(*return_address <=
re_code->instruction_start() + re_code->instruction_size());
- MaybeObject* result = Execution::HandleStackGuardInterrupt();
+ MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate);
if (*code_handle != re_code) { // Return address no longer valid
int delta = code_handle->address() - re_code->address();
diff --git a/src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.h b/src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.h
index d504470..78cd069 100644
--- a/src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.h
+++ b/src/3rdparty/v8/src/ia32/regexp-macro-assembler-ia32.h
@@ -78,6 +78,14 @@ class RegExpMacroAssemblerIA32: public NativeRegExpMacroAssembler {
uc16 minus,
uc16 mask,
Label* on_not_equal);
+ virtual void CheckCharacterInRange(uc16 from,
+ uc16 to,
+ Label* on_in_range);
+ virtual void CheckCharacterNotInRange(uc16 from,
+ uc16 to,
+ Label* on_not_in_range);
+ virtual void CheckBitInTable(Handle<ByteArray> table, Label* on_bit_set);
+
// Checks whether the given offset from the current position is before
// the end of the string.
virtual void CheckPosition(int cp_offset, Label* on_outside_input);
diff --git a/src/3rdparty/v8/src/ia32/stub-cache-ia32.cc b/src/3rdparty/v8/src/ia32/stub-cache-ia32.cc
index 3089a69..fd26779 100644
--- a/src/3rdparty/v8/src/ia32/stub-cache-ia32.cc
+++ b/src/3rdparty/v8/src/ia32/stub-cache-ia32.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -44,19 +44,30 @@ static void ProbeTable(Isolate* isolate,
Code::Flags flags,
StubCache::Table table,
Register name,
+ Register receiver,
+ // Number of the cache entry pointer-size scaled.
Register offset,
Register extra) {
ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
+ ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
Label miss;
+ // Multiply by 3 because there are 3 fields per entry (name, code, map).
+ __ lea(offset, Operand(offset, offset, times_2, 0));
+
if (extra.is_valid()) {
// Get the code entry from the cache.
- __ mov(extra, Operand::StaticArray(offset, times_2, value_offset));
+ __ mov(extra, Operand::StaticArray(offset, times_1, value_offset));
// Check that the key in the entry matches the name.
- __ cmp(name, Operand::StaticArray(offset, times_2, key_offset));
+ __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
+ __ j(not_equal, &miss);
+
+ // Check the map matches.
+ __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
+ __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
__ j(not_equal, &miss);
// Check that the flags match what we're looking for.
@@ -65,6 +76,14 @@ static void ProbeTable(Isolate* isolate,
__ cmp(offset, flags);
__ j(not_equal, &miss);
+#ifdef DEBUG
+ if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
+ __ jmp(&miss);
+ } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
+ __ jmp(&miss);
+ }
+#endif
+
// Jump to the first instruction in the code stub.
__ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag));
__ jmp(extra);
@@ -75,11 +94,19 @@ static void ProbeTable(Isolate* isolate,
__ push(offset);
// Check that the key in the entry matches the name.
- __ cmp(name, Operand::StaticArray(offset, times_2, key_offset));
+ __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
__ j(not_equal, &miss);
+ // Check the map matches.
+ __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
+ __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
+ __ j(not_equal, &miss);
+
+ // Restore offset register.
+ __ mov(offset, Operand(esp, 0));
+
// Get the code entry from the cache.
- __ mov(offset, Operand::StaticArray(offset, times_2, value_offset));
+ __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));
// Check that the flags match what we're looking for.
__ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
@@ -87,9 +114,17 @@ static void ProbeTable(Isolate* isolate,
__ cmp(offset, flags);
__ j(not_equal, &miss);
+#ifdef DEBUG
+ if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
+ __ jmp(&miss);
+ } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
+ __ jmp(&miss);
+ }
+#endif
+
// Restore offset and re-load code entry from cache.
__ pop(offset);
- __ mov(offset, Operand::StaticArray(offset, times_2, value_offset));
+ __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));
// Jump to the first instruction in the code stub.
__ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag));
@@ -159,12 +194,13 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
Register name,
Register scratch,
Register extra,
- Register extra2) {
+ Register extra2,
+ Register extra3) {
Label miss;
- // Assert that code is valid. The shifting code relies on the entry size
- // being 8.
- ASSERT(sizeof(Entry) == 8);
+ // Assert that code is valid. The multiplying code relies on the entry size
+ // being 12.
+ ASSERT(sizeof(Entry) == 12);
// Assert the flags do not name a specific type.
ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
@@ -176,37 +212,51 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
ASSERT(!extra.is(name));
ASSERT(!extra.is(scratch));
- // Assert scratch and extra registers are valid, and extra2 is unused.
+ // Assert scratch and extra registers are valid, and extra2/3 are unused.
ASSERT(!scratch.is(no_reg));
ASSERT(extra2.is(no_reg));
+ ASSERT(extra3.is(no_reg));
+
+ Register offset = scratch;
+ scratch = no_reg;
+
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, &miss);
// Get the map of the receiver and compute the hash.
- __ mov(scratch, FieldOperand(name, String::kHashFieldOffset));
- __ add(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
- __ xor_(scratch, flags);
- __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
+ __ mov(offset, FieldOperand(name, String::kHashFieldOffset));
+ __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
+ __ xor_(offset, flags);
+ // We mask out the last two bits because they are not part of the hash and
+ // they are always 01 for maps. Also in the two 'and' instructions below.
+ __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
+ // ProbeTable expects the offset to be pointer scaled, which it is, because
+ // the heap object tag size is 2 and the pointer size log 2 is also 2.
+ ASSERT(kHeapObjectTagSize == kPointerSizeLog2);
// Probe the primary table.
- ProbeTable(isolate(), masm, flags, kPrimary, name, scratch, extra);
+ ProbeTable(isolate(), masm, flags, kPrimary, name, receiver, offset, extra);
// Primary miss: Compute hash for secondary probe.
- __ mov(scratch, FieldOperand(name, String::kHashFieldOffset));
- __ add(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
- __ xor_(scratch, flags);
- __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
- __ sub(scratch, name);
- __ add(scratch, Immediate(flags));
- __ and_(scratch, (kSecondaryTableSize - 1) << kHeapObjectTagSize);
+ __ mov(offset, FieldOperand(name, String::kHashFieldOffset));
+ __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
+ __ xor_(offset, flags);
+ __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
+ __ sub(offset, name);
+ __ add(offset, Immediate(flags));
+ __ and_(offset, (kSecondaryTableSize - 1) << kHeapObjectTagSize);
// Probe the secondary table.
- ProbeTable(isolate(), masm, flags, kSecondary, name, scratch, extra);
+ ProbeTable(
+ isolate(), masm, flags, kSecondary, name, receiver, offset, extra);
// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
__ bind(&miss);
+ __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
}
@@ -429,7 +479,7 @@ static void GenerateFastApiCall(MacroAssembler* masm,
// -----------------------------------
// Get the function and setup the context.
Handle<JSFunction> function = optimization.constant_function();
- __ mov(edi, Immediate(function));
+ __ LoadHeapObject(edi, function);
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
// Pass the additional arguments.
@@ -695,13 +745,9 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
Register name_reg,
Register scratch,
Label* miss_label) {
- // Check that the object isn't a smi.
- __ JumpIfSmi(receiver_reg, miss_label);
-
// Check that the map of the object hasn't changed.
- __ cmp(FieldOperand(receiver_reg, HeapObject::kMapOffset),
- Immediate(Handle<Map>(object->map())));
- __ j(not_equal, miss_label);
+ __ CheckMap(receiver_reg, Handle<Map>(object->map()),
+ miss_label, DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
@@ -878,13 +924,10 @@ Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
if (in_new_space) {
// Save the map in scratch1 for later.
__ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
- __ cmp(scratch1, Immediate(current_map));
- } else {
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
- Immediate(current_map));
}
- // Branch on the result of the map check.
- __ j(not_equal, miss);
+ __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK,
+ ALLOW_ELEMENT_TRANSITION_MAPS);
+
// Check access rights to the global object. This has to happen after
// the map check so that we know that the object is actually a global
// object.
@@ -916,9 +959,8 @@ Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
// Check the holder map.
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
- Immediate(Handle<Map>(holder->map())));
- __ j(not_equal, miss);
+ __ CheckMap(reg, Handle<Map>(holder->map()),
+ miss, DONT_DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
// Perform security check for access to the global object.
ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
@@ -1000,7 +1042,7 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
__ push(scratch3); // Restore return address.
- // 3 elements array for v8::Agruments::values_, handler for name and pointer
+ // 3 elements array for v8::Arguments::values_, handler for name and pointer
// to the values (it considered as smi in GC).
const int kStackSpace = 5;
const int kApiArgc = 2;
@@ -1025,7 +1067,7 @@ void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
Register scratch1,
Register scratch2,
Register scratch3,
- Handle<Object> value,
+ Handle<JSFunction> value,
Handle<String> name,
Label* miss) {
// Check that the receiver isn't a smi.
@@ -1036,7 +1078,7 @@ void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
// Return the constant value.
- __ mov(eax, value);
+ __ LoadHeapObject(eax, value);
__ ret(0);
}
@@ -1061,7 +1103,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
// and CALLBACKS, so inline only them, other cases may be added
// later.
bool compile_followup_inline = false;
- if (lookup->IsProperty() && lookup->IsCacheable()) {
+ if (lookup->IsFound() && lookup->IsCacheable()) {
if (lookup->type() == FIELD) {
compile_followup_inline = true;
} else if (lookup->type() == CALLBACKS &&
@@ -1203,14 +1245,9 @@ void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
// Get the receiver from the stack.
__ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
- // If the object is the holder then we know that it's a global
- // object which can only happen for contextual calls. In this case,
- // the receiver cannot be a smi.
- if (!object.is_identical_to(holder)) {
- __ JumpIfSmi(edx, miss);
- }
// Check that the maps haven't changed.
+ __ JumpIfSmi(edx, miss);
CheckPrototypes(object, edx, holder, ebx, eax, edi, name, miss);
}
@@ -1353,25 +1390,25 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
} else {
Label call_builtin;
- // Get the elements array of the object.
- __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset));
-
- // Check that the elements are in fast mode and writable.
- __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
- Immediate(factory()->fixed_array_map()));
- __ j(not_equal, &call_builtin);
-
if (argc == 1) { // Otherwise fall through to call builtin.
Label attempt_to_grow_elements, with_write_barrier;
+ // Get the elements array of the object.
+ __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
+
+ // Check that the elements are in fast mode and writable.
+ __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
+ Immediate(factory()->fixed_array_map()));
+ __ j(not_equal, &call_builtin);
+
// Get the array's length into eax and calculate new length.
__ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kSmiTag == 0);
__ add(eax, Immediate(Smi::FromInt(argc)));
- // Get the element's length into ecx.
- __ mov(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
+ // Get the elements' length into ecx.
+ __ mov(ecx, FieldOperand(edi, FixedArray::kLengthOffset));
// Check if we could survive without allocation.
__ cmp(eax, ecx);
@@ -1384,29 +1421,52 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
// Save new length.
__ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
- // Push the element.
- __ lea(edx, FieldOperand(ebx,
- eax, times_half_pointer_size,
- FixedArray::kHeaderSize - argc * kPointerSize));
- __ mov(Operand(edx, 0), ecx);
+ // Store the value.
+ __ mov(FieldOperand(edi,
+ eax,
+ times_half_pointer_size,
+ FixedArray::kHeaderSize - argc * kPointerSize),
+ ecx);
__ ret((argc + 1) * kPointerSize);
__ bind(&with_write_barrier);
- __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
- __ CheckFastObjectElements(edi, &call_builtin);
+ __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
+
+ if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
+ Label fast_object, not_fast_object;
+ __ CheckFastObjectElements(ebx, &not_fast_object, Label::kNear);
+ __ jmp(&fast_object);
+ // In case of fast smi-only, convert to fast object, otherwise bail out.
+ __ bind(&not_fast_object);
+ __ CheckFastSmiOnlyElements(ebx, &call_builtin);
+ // edi: elements array
+ // edx: receiver
+ // ebx: map
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ ebx,
+ edi,
+ &call_builtin);
+ ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+ // Restore edi.
+ __ mov(edi, FieldOperand(edx, JSArray::kElementsOffset));
+ __ bind(&fast_object);
+ } else {
+ __ CheckFastObjectElements(ebx, &call_builtin);
+ }
// Save new length.
__ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
- // Push the element.
- __ lea(edx, FieldOperand(ebx,
+ // Store the value.
+ __ lea(edx, FieldOperand(edi,
eax, times_half_pointer_size,
FixedArray::kHeaderSize - argc * kPointerSize));
__ mov(Operand(edx, 0), ecx);
- __ RecordWrite(ebx, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
+ __ RecordWrite(edi, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
__ ret((argc + 1) * kPointerSize);
@@ -1416,11 +1476,11 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ jmp(&call_builtin);
}
- __ mov(edi, Operand(esp, argc * kPointerSize));
+ __ mov(ebx, Operand(esp, argc * kPointerSize));
// Growing elements that are SMI-only requires special handling in case
// the new element is non-Smi. For now, delegate to the builtin.
Label no_fast_elements_check;
- __ JumpIfSmi(edi, &no_fast_elements_check);
+ __ JumpIfSmi(ebx, &no_fast_elements_check);
__ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
__ CheckFastObjectElements(ecx, &call_builtin, Label::kFar);
__ bind(&no_fast_elements_check);
@@ -1439,7 +1499,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ mov(ecx, Operand::StaticVariable(new_space_allocation_top));
// Check if it's the end of elements.
- __ lea(edx, FieldOperand(ebx,
+ __ lea(edx, FieldOperand(edi,
eax, times_half_pointer_size,
FixedArray::kHeaderSize - argc * kPointerSize));
__ cmp(edx, ecx);
@@ -1452,7 +1512,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ mov(Operand::StaticVariable(new_space_allocation_top), ecx);
// Push the argument...
- __ mov(Operand(edx, 0), edi);
+ __ mov(Operand(edx, 0), ebx);
// ... and fill the rest with holes.
for (int i = 1; i < kAllocationDelta; i++) {
__ mov(Operand(edx, i * kPointerSize),
@@ -1464,13 +1524,13 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
// tell the incremental marker to rescan the object that we just grew. We
// don't need to worry about the holes because they are in old space and
// already marked black.
- __ RecordWrite(ebx, edx, edi, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
+ __ RecordWrite(edi, edx, ebx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
// Restore receiver to edx as finish sequence assumes it's here.
__ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
// Increment element's and array's sizes.
- __ add(FieldOperand(ebx, FixedArray::kLengthOffset),
+ __ add(FieldOperand(edi, FixedArray::kLengthOffset),
Immediate(Smi::FromInt(kAllocationDelta)));
// NOTE: This only happen in new-space, where we don't
@@ -1624,7 +1684,6 @@ Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
Register receiver = ebx;
Register index = edi;
- Register scratch = edx;
Register result = eax;
__ mov(receiver, Operand(esp, (argc + 1) * kPointerSize));
if (argc > 0) {
@@ -1635,7 +1694,6 @@ Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
StringCharCodeAtGenerator generator(receiver,
index,
- scratch,
result,
&miss, // When not a string.
&miss, // When not a number.
@@ -1709,8 +1767,7 @@ Handle<Code> CallStubCompiler::CompileStringCharAtCall(
Register receiver = eax;
Register index = edi;
- Register scratch1 = ebx;
- Register scratch2 = edx;
+ Register scratch = edx;
Register result = eax;
__ mov(receiver, Operand(esp, (argc + 1) * kPointerSize));
if (argc > 0) {
@@ -1721,8 +1778,7 @@ Handle<Code> CallStubCompiler::CompileStringCharAtCall(
StringCharAtGenerator generator(receiver,
index,
- scratch1,
- scratch2,
+ scratch,
result,
&miss, // When not a string.
&miss, // When not a number.
@@ -2173,7 +2229,7 @@ Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
break;
case STRING_CHECK:
- if (function->IsBuiltin() || function->shared()->strict_mode()) {
+ if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
// Check that the object is a string or a symbol.
__ CmpObjectType(edx, FIRST_NONSTRING_TYPE, eax);
__ j(above_equal, &miss);
@@ -2191,7 +2247,7 @@ Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
break;
case NUMBER_CHECK:
- if (function->IsBuiltin() || function->shared()->strict_mode()) {
+ if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
Label fast;
// Check that the object is a smi or a heap number.
__ JumpIfSmi(edx, &fast);
@@ -2212,7 +2268,7 @@ Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
break;
case BOOLEAN_CHECK:
- if (function->IsBuiltin() || function->shared()->strict_mode()) {
+ if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
Label fast;
// Check that the object is a boolean.
__ cmp(edx, factory()->true_value());
@@ -2342,7 +2398,7 @@ Handle<Code> CallStubCompiler::CompileCallGlobal(
__ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
}
- // Setup the context (function already in edi).
+ // Set up the context (function already in edi).
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
// Jump to the cached code (tail call).
@@ -2407,13 +2463,9 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
// -----------------------------------
Label miss;
- // Check that the object isn't a smi.
- __ JumpIfSmi(edx, &miss);
-
// Check that the map of the object hasn't changed.
- __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
- Immediate(Handle<Map>(object->map())));
- __ j(not_equal, &miss);
+ __ CheckMap(edx, Handle<Map>(object->map()),
+ &miss, DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
@@ -2457,13 +2509,9 @@ Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
// -----------------------------------
Label miss;
- // Check that the object isn't a smi.
- __ JumpIfSmi(edx, &miss);
-
// Check that the map of the object hasn't changed.
- __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
- Immediate(Handle<Map>(receiver->map())));
- __ j(not_equal, &miss);
+ __ CheckMap(edx, Handle<Map>(receiver->map()),
+ &miss, DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
// Perform global security token check if needed.
if (receiver->IsJSGlobalProxy()) {
@@ -2526,23 +2574,9 @@ Handle<Code> StoreStubCompiler::CompileStoreGlobal(
// Store the value in the cell.
__ mov(cell_operand, eax);
- Label done;
- __ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &done);
-
- __ mov(ecx, eax);
- __ lea(edx, cell_operand);
- // Cells are always in the remembered set.
- __ RecordWrite(ebx, // Object.
- edx, // Address.
- ecx, // Value.
- kDontSaveFPRegs,
- OMIT_REMEMBERED_SET,
- OMIT_SMI_CHECK);
+ // No write barrier here, because cells are always rescanned.
// Return the value (register eax).
- __ bind(&done);
-
Counters* counters = isolate()->counters();
__ IncrementCounter(counters->named_store_global_inline(), 1);
__ ret(0);
@@ -2602,7 +2636,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
ElementsKind elements_kind = receiver_map->elements_kind();
bool is_jsarray = receiver_map->instance_type() == JS_ARRAY_TYPE;
Handle<Code> stub =
- KeyedStoreElementStub(is_jsarray, elements_kind).GetCode();
+ KeyedStoreElementStub(is_jsarray, elements_kind, grow_mode_).GetCode();
__ DispatchMap(edx, receiver_map, stub, DO_SMI_CHECK);
@@ -2733,7 +2767,7 @@ Handle<Code> LoadStubCompiler::CompileLoadCallback(
Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
Handle<JSObject> holder,
- Handle<Object> value,
+ Handle<JSFunction> value,
Handle<String> name) {
// ----------- S t a t e -------------
// -- eax : receiver
@@ -2790,14 +2824,8 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal(
// -----------------------------------
Label miss;
- // If the object is the holder then we know that it's a global
- // object which can only happen for contextual loads. In this case,
- // the receiver cannot be a smi.
- if (!object.is_identical_to(holder)) {
- __ JumpIfSmi(eax, &miss);
- }
-
// Check that the maps haven't changed.
+ __ JumpIfSmi(eax, &miss);
CheckPrototypes(object, eax, holder, ebx, edx, edi, name, &miss);
// Get the value from the cell.
@@ -2895,7 +2923,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
Handle<String> name,
Handle<JSObject> receiver,
Handle<JSObject> holder,
- Handle<Object> value) {
+ Handle<JSFunction> value) {
// ----------- S t a t e -------------
// -- eax : key
// -- edx : receiver
@@ -3396,8 +3424,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
// If we fail allocation of the HeapNumber, we still have a value on
// top of the FPU stack. Remove it.
__ bind(&failed_allocation);
- __ ffree();
- __ fincstp();
+ __ fstp(0);
// Fall through to slow case.
// Slow case: Jump to runtime.
@@ -3714,8 +3741,7 @@ void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
// A value was pushed on the floating point stack before the allocation, if
// the allocation fails it needs to be removed.
if (!CpuFeatures::IsSupported(SSE2)) {
- __ ffree();
- __ fincstp();
+ __ fstp(0);
}
Handle<Code> slow_ic =
masm->isolate()->builtins()->KeyedLoadIC_Slow();
@@ -3731,14 +3757,16 @@ void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
void KeyedStoreStubCompiler::GenerateStoreFastElement(
MacroAssembler* masm,
bool is_js_array,
- ElementsKind elements_kind) {
+ ElementsKind elements_kind,
+ KeyedAccessGrowMode grow_mode) {
// ----------- S t a t e -------------
// -- eax : value
// -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
- Label miss_force_generic, transition_elements_kind;
+ Label miss_force_generic, grow, slow, transition_elements_kind;
+ Label check_capacity, prepare_slow, finish_store, commit_backing_store;
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
@@ -3746,24 +3774,32 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
// Check that the key is a smi.
__ JumpIfNotSmi(ecx, &miss_force_generic);
+ if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ __ JumpIfNotSmi(eax, &transition_elements_kind);
+ }
+
// Get the elements array and make sure it is a fast element array, not 'cow'.
__ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
- __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
- Immediate(masm->isolate()->factory()->fixed_array_map()));
- __ j(not_equal, &miss_force_generic);
-
if (is_js_array) {
// Check that the key is within bounds.
__ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // smis.
- __ j(above_equal, &miss_force_generic);
+ if (grow_mode == ALLOW_JSARRAY_GROWTH) {
+ __ j(above_equal, &grow);
+ } else {
+ __ j(above_equal, &miss_force_generic);
+ }
} else {
// Check that the key is within bounds.
__ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset)); // smis.
__ j(above_equal, &miss_force_generic);
}
+ __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
+ Immediate(masm->isolate()->factory()->fixed_array_map()));
+ __ j(not_equal, &miss_force_generic);
+
+ __ bind(&finish_store);
if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
- __ JumpIfNotSmi(eax, &transition_elements_kind);
// ecx is a smi, use times_half_pointer_size instead of
// times_pointer_size
__ mov(FieldOperand(edi,
@@ -3781,8 +3817,8 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
FixedArray::kHeaderSize));
__ mov(Operand(ecx, 0), eax);
// Make sure to preserve the value in register eax.
- __ mov(edx, eax);
- __ RecordWrite(edi, ecx, edx, kDontSaveFPRegs);
+ __ mov(ebx, eax);
+ __ RecordWrite(edi, ecx, ebx, kDontSaveFPRegs);
}
// Done.
@@ -3798,19 +3834,94 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
__ bind(&transition_elements_kind);
Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
__ jmp(ic_miss, RelocInfo::CODE_TARGET);
+
+ if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
+ // Handle transition requiring the array to grow.
+ __ bind(&grow);
+
+ // Make sure the array is only growing by a single element, anything else
+ // must be handled by the runtime. Flags are already set by previous
+ // compare.
+ __ j(not_equal, &miss_force_generic);
+
+ // Check for the empty array, and preallocate a small backing store if
+ // possible.
+ __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
+ __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
+ __ j(not_equal, &check_capacity);
+
+ int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
+ __ AllocateInNewSpace(size, edi, ebx, ecx, &prepare_slow, TAG_OBJECT);
+ // Restore the key, which is known to be the array length.
+
+ // eax: value
+ // ecx: key
+ // edx: receiver
+ // edi: elements
+ // Make sure that the backing store can hold additional elements.
+ __ mov(FieldOperand(edi, JSObject::kMapOffset),
+ Immediate(masm->isolate()->factory()->fixed_array_map()));
+ __ mov(FieldOperand(edi, FixedArray::kLengthOffset),
+ Immediate(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
+ __ mov(ebx, Immediate(masm->isolate()->factory()->the_hole_value()));
+ for (int i = 1; i < JSArray::kPreallocatedArrayElements; ++i) {
+ __ mov(FieldOperand(edi, FixedArray::SizeFor(i)), ebx);
+ }
+
+ // Store the element at index zero.
+ __ mov(FieldOperand(edi, FixedArray::SizeFor(0)), eax);
+
+ // Install the new backing store in the JSArray.
+ __ mov(FieldOperand(edx, JSObject::kElementsOffset), edi);
+ __ RecordWriteField(edx, JSObject::kElementsOffset, edi, ebx,
+ kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+
+ // Increment the length of the array.
+ __ mov(FieldOperand(edx, JSArray::kLengthOffset),
+ Immediate(Smi::FromInt(1)));
+ __ ret(0);
+
+ __ bind(&check_capacity);
+ __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
+ Immediate(masm->isolate()->factory()->fixed_cow_array_map()));
+ __ j(equal, &miss_force_generic);
+
+ // eax: value
+ // ecx: key
+ // edx: receiver
+ // edi: elements
+ // Make sure that the backing store can hold additional elements.
+ __ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset));
+ __ j(above_equal, &slow);
+
+ // Grow the array and finish the store.
+ __ add(FieldOperand(edx, JSArray::kLengthOffset),
+ Immediate(Smi::FromInt(1)));
+ __ jmp(&finish_store);
+
+ __ bind(&prepare_slow);
+ // Restore the key, which is known to be the array length.
+ __ mov(ecx, Immediate(0));
+
+ __ bind(&slow);
+ Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
+ __ jmp(ic_slow, RelocInfo::CODE_TARGET);
+ }
}
void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
MacroAssembler* masm,
- bool is_js_array) {
+ bool is_js_array,
+ KeyedAccessGrowMode grow_mode) {
// ----------- S t a t e -------------
// -- eax : value
// -- ecx : key
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
- Label miss_force_generic, transition_elements_kind;
+ Label miss_force_generic, transition_elements_kind, grow, slow;
+ Label check_capacity, prepare_slow, finish_store, commit_backing_store;
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
@@ -3825,19 +3936,20 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
if (is_js_array) {
// Check that the key is within bounds.
__ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // smis.
+ if (grow_mode == ALLOW_JSARRAY_GROWTH) {
+ __ j(above_equal, &grow);
+ } else {
+ __ j(above_equal, &miss_force_generic);
+ }
} else {
// Check that the key is within bounds.
__ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset)); // smis.
+ __ j(above_equal, &miss_force_generic);
}
- __ j(above_equal, &miss_force_generic);
- __ StoreNumberToDoubleElements(eax,
- edi,
- ecx,
- edx,
- xmm0,
- &transition_elements_kind,
- true);
+ __ bind(&finish_store);
+ __ StoreNumberToDoubleElements(eax, edi, ecx, edx, xmm0,
+ &transition_elements_kind, true);
__ ret(0);
// Handle store cache miss, replacing the ic with the generic stub.
@@ -3850,6 +3962,79 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
__ bind(&transition_elements_kind);
Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
__ jmp(ic_miss, RelocInfo::CODE_TARGET);
+
+ if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
+ // Handle transition requiring the array to grow.
+ __ bind(&grow);
+
+ // Make sure the array is only growing by a single element, anything else
+ // must be handled by the runtime. Flags are already set by previous
+ // compare.
+ __ j(not_equal, &miss_force_generic);
+
+ // Transition on values that can't be stored in a FixedDoubleArray.
+ Label value_is_smi;
+ __ JumpIfSmi(eax, &value_is_smi);
+ __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
+ Immediate(Handle<Map>(masm->isolate()->heap()->heap_number_map())));
+ __ j(not_equal, &transition_elements_kind);
+ __ bind(&value_is_smi);
+
+ // Check for the empty array, and preallocate a small backing store if
+ // possible.
+ __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
+ __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
+ __ j(not_equal, &check_capacity);
+
+ int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
+ __ AllocateInNewSpace(size, edi, ebx, ecx, &prepare_slow, TAG_OBJECT);
+ // Restore the key, which is known to be the array length.
+ __ mov(ecx, Immediate(0));
+
+ // eax: value
+ // ecx: key
+ // edx: receiver
+ // edi: elements
+ // Initialize the new FixedDoubleArray. Leave elements unitialized for
+ // efficiency, they are guaranteed to be initialized before use.
+ __ mov(FieldOperand(edi, JSObject::kMapOffset),
+ Immediate(masm->isolate()->factory()->fixed_double_array_map()));
+ __ mov(FieldOperand(edi, FixedDoubleArray::kLengthOffset),
+ Immediate(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
+
+ // Install the new backing store in the JSArray.
+ __ mov(FieldOperand(edx, JSObject::kElementsOffset), edi);
+ __ RecordWriteField(edx, JSObject::kElementsOffset, edi, ebx,
+ kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+
+ // Increment the length of the array.
+ __ add(FieldOperand(edx, JSArray::kLengthOffset),
+ Immediate(Smi::FromInt(1)));
+ __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
+ __ jmp(&finish_store);
+
+ __ bind(&check_capacity);
+ // eax: value
+ // ecx: key
+ // edx: receiver
+ // edi: elements
+ // Make sure that the backing store can hold additional elements.
+ __ cmp(ecx, FieldOperand(edi, FixedDoubleArray::kLengthOffset));
+ __ j(above_equal, &slow);
+
+ // Grow the array and finish the store.
+ __ add(FieldOperand(edx, JSArray::kLengthOffset),
+ Immediate(Smi::FromInt(1)));
+ __ jmp(&finish_store);
+
+ __ bind(&prepare_slow);
+ // Restore the key, which is known to be the array length.
+ __ mov(ecx, Immediate(0));
+
+ __ bind(&slow);
+ Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
+ __ jmp(ic_slow, RelocInfo::CODE_TARGET);
+ }
}
diff --git a/src/3rdparty/v8/src/ic-inl.h b/src/3rdparty/v8/src/ic-inl.h
index 498cf3a..6a86921 100644
--- a/src/3rdparty/v8/src/ic-inl.h
+++ b/src/3rdparty/v8/src/ic-inl.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -29,6 +29,8 @@
#define V8_IC_INL_H_
#include "ic.h"
+
+#include "compiler.h"
#include "debug.h"
#include "macro-assembler.h"
@@ -36,7 +38,7 @@ namespace v8 {
namespace internal {
-Address IC::address() {
+Address IC::address() const {
// Get the address of the call.
Address result = pc() - Assembler::kCallTargetAddressOffset;
@@ -77,18 +79,20 @@ Code* IC::GetTargetAtAddress(Address address) {
void IC::SetTargetAtAddress(Address address, Code* target) {
ASSERT(target->is_inline_cache_stub() || target->is_compare_ic_stub());
+ Code* old_target = GetTargetAtAddress(address);
#ifdef DEBUG
// STORE_IC and KEYED_STORE_IC use Code::extra_ic_state() to mark
// ICs as strict mode. The strict-ness of the IC must be preserved.
- Code* old_target = GetTargetAtAddress(address);
if (old_target->kind() == Code::STORE_IC ||
old_target->kind() == Code::KEYED_STORE_IC) {
- ASSERT(old_target->extra_ic_state() == target->extra_ic_state());
+ ASSERT(Code::GetStrictMode(old_target->extra_ic_state()) ==
+ Code::GetStrictMode(target->extra_ic_state()));
}
#endif
Assembler::set_target_address_at(address, target->instruction_start());
target->GetHeap()->incremental_marking()->RecordCodeTargetPatch(address,
target);
+ PostPatching(address, target, old_target);
}
diff --git a/src/3rdparty/v8/src/ic.cc b/src/3rdparty/v8/src/ic.cc
index 3b6ec67..b8d4b40 100644
--- a/src/3rdparty/v8/src/ic.cc
+++ b/src/3rdparty/v8/src/ic.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -40,13 +40,13 @@ namespace v8 {
namespace internal {
#ifdef DEBUG
-static char TransitionMarkFromState(IC::State state) {
+char IC::TransitionMarkFromState(IC::State state) {
switch (state) {
case UNINITIALIZED: return '0';
case PREMONOMORPHIC: return 'P';
case MONOMORPHIC: return '1';
case MONOMORPHIC_PROTOTYPE_FAILURE: return '^';
- case MEGAMORPHIC: return 'N';
+ case MEGAMORPHIC: return IsGeneric() ? 'G' : 'N';
// We never see the debugger states here, because the state is
// computed from the original code - not the patched code. Let
@@ -80,33 +80,35 @@ void IC::TraceIC(const char* type,
raw_frame = it.frame();
}
}
- if (raw_frame->is_java_script()) {
- JavaScriptFrame* frame = JavaScriptFrame::cast(raw_frame);
- Code* js_code = frame->unchecked_code();
- // Find the function on the stack and both the active code for the
- // function and the original code.
- JSFunction* function = JSFunction::cast(frame->function());
- function->PrintName();
- int code_offset =
- static_cast<int>(address() - js_code->instruction_start());
- PrintF("+%d", code_offset);
- } else {
- PrintF("<unknown>");
- }
- PrintF(" (%c->%c)",
+ JavaScriptFrame::PrintTop(stdout, false, true);
+ bool new_can_grow =
+ Code::GetKeyedAccessGrowMode(new_target->extra_ic_state()) ==
+ ALLOW_JSARRAY_GROWTH;
+ PrintF(" (%c->%c%s)",
TransitionMarkFromState(old_state),
- TransitionMarkFromState(new_state));
+ TransitionMarkFromState(new_state),
+ new_can_grow ? ".GROW" : "");
name->Print();
PrintF("]\n");
}
}
-#endif // DEBUG
+#define TRACE_GENERIC_IC(type, reason) \
+ do { \
+ if (FLAG_trace_ic) { \
+ PrintF("[%s patching generic stub in ", type); \
+ JavaScriptFrame::PrintTop(stdout, false, true); \
+ PrintF(" (%s)]\n", reason); \
+ } \
+ } while (false)
+
+#else
+#define TRACE_GENERIC_IC(type, reason)
+#endif // DEBUG
#define TRACE_IC(type, name, old_state, new_target) \
ASSERT((TraceIC(type, name, old_state, new_target), true))
-
IC::IC(FrameDepth depth, Isolate* isolate) : isolate_(isolate) {
ASSERT(isolate == Isolate::Current());
// To improve the performance of the (much used) IC code, we unfold
@@ -137,7 +139,7 @@ IC::IC(FrameDepth depth, Isolate* isolate) : isolate_(isolate) {
#ifdef ENABLE_DEBUGGER_SUPPORT
-Address IC::OriginalCodeAddress() {
+Address IC::OriginalCodeAddress() const {
HandleScope scope;
// Compute the JavaScript frame for the frame pointer of this IC
// structure. We need this to be able to find the function
@@ -294,6 +296,47 @@ Failure* IC::ReferenceError(const char* type, Handle<String> name) {
}
+static int ComputeTypeInfoCountDelta(IC::State old_state, IC::State new_state) {
+ bool was_uninitialized =
+ old_state == UNINITIALIZED || old_state == PREMONOMORPHIC;
+ bool is_uninitialized =
+ new_state == UNINITIALIZED || new_state == PREMONOMORPHIC;
+ return (was_uninitialized && !is_uninitialized) ? 1 :
+ (!was_uninitialized && is_uninitialized) ? -1 : 0;
+}
+
+
+void IC::PostPatching(Address address, Code* target, Code* old_target) {
+ if (FLAG_type_info_threshold == 0 && !FLAG_watch_ic_patching) {
+ return;
+ }
+ Code* host = target->GetHeap()->isolate()->
+ inner_pointer_to_code_cache()->GetCacheEntry(address)->code;
+ if (host->kind() != Code::FUNCTION) return;
+
+ if (FLAG_type_info_threshold > 0 &&
+ old_target->is_inline_cache_stub() &&
+ target->is_inline_cache_stub()) {
+ int delta = ComputeTypeInfoCountDelta(old_target->ic_state(),
+ target->ic_state());
+ // Not all Code objects have TypeFeedbackInfo.
+ if (delta != 0 && host->type_feedback_info()->IsTypeFeedbackInfo()) {
+ TypeFeedbackInfo* info =
+ TypeFeedbackInfo::cast(host->type_feedback_info());
+ info->set_ic_with_type_info_count(
+ info->ic_with_type_info_count() + delta);
+ }
+ }
+ if (FLAG_watch_ic_patching) {
+ host->set_profiler_ticks(0);
+ Isolate::Current()->runtime_profiler()->NotifyICChanged();
+ }
+ // TODO(2029): When an optimized function is patched, it would
+ // be nice to propagate the corresponding type information to its
+ // unoptimized version for the benefit of later inlining.
+}
+
+
void IC::Clear(Address address) {
Code* target = GetTargetAtAddress(address);
@@ -352,7 +395,7 @@ void LoadIC::Clear(Address address, Code* target) {
void StoreIC::Clear(Address address, Code* target) {
if (target->ic_state() == UNINITIALIZED) return;
SetTargetAtAddress(address,
- (target->extra_ic_state() == kStrictMode)
+ (Code::GetStrictMode(target->extra_ic_state()) == kStrictMode)
? initialize_stub_strict()
: initialize_stub());
}
@@ -361,7 +404,7 @@ void StoreIC::Clear(Address address, Code* target) {
void KeyedStoreIC::Clear(Address address, Code* target) {
if (target->ic_state() == UNINITIALIZED) return;
SetTargetAtAddress(address,
- (target->extra_ic_state() == kStrictMode)
+ (Code::GetStrictMode(target->extra_ic_state()) == kStrictMode)
? initialize_stub_strict()
: initialize_stub());
}
@@ -435,7 +478,7 @@ void CallICBase::ReceiverToObjectIfRequired(Handle<Object> callee,
if (callee->IsJSFunction()) {
Handle<JSFunction> function = Handle<JSFunction>::cast(callee);
- if (function->shared()->strict_mode() || function->IsBuiltin()) {
+ if (!function->shared()->is_classic_mode() || function->IsBuiltin()) {
// Do not wrap receiver for strict mode functions or for builtins.
return;
}
@@ -622,7 +665,7 @@ Handle<Code> CallICBase::ComputeMonomorphicStub(LookupResult* lookup,
// applicable.
if (!holder.is_identical_to(receiver)) return Handle<Code>::null();
return isolate()->stub_cache()->ComputeCallNormal(
- argc, kind_, extra_state, IsQmlGlobal(holder));
+ argc, kind_, extra_state);
}
break;
}
@@ -862,7 +905,7 @@ MaybeObject* LoadIC::Load(State state,
}
PropertyAttributes attr;
- if (lookup.IsProperty() &&
+ if (lookup.IsFound() &&
(lookup.type() == INTERCEPTOR || lookup.type() == HANDLER)) {
// Get the property.
Handle<Object> result =
@@ -914,7 +957,7 @@ void LoadIC::UpdateCaches(LookupResult* lookup,
name, receiver, holder, lookup->GetFieldIndex());
break;
case CONSTANT_FUNCTION: {
- Handle<Object> constant(lookup->GetConstantFunction());
+ Handle<JSFunction> constant(lookup->GetConstantFunction());
code = isolate()->stub_cache()->ComputeLoadConstant(
name, receiver, holder, constant);
break;
@@ -960,6 +1003,15 @@ void LoadIC::UpdateCaches(LookupResult* lookup,
state == MONOMORPHIC_PROTOTYPE_FAILURE) {
set_target(*code);
} else if (state == MONOMORPHIC) {
+ // We are transitioning from monomorphic to megamorphic case.
+ // Place the current monomorphic stub and stub compiled for
+ // the receiver into stub cache.
+ Map* map = target()->FindFirstMap();
+ if (map != NULL) {
+ isolate()->stub_cache()->Set(*name, map, target());
+ }
+ isolate()->stub_cache()->Set(*name, receiver->map(), *code);
+
set_target(*megamorphic_stub());
} else if (state == MEGAMORPHIC) {
// Cache code holding map should be consistent with
@@ -973,19 +1025,22 @@ void LoadIC::UpdateCaches(LookupResult* lookup,
Handle<Code> KeyedLoadIC::GetElementStubWithoutMapCheck(
bool is_js_array,
- ElementsKind elements_kind) {
+ ElementsKind elements_kind,
+ KeyedAccessGrowMode grow_mode) {
+ ASSERT(grow_mode == DO_NOT_ALLOW_JSARRAY_GROWTH);
return KeyedLoadElementStub(elements_kind).GetCode();
}
Handle<Code> KeyedLoadIC::ComputePolymorphicStub(
MapHandleList* receiver_maps,
- StrictModeFlag strict_mode) {
+ StrictModeFlag strict_mode,
+ KeyedAccessGrowMode growth_mode) {
CodeHandleList handler_ics(receiver_maps->length());
for (int i = 0; i < receiver_maps->length(); ++i) {
Handle<Map> receiver_map = receiver_maps->at(i);
Handle<Code> cached_stub = ComputeMonomorphicStubWithoutMapCheck(
- receiver_map, strict_mode);
+ receiver_map, strict_mode, growth_mode);
handler_ics.Add(cached_stub);
}
KeyedLoadStubCompiler compiler(isolate());
@@ -1085,7 +1140,7 @@ MaybeObject* KeyedLoadIC::Load(State state,
}
PropertyAttributes attr;
- if (lookup.IsProperty() && lookup.type() == INTERCEPTOR) {
+ if (lookup.IsFound() && lookup.type() == INTERCEPTOR) {
// Get the property.
Handle<Object> result =
Object::GetProperty(object, object, &lookup, name, &attr);
@@ -1123,6 +1178,8 @@ MaybeObject* KeyedLoadIC::Load(State state,
stub = ComputeStub(receiver, LOAD, kNonStrictMode, stub);
}
}
+ } else {
+ TRACE_GENERIC_IC("KeyedLoadIC", "force generic");
}
if (!stub.is_null()) set_target(*stub);
}
@@ -1163,7 +1220,7 @@ void KeyedLoadIC::UpdateCaches(LookupResult* lookup,
name, receiver, holder, lookup->GetFieldIndex());
break;
case CONSTANT_FUNCTION: {
- Handle<Object> constant(lookup->GetConstantFunction());
+ Handle<JSFunction> constant(lookup->GetConstantFunction());
code = isolate()->stub_cache()->ComputeKeyedLoadConstant(
name, receiver, holder, constant);
break;
@@ -1206,10 +1263,12 @@ void KeyedLoadIC::UpdateCaches(LookupResult* lookup,
static bool StoreICableLookup(LookupResult* lookup) {
// Bail out if we didn't find a result.
- if (!lookup->IsPropertyOrTransition() || !lookup->IsCacheable()) return false;
+ if (!lookup->IsFound() || lookup->type() == NULL_DESCRIPTOR) return false;
- // If the property is read-only, we leave the IC in its current
- // state.
+ // Bail out if inline caching is not allowed.
+ if (!lookup->IsCacheable()) return false;
+
+ // If the property is read-only, we leave the IC in its current state.
if (lookup->IsReadOnly()) return false;
return true;
@@ -1267,15 +1326,19 @@ MaybeObject* StoreIC::Store(State state,
// Check if the given name is an array index.
uint32_t index;
if (name->AsArrayIndex(&index)) {
- Handle<Object> result = SetElement(receiver, index, value, strict_mode);
+ Handle<Object> result =
+ JSObject::SetElement(receiver, index, value, NONE, strict_mode);
RETURN_IF_EMPTY_HANDLE(isolate(), result);
return *value;
}
- // Use specialized code for setting the length of arrays.
- if (receiver->IsJSArray()
- && name->Equals(isolate()->heap()->length_symbol())
- && Handle<JSArray>::cast(receiver)->AllowsSetElementsLength()) {
+ // Use specialized code for setting the length of arrays with fast
+ // properties. Slow properties might indicate redefinition of the
+ // length property.
+ if (receiver->IsJSArray() &&
+ name->Equals(isolate()->heap()->length_symbol()) &&
+ Handle<JSArray>::cast(receiver)->AllowsSetElementsLength() &&
+ receiver->HasFastProperties()) {
#ifdef DEBUG
if (FLAG_trace_ic) PrintF("[StoreIC : +#length /array]\n");
#endif
@@ -1297,7 +1360,7 @@ MaybeObject* StoreIC::Store(State state,
// Strict mode doesn't allow setting non-existent global property
// or an assignment to a read only property.
if (strict_mode == kStrictMode) {
- if (lookup.IsFound() && lookup.IsReadOnly()) {
+ if (lookup.IsProperty() && lookup.IsReadOnly()) {
return TypeError("strict_read_only_property", object, name);
} else if (IsContextual(object)) {
return ReferenceError("not_defined", name);
@@ -1330,10 +1393,12 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
Handle<JSObject> receiver,
Handle<String> name,
Handle<Object> value) {
- // Skip JSGlobalProxy.
ASSERT(!receiver->IsJSGlobalProxy());
-
ASSERT(StoreICableLookup(lookup));
+ // These are not cacheable, so we never see such LookupResults here.
+ ASSERT(lookup->type() != HANDLER);
+ // We get only called for properties or transitions, see StoreICableLookup.
+ ASSERT(lookup->type() != NULL_DESCRIPTOR);
// If the property has a non-field type allowing map transitions
// where there is extra room in the object, we leave the IC in its
@@ -1354,7 +1419,6 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
break;
case MAP_TRANSITION: {
if (lookup->GetAttributes() != NONE) return;
- ASSERT(type == MAP_TRANSITION);
Handle<Map> transition(lookup->GetTransitionMap());
int index = transition->PropertyIndexFor(*name);
code = isolate()->stub_cache()->ComputeStoreField(
@@ -1390,7 +1454,13 @@ void StoreIC::UpdateCaches(LookupResult* lookup,
code = isolate()->stub_cache()->ComputeStoreInterceptor(
name, receiver, strict_mode);
break;
- default:
+ case CONSTANT_FUNCTION:
+ case CONSTANT_TRANSITION:
+ case ELEMENTS_TRANSITION:
+ return;
+ case HANDLER:
+ case NULL_DESCRIPTOR:
+ UNREACHABLE();
return;
}
@@ -1455,29 +1525,51 @@ Handle<Code> KeyedIC::ComputeStub(Handle<JSObject> receiver,
StrictModeFlag strict_mode,
Handle<Code> generic_stub) {
State ic_state = target()->ic_state();
- if ((ic_state == UNINITIALIZED || ic_state == PREMONOMORPHIC) &&
- !IsTransitionStubKind(stub_kind)) {
- return ComputeMonomorphicStub(
- receiver, stub_kind, strict_mode, generic_stub);
- }
- ASSERT(target() != *generic_stub);
+ KeyedAccessGrowMode grow_mode = IsGrowStubKind(stub_kind)
+ ? ALLOW_JSARRAY_GROWTH
+ : DO_NOT_ALLOW_JSARRAY_GROWTH;
// Don't handle megamorphic property accesses for INTERCEPTORS or CALLBACKS
// via megamorphic stubs, since they don't have a map in their relocation info
// and so the stubs can't be harvested for the object needed for a map check.
if (target()->type() != NORMAL) {
+ TRACE_GENERIC_IC("KeyedIC", "non-NORMAL target type");
return generic_stub;
}
- // Determine the list of receiver maps that this call site has seen,
- // adding the map that was just encountered.
+ bool monomorphic = false;
MapHandleList target_receiver_maps;
- Handle<Map> receiver_map(receiver->map());
- if (ic_state == UNINITIALIZED || ic_state == PREMONOMORPHIC) {
- target_receiver_maps.Add(receiver_map);
- } else {
+ if (ic_state != UNINITIALIZED && ic_state != PREMONOMORPHIC) {
GetReceiverMapsForStub(Handle<Code>(target()), &target_receiver_maps);
}
+ if (!IsTransitionStubKind(stub_kind)) {
+ if (ic_state == UNINITIALIZED || ic_state == PREMONOMORPHIC) {
+ monomorphic = true;
+ } else {
+ if (ic_state == MONOMORPHIC) {
+ // The first time a receiver is seen that is a transitioned version of
+ // the previous monomorphic receiver type, assume the new ElementsKind
+ // is the monomorphic type. This benefits global arrays that only
+ // transition once, and all call sites accessing them are faster if they
+ // remain monomorphic. If this optimistic assumption is not true, the IC
+ // will miss again and it will become polymorphic and support both the
+ // untransitioned and transitioned maps.
+ monomorphic = IsMoreGeneralElementsKindTransition(
+ target_receiver_maps.at(0)->elements_kind(),
+ receiver->GetElementsKind());
+ }
+ }
+ }
+
+ if (monomorphic) {
+ return ComputeMonomorphicStub(
+ receiver, stub_kind, strict_mode, generic_stub);
+ }
+ ASSERT(target() != *generic_stub);
+
+ // Determine the list of receiver maps that this call site has seen,
+ // adding the map that was just encountered.
+ Handle<Map> receiver_map(receiver->map());
bool map_added =
AddOneReceiverMapIfMissing(&target_receiver_maps, receiver_map);
if (IsTransitionStubKind(stub_kind)) {
@@ -1487,23 +1579,32 @@ Handle<Code> KeyedIC::ComputeStub(Handle<JSObject> receiver,
if (!map_added) {
// If the miss wasn't due to an unseen map, a polymorphic stub
// won't help, use the generic stub.
+ TRACE_GENERIC_IC("KeyedIC", "same map added twice");
return generic_stub;
}
// If the maximum number of receiver maps has been exceeded, use the generic
// version of the IC.
if (target_receiver_maps.length() > kMaxKeyedPolymorphism) {
+ TRACE_GENERIC_IC("KeyedIC", "max polymorph exceeded");
return generic_stub;
}
+ if ((Code::GetKeyedAccessGrowMode(target()->extra_ic_state()) ==
+ ALLOW_JSARRAY_GROWTH)) {
+ grow_mode = ALLOW_JSARRAY_GROWTH;
+ }
+
Handle<PolymorphicCodeCache> cache =
isolate()->factory()->polymorphic_code_cache();
- Code::Flags flags = Code::ComputeFlags(kind(), MEGAMORPHIC, strict_mode);
+ Code::ExtraICState extra_state = Code::ComputeExtraICState(grow_mode,
+ strict_mode);
+ Code::Flags flags = Code::ComputeFlags(kind(), MEGAMORPHIC, extra_state);
Handle<Object> probe = cache->Lookup(&target_receiver_maps, flags);
if (probe->IsCode()) return Handle<Code>::cast(probe);
Handle<Code> stub =
- ComputePolymorphicStub(&target_receiver_maps, strict_mode);
+ ComputePolymorphicStub(&target_receiver_maps, strict_mode, grow_mode);
PolymorphicCodeCache::Update(cache, &target_receiver_maps, flags, stub);
return stub;
}
@@ -1511,7 +1612,8 @@ Handle<Code> KeyedIC::ComputeStub(Handle<JSObject> receiver,
Handle<Code> KeyedIC::ComputeMonomorphicStubWithoutMapCheck(
Handle<Map> receiver_map,
- StrictModeFlag strict_mode) {
+ StrictModeFlag strict_mode,
+ KeyedAccessGrowMode grow_mode) {
if ((receiver_map->instance_type() & kNotStringTag) == 0) {
ASSERT(!string_stub().is_null());
return string_stub();
@@ -1523,7 +1625,8 @@ Handle<Code> KeyedIC::ComputeMonomorphicStubWithoutMapCheck(
receiver_map->has_external_array_elements());
bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
return GetElementStubWithoutMapCheck(is_js_array,
- receiver_map->elements_kind());
+ receiver_map->elements_kind(),
+ grow_mode);
}
}
@@ -1550,9 +1653,12 @@ Handle<Map> KeyedIC::ComputeTransitionedMap(Handle<JSObject> receiver,
switch (stub_kind) {
case KeyedIC::STORE_TRANSITION_SMI_TO_OBJECT:
case KeyedIC::STORE_TRANSITION_DOUBLE_TO_OBJECT:
+ case KeyedIC::STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT:
+ case KeyedIC::STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT:
return JSObject::GetElementsTransitionMap(receiver, FAST_ELEMENTS);
break;
case KeyedIC::STORE_TRANSITION_SMI_TO_DOUBLE:
+ case KeyedIC::STORE_AND_GROW_TRANSITION_SMI_TO_DOUBLE:
return JSObject::GetElementsTransitionMap(receiver, FAST_DOUBLE_ELEMENTS);
break;
default:
@@ -1564,13 +1670,16 @@ Handle<Map> KeyedIC::ComputeTransitionedMap(Handle<JSObject> receiver,
Handle<Code> KeyedStoreIC::GetElementStubWithoutMapCheck(
bool is_js_array,
- ElementsKind elements_kind) {
- return KeyedStoreElementStub(is_js_array, elements_kind).GetCode();
+ ElementsKind elements_kind,
+ KeyedAccessGrowMode grow_mode) {
+ return KeyedStoreElementStub(is_js_array, elements_kind, grow_mode).GetCode();
}
-Handle<Code> KeyedStoreIC::ComputePolymorphicStub(MapHandleList* receiver_maps,
- StrictModeFlag strict_mode) {
+Handle<Code> KeyedStoreIC::ComputePolymorphicStub(
+ MapHandleList* receiver_maps,
+ StrictModeFlag strict_mode,
+ KeyedAccessGrowMode grow_mode) {
// Collect MONOMORPHIC stubs for all target_receiver_maps.
CodeHandleList handler_ics(receiver_maps->length());
MapHandleList transitioned_maps(receiver_maps->length());
@@ -1584,16 +1693,17 @@ Handle<Code> KeyedStoreIC::ComputePolymorphicStub(MapHandleList* receiver_maps,
receiver_map->elements_kind(), // original elements_kind
transitioned_map->elements_kind(),
receiver_map->instance_type() == JS_ARRAY_TYPE, // is_js_array
- strict_mode).GetCode();
+ strict_mode, grow_mode).GetCode();
} else {
cached_stub = ComputeMonomorphicStubWithoutMapCheck(receiver_map,
- strict_mode);
+ strict_mode,
+ grow_mode);
}
ASSERT(!cached_stub.is_null());
handler_ics.Add(cached_stub);
transitioned_maps.Add(transitioned_map);
}
- KeyedStoreStubCompiler compiler(isolate(), strict_mode);
+ KeyedStoreStubCompiler compiler(isolate(), strict_mode, grow_mode);
Handle<Code> code = compiler.CompileStorePolymorphic(
receiver_maps, &handler_ics, &transitioned_maps);
isolate()->counters()->keyed_store_polymorphic_stubs()->Increment();
@@ -1603,6 +1713,48 @@ Handle<Code> KeyedStoreIC::ComputePolymorphicStub(MapHandleList* receiver_maps,
}
+KeyedIC::StubKind KeyedStoreIC::GetStubKind(Handle<JSObject> receiver,
+ Handle<Object> key,
+ Handle<Object> value) {
+ ASSERT(key->IsSmi());
+ int index = Smi::cast(*key)->value();
+ bool allow_growth = receiver->IsJSArray() &&
+ JSArray::cast(*receiver)->length()->IsSmi() &&
+ index >= Smi::cast(JSArray::cast(*receiver)->length())->value();
+
+ if (allow_growth) {
+ // Handle growing array in stub if necessary.
+ if (receiver->HasFastSmiOnlyElements()) {
+ if (value->IsHeapNumber()) {
+ return STORE_AND_GROW_TRANSITION_SMI_TO_DOUBLE;
+ }
+ if (value->IsHeapObject()) {
+ return STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT;
+ }
+ } else if (receiver->HasFastDoubleElements()) {
+ if (!value->IsSmi() && !value->IsHeapNumber()) {
+ return STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT;
+ }
+ }
+ return STORE_AND_GROW_NO_TRANSITION;
+ } else {
+ // Handle only in-bounds elements accesses.
+ if (receiver->HasFastSmiOnlyElements()) {
+ if (value->IsHeapNumber()) {
+ return STORE_TRANSITION_SMI_TO_DOUBLE;
+ } else if (value->IsHeapObject()) {
+ return STORE_TRANSITION_SMI_TO_OBJECT;
+ }
+ } else if (receiver->HasFastDoubleElements()) {
+ if (!value->IsSmi() && !value->IsHeapNumber()) {
+ return STORE_TRANSITION_DOUBLE_TO_OBJECT;
+ }
+ }
+ return STORE_NO_TRANSITION;
+ }
+}
+
+
MaybeObject* KeyedStoreIC::Store(State state,
StrictModeFlag strict_mode,
Handle<Object> object,
@@ -1631,18 +1783,18 @@ MaybeObject* KeyedStoreIC::Store(State state,
// Check if the given name is an array index.
uint32_t index;
if (name->AsArrayIndex(&index)) {
- Handle<Object> result = SetElement(receiver, index, value, strict_mode);
+ Handle<Object> result =
+ JSObject::SetElement(receiver, index, value, NONE, strict_mode);
RETURN_IF_EMPTY_HANDLE(isolate(), result);
return *value;
}
- // Lookup the property locally in the receiver.
- LookupResult lookup(isolate());
- receiver->LocalLookup(*name, &lookup);
-
// Update inline cache and stub cache.
- if (FLAG_use_ic) {
- UpdateCaches(&lookup, state, strict_mode, receiver, name, value);
+ if (FLAG_use_ic && !receiver->IsJSGlobalProxy()) {
+ LookupResult lookup(isolate());
+ if (LookupForWrite(receiver, name, &lookup)) {
+ UpdateCaches(&lookup, state, strict_mode, receiver, name, value);
+ }
}
// Set the property.
@@ -1665,20 +1817,11 @@ MaybeObject* KeyedStoreIC::Store(State state,
stub = non_strict_arguments_stub();
} else if (!force_generic) {
if (key->IsSmi() && (target() != *non_strict_arguments_stub())) {
- StubKind stub_kind = STORE_NO_TRANSITION;
- if (receiver->GetElementsKind() == FAST_SMI_ONLY_ELEMENTS) {
- if (value->IsHeapNumber()) {
- stub_kind = STORE_TRANSITION_SMI_TO_DOUBLE;
- } else if (value->IsHeapObject()) {
- stub_kind = STORE_TRANSITION_SMI_TO_OBJECT;
- }
- } else if (receiver->GetElementsKind() == FAST_DOUBLE_ELEMENTS) {
- if (!value->IsSmi() && !value->IsHeapNumber()) {
- stub_kind = STORE_TRANSITION_DOUBLE_TO_OBJECT;
- }
- }
+ StubKind stub_kind = GetStubKind(receiver, key, value);
stub = ComputeStub(receiver, stub_kind, strict_mode, stub);
}
+ } else {
+ TRACE_GENERIC_IC("KeyedStoreIC", "force generic");
}
}
if (!stub.is_null()) set_target(*stub);
@@ -1698,15 +1841,12 @@ void KeyedStoreIC::UpdateCaches(LookupResult* lookup,
Handle<JSObject> receiver,
Handle<String> name,
Handle<Object> value) {
- // Skip JSGlobalProxy.
- if (receiver->IsJSGlobalProxy()) return;
-
- // Bail out if we didn't find a result.
- if (!lookup->IsPropertyOrTransition() || !lookup->IsCacheable()) return;
-
- // If the property is read-only, we leave the IC in its current
- // state.
- if (lookup->IsReadOnly()) return;
+ ASSERT(!receiver->IsJSGlobalProxy());
+ ASSERT(StoreICableLookup(lookup));
+ // These are not cacheable, so we never see such LookupResults here.
+ ASSERT(lookup->type() != HANDLER);
+ // We get only called for properties or transitions, see StoreICableLookup.
+ ASSERT(lookup->type() != NULL_DESCRIPTOR);
// If the property has a non-field type allowing map transitions
// where there is extra room in the object, we leave the IC in its
@@ -1726,7 +1866,6 @@ void KeyedStoreIC::UpdateCaches(LookupResult* lookup,
break;
case MAP_TRANSITION:
if (lookup->GetAttributes() == NONE) {
- ASSERT(type == MAP_TRANSITION);
Handle<Map> transition(lookup->GetTransitionMap());
int index = transition->PropertyIndexFor(*name);
code = isolate()->stub_cache()->ComputeKeyedStoreField(
@@ -1734,13 +1873,22 @@ void KeyedStoreIC::UpdateCaches(LookupResult* lookup,
break;
}
// fall through.
- default:
+ case NORMAL:
+ case CONSTANT_FUNCTION:
+ case CALLBACKS:
+ case INTERCEPTOR:
+ case CONSTANT_TRANSITION:
+ case ELEMENTS_TRANSITION:
// Always rewrite to the generic case so that we do not
// repeatedly try to rewrite.
code = (strict_mode == kStrictMode)
? generic_stub_strict()
: generic_stub();
break;
+ case HANDLER:
+ case NULL_DESCRIPTOR:
+ UNREACHABLE();
+ return;
}
ASSERT(!code.is_null());
@@ -1852,7 +2000,7 @@ RUNTIME_FUNCTION(MaybeObject*, StoreIC_Miss) {
IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state();
return ic.Store(state,
- static_cast<StrictModeFlag>(extra_ic_state & kStrictMode),
+ Code::GetStrictMode(extra_ic_state),
args.at<Object>(0),
args.at<String>(1),
args.at<Object>(2));
@@ -1863,12 +2011,19 @@ RUNTIME_FUNCTION(MaybeObject*, StoreIC_ArrayLength) {
NoHandleAllocation nha;
ASSERT(args.length() == 2);
- JSObject* receiver = JSObject::cast(args[0]);
+ JSArray* receiver = JSArray::cast(args[0]);
Object* len = args[1];
// The generated code should filter out non-Smis before we get here.
ASSERT(len->IsSmi());
+#ifdef DEBUG
+ // The length property has to be a writable callback property.
+ LookupResult debug_lookup(isolate);
+ receiver->LocalLookup(isolate->heap()->length_symbol(), &debug_lookup);
+ ASSERT(debug_lookup.type() == CALLBACKS && !debug_lookup.IsReadOnly());
+#endif
+
Object* result;
{ MaybeObject* maybe_result = receiver->SetElementsLength(len);
if (!maybe_result->ToObject(&result)) return maybe_result;
@@ -1921,7 +2076,7 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_Miss) {
IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state();
return ic.Store(state,
- static_cast<StrictModeFlag>(extra_ic_state & kStrictMode),
+ Code::GetStrictMode(extra_ic_state),
args.at<Object>(0),
args.at<Object>(1),
args.at<Object>(2),
@@ -1937,8 +2092,7 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_Slow) {
Handle<Object> object = args.at<Object>(0);
Handle<Object> key = args.at<Object>(1);
Handle<Object> value = args.at<Object>(2);
- StrictModeFlag strict_mode =
- static_cast<StrictModeFlag>(extra_ic_state & kStrictMode);
+ StrictModeFlag strict_mode = Code::GetStrictMode(extra_ic_state);
return Runtime::SetObjectProperty(isolate,
object,
key,
@@ -1955,7 +2109,7 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissForceGeneric) {
IC::State state = IC::StateFrom(ic.target(), args[0], args[1]);
Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state();
return ic.Store(state,
- static_cast<StrictModeFlag>(extra_ic_state & kStrictMode),
+ Code::GetStrictMode(extra_ic_state),
args.at<Object>(0),
args.at<Object>(1),
args.at<Object>(2),
@@ -2304,6 +2458,7 @@ const char* CompareIC::GetStateName(State state) {
case SMIS: return "SMIS";
case HEAP_NUMBERS: return "HEAP_NUMBERS";
case OBJECTS: return "OBJECTS";
+ case KNOWN_OBJECTS: return "OBJECTS";
case SYMBOLS: return "SYMBOLS";
case STRINGS: return "STRINGS";
case GENERIC: return "GENERIC";
@@ -2318,19 +2473,50 @@ CompareIC::State CompareIC::TargetState(State state,
bool has_inlined_smi_code,
Handle<Object> x,
Handle<Object> y) {
- if (!has_inlined_smi_code && state != UNINITIALIZED && state != SYMBOLS) {
- return GENERIC;
+ switch (state) {
+ case UNINITIALIZED:
+ if (x->IsSmi() && y->IsSmi()) return SMIS;
+ if (x->IsNumber() && y->IsNumber()) return HEAP_NUMBERS;
+ if (Token::IsOrderedRelationalCompareOp(op_)) {
+ // Ordered comparisons treat undefined as NaN, so the
+ // HEAP_NUMBER stub will do the right thing.
+ if ((x->IsNumber() && y->IsUndefined()) ||
+ (y->IsNumber() && x->IsUndefined())) {
+ return HEAP_NUMBERS;
+ }
+ }
+ if (x->IsSymbol() && y->IsSymbol()) {
+ // We compare symbols as strings if we need to determine
+ // the order in a non-equality compare.
+ return Token::IsEqualityOp(op_) ? SYMBOLS : STRINGS;
+ }
+ if (x->IsString() && y->IsString()) return STRINGS;
+ if (!Token::IsEqualityOp(op_)) return GENERIC;
+ if (x->IsJSObject() && y->IsJSObject()) {
+ if (Handle<JSObject>::cast(x)->map() ==
+ Handle<JSObject>::cast(y)->map() &&
+ Token::IsEqualityOp(op_)) {
+ return KNOWN_OBJECTS;
+ } else {
+ return OBJECTS;
+ }
+ }
+ return GENERIC;
+ case SMIS:
+ return has_inlined_smi_code && x->IsNumber() && y->IsNumber()
+ ? HEAP_NUMBERS
+ : GENERIC;
+ case SYMBOLS:
+ ASSERT(Token::IsEqualityOp(op_));
+ return x->IsString() && y->IsString() ? STRINGS : GENERIC;
+ case HEAP_NUMBERS:
+ case STRINGS:
+ case OBJECTS:
+ case KNOWN_OBJECTS:
+ case GENERIC:
+ return GENERIC;
}
- if (state == UNINITIALIZED && x->IsSmi() && y->IsSmi()) return SMIS;
- if ((state == UNINITIALIZED || (state == SMIS && has_inlined_smi_code)) &&
- x->IsNumber() && y->IsNumber()) return HEAP_NUMBERS;
- if (op_ != Token::EQ && op_ != Token::EQ_STRICT) return GENERIC;
- if (state == UNINITIALIZED &&
- x->IsSymbol() && y->IsSymbol()) return SYMBOLS;
- if ((state == UNINITIALIZED || state == SYMBOLS) &&
- x->IsString() && y->IsString()) return STRINGS;
- if (state == UNINITIALIZED &&
- x->IsJSObject() && y->IsJSObject()) return OBJECTS;
+ UNREACHABLE();
return GENERIC;
}
diff --git a/src/3rdparty/v8/src/ic.h b/src/3rdparty/v8/src/ic.h
index ee53f0f..5662552 100644
--- a/src/3rdparty/v8/src/ic.h
+++ b/src/3rdparty/v8/src/ic.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -91,10 +91,13 @@ class IC {
// Construct the IC structure with the given number of extra
// JavaScript frames on the stack.
IC(FrameDepth depth, Isolate* isolate);
+ virtual ~IC() {}
// Get the call-site target; used for determining the state.
- Code* target() { return GetTargetAtAddress(address()); }
- inline Address address();
+ Code* target() const { return GetTargetAtAddress(address()); }
+ inline Address address() const;
+
+ virtual bool IsGeneric() const { return false; }
// Compute the current IC state based on the target stub, receiver and name.
static State StateFrom(Code* target, Object* receiver, Object* name);
@@ -107,16 +110,10 @@ class IC {
// object that contains this IC site.
RelocInfo::Mode ComputeMode();
- bool IsQmlGlobal(Handle<Object> receiver) {
- JSObject* qml_global = isolate_->context()->qml_global();
- return !qml_global->IsUndefined() && qml_global == *receiver;
- }
-
// Returns if this IC is for contextual (no explicit receiver)
// access to properties.
bool IsContextual(Handle<Object> receiver) {
- if (receiver->IsGlobalObject() ||
- IsQmlGlobal(receiver)) {
+ if (receiver->IsGlobalObject()) {
return SlowIsContextual();
} else {
ASSERT(!SlowIsContextual());
@@ -145,13 +142,15 @@ class IC {
#ifdef ENABLE_DEBUGGER_SUPPORT
// Computes the address in the original code when the code running is
// containing break points (calls to DebugBreakXXX builtins).
- Address OriginalCodeAddress();
+ Address OriginalCodeAddress() const;
#endif
// Set the call-site target.
void set_target(Code* code) { SetTargetAtAddress(address(), code); }
#ifdef DEBUG
+ char TransitionMarkFromState(IC::State state);
+
void TraceIC(const char* type,
Handle<Object> name,
State old_state,
@@ -166,6 +165,7 @@ class IC {
// Access the target code for the given IC address.
static inline Code* GetTargetAtAddress(Address address);
static inline void SetTargetAtAddress(Address address, Code* target);
+ static void PostPatching(Address address, Code* target, Code* old_target);
private:
// Frame pointer for the frame that uses (calls) the IC.
@@ -377,14 +377,48 @@ class KeyedIC: public IC {
STORE_NO_TRANSITION,
STORE_TRANSITION_SMI_TO_OBJECT,
STORE_TRANSITION_SMI_TO_DOUBLE,
- STORE_TRANSITION_DOUBLE_TO_OBJECT
+ STORE_TRANSITION_DOUBLE_TO_OBJECT,
+ STORE_AND_GROW_NO_TRANSITION,
+ STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT,
+ STORE_AND_GROW_TRANSITION_SMI_TO_DOUBLE,
+ STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT
};
+
+ static const int kGrowICDelta = STORE_AND_GROW_NO_TRANSITION -
+ STORE_NO_TRANSITION;
+ STATIC_ASSERT(kGrowICDelta ==
+ STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT -
+ STORE_TRANSITION_SMI_TO_OBJECT);
+ STATIC_ASSERT(kGrowICDelta ==
+ STORE_AND_GROW_TRANSITION_SMI_TO_DOUBLE -
+ STORE_TRANSITION_SMI_TO_DOUBLE);
+ STATIC_ASSERT(kGrowICDelta ==
+ STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT -
+ STORE_TRANSITION_DOUBLE_TO_OBJECT);
+
explicit KeyedIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) {}
virtual ~KeyedIC() {}
+ static inline KeyedAccessGrowMode GetGrowModeFromStubKind(
+ StubKind stub_kind) {
+ return (stub_kind >= STORE_AND_GROW_NO_TRANSITION)
+ ? ALLOW_JSARRAY_GROWTH
+ : DO_NOT_ALLOW_JSARRAY_GROWTH;
+ }
+
+ static inline StubKind GetGrowStubKind(StubKind stub_kind) {
+ ASSERT(stub_kind != LOAD);
+ if (stub_kind < STORE_AND_GROW_NO_TRANSITION) {
+ stub_kind = static_cast<StubKind>(static_cast<int>(stub_kind) +
+ kGrowICDelta);
+ }
+ return stub_kind;
+ }
+
virtual Handle<Code> GetElementStubWithoutMapCheck(
bool is_js_array,
- ElementsKind elements_kind) = 0;
+ ElementsKind elements_kind,
+ KeyedAccessGrowMode grow_mode) = 0;
protected:
virtual Handle<Code> string_stub() {
@@ -398,12 +432,15 @@ class KeyedIC: public IC {
StrictModeFlag strict_mode,
Handle<Code> default_stub);
- virtual Handle<Code> ComputePolymorphicStub(MapHandleList* receiver_maps,
- StrictModeFlag strict_mode) = 0;
+ virtual Handle<Code> ComputePolymorphicStub(
+ MapHandleList* receiver_maps,
+ StrictModeFlag strict_mode,
+ KeyedAccessGrowMode grow_mode) = 0;
Handle<Code> ComputeMonomorphicStubWithoutMapCheck(
Handle<Map> receiver_map,
- StrictModeFlag strict_mode);
+ StrictModeFlag strict_mode,
+ KeyedAccessGrowMode grow_mode);
private:
void GetReceiverMapsForStub(Handle<Code> stub, MapHandleList* result);
@@ -417,7 +454,12 @@ class KeyedIC: public IC {
StubKind stub_kind);
static bool IsTransitionStubKind(StubKind stub_kind) {
- return stub_kind > STORE_NO_TRANSITION;
+ return stub_kind > STORE_NO_TRANSITION &&
+ stub_kind != STORE_AND_GROW_NO_TRANSITION;
+ }
+
+ static bool IsGrowStubKind(StubKind stub_kind) {
+ return stub_kind >= STORE_AND_GROW_NO_TRANSITION;
}
};
@@ -456,13 +498,19 @@ class KeyedLoadIC: public KeyedIC {
virtual Handle<Code> GetElementStubWithoutMapCheck(
bool is_js_array,
- ElementsKind elements_kind);
+ ElementsKind elements_kind,
+ KeyedAccessGrowMode grow_mode);
+
+ virtual bool IsGeneric() const {
+ return target() == *generic_stub();
+ }
protected:
virtual Code::Kind kind() const { return Code::KEYED_LOAD_IC; }
virtual Handle<Code> ComputePolymorphicStub(MapHandleList* receiver_maps,
- StrictModeFlag strict_mode);
+ StrictModeFlag strict_mode,
+ KeyedAccessGrowMode grow_mode);
virtual Handle<Code> string_stub() {
return isolate()->builtins()->KeyedLoadIC_String();
@@ -483,7 +531,7 @@ class KeyedLoadIC: public KeyedIC {
Handle<Code> megamorphic_stub() {
return isolate()->builtins()->KeyedLoadIC_Generic();
}
- Handle<Code> generic_stub() {
+ Handle<Code> generic_stub() const {
return isolate()->builtins()->KeyedLoadIC_Generic();
}
Handle<Code> pre_monomorphic_stub() {
@@ -536,8 +584,8 @@ class StoreIC: public IC {
void set_target(Code* code) {
// Strict mode must be preserved across IC patching.
- ASSERT((code->extra_ic_state() & kStrictMode) ==
- (target()->extra_ic_state() & kStrictMode));
+ ASSERT(Code::GetStrictMode(code->extra_ic_state()) ==
+ Code::GetStrictMode(target()->extra_ic_state()));
IC::set_target(code);
}
@@ -599,13 +647,20 @@ class KeyedStoreIC: public KeyedIC {
virtual Handle<Code> GetElementStubWithoutMapCheck(
bool is_js_array,
- ElementsKind elements_kind);
+ ElementsKind elements_kind,
+ KeyedAccessGrowMode grow_mode);
+
+ virtual bool IsGeneric() const {
+ return target() == *generic_stub() ||
+ target() == *generic_stub_strict();
+ }
protected:
virtual Code::Kind kind() const { return Code::KEYED_STORE_IC; }
virtual Handle<Code> ComputePolymorphicStub(MapHandleList* receiver_maps,
- StrictModeFlag strict_mode);
+ StrictModeFlag strict_mode,
+ KeyedAccessGrowMode grow_mode);
private:
// Update the inline cache.
@@ -618,8 +673,8 @@ class KeyedStoreIC: public KeyedIC {
void set_target(Code* code) {
// Strict mode must be preserved across IC patching.
- ASSERT((code->extra_ic_state() & kStrictMode) ==
- (target()->extra_ic_state() & kStrictMode));
+ ASSERT(Code::GetStrictMode(code->extra_ic_state()) ==
+ Code::GetStrictMode(target()->extra_ic_state()));
IC::set_target(code);
}
@@ -638,10 +693,10 @@ class KeyedStoreIC: public KeyedIC {
Handle<Code> megamorphic_stub_strict() {
return isolate()->builtins()->KeyedStoreIC_Generic_Strict();
}
- Handle<Code> generic_stub() {
+ Handle<Code> generic_stub() const {
return isolate()->builtins()->KeyedStoreIC_Generic();
}
- Handle<Code> generic_stub_strict() {
+ Handle<Code> generic_stub_strict() const {
return isolate()->builtins()->KeyedStoreIC_Generic_Strict();
}
Handle<Code> non_strict_arguments_stub() {
@@ -650,6 +705,10 @@ class KeyedStoreIC: public KeyedIC {
static void Clear(Address address, Code* target);
+ StubKind GetStubKind(Handle<JSObject> receiver,
+ Handle<Object> key,
+ Handle<Object> value);
+
friend class IC;
};
@@ -716,6 +775,7 @@ class CompareIC: public IC {
SYMBOLS,
STRINGS,
OBJECTS,
+ KNOWN_OBJECTS,
GENERIC
};
diff --git a/src/3rdparty/v8/src/incremental-marking-inl.h b/src/3rdparty/v8/src/incremental-marking-inl.h
index c8af236..3e3d6c4 100644
--- a/src/3rdparty/v8/src/incremental-marking-inl.h
+++ b/src/3rdparty/v8/src/incremental-marking-inl.h
@@ -95,7 +95,8 @@ void IncrementalMarking::BlackToGreyAndUnshift(HeapObject* obj,
ASSERT(IsMarking());
Marking::BlackToGrey(mark_bit);
int obj_size = obj->Size();
- MemoryChunk::IncrementLiveBytes(obj->address(), -obj_size);
+ MemoryChunk::IncrementLiveBytesFromGC(obj->address(), -obj_size);
+ bytes_scanned_ -= obj_size;
int64_t old_bytes_rescanned = bytes_rescanned_;
bytes_rescanned_ = old_bytes_rescanned + obj_size;
if ((bytes_rescanned_ >> 20) != (old_bytes_rescanned >> 20)) {
diff --git a/src/3rdparty/v8/src/incremental-marking.cc b/src/3rdparty/v8/src/incremental-marking.cc
index 6d2f393..7bbd521 100644
--- a/src/3rdparty/v8/src/incremental-marking.cc
+++ b/src/3rdparty/v8/src/incremental-marking.cc
@@ -41,6 +41,7 @@ IncrementalMarking::IncrementalMarking(Heap* heap)
: heap_(heap),
state_(STOPPED),
marking_deque_memory_(NULL),
+ marking_deque_memory_committed_(false),
steps_count_(0),
steps_took_(0),
longest_step_(0.0),
@@ -177,7 +178,12 @@ class IncrementalMarkingMarkingVisitor : public ObjectVisitor {
void VisitCodeTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
- Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
+ Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
+ if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
+ && (target->ic_age() != heap_->global_ic_age())) {
+ IC::Clear(rinfo->pc());
+ target = Code::GetCodeFromTargetAddress(rinfo->target_address());
+ }
heap_->mark_compact_collector()->RecordRelocSlot(rinfo, Code::cast(target));
MarkObject(target);
}
@@ -199,6 +205,12 @@ class IncrementalMarkingMarkingVisitor : public ObjectVisitor {
MarkObject(target);
}
+ void VisitSharedFunctionInfo(SharedFunctionInfo* shared) {
+ if (shared->ic_age() != heap_->global_ic_age()) {
+ shared->ResetForNewContext(heap_->global_ic_age());
+ }
+ }
+
void VisitPointer(Object** p) {
Object* obj = *p;
if (obj->NonFailureIsHeapObject()) {
@@ -224,8 +236,8 @@ class IncrementalMarkingMarkingVisitor : public ObjectVisitor {
MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
if (mark_bit.data_only()) {
if (incremental_marking_->MarkBlackOrKeepGrey(mark_bit)) {
- MemoryChunk::IncrementLiveBytes(heap_object->address(),
- heap_object->Size());
+ MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(),
+ heap_object->Size());
}
} else if (Marking::IsWhite(mark_bit)) {
incremental_marking_->WhiteToGreyAndPush(heap_object, mark_bit);
@@ -262,8 +274,8 @@ class IncrementalMarkingRootMarkingVisitor : public ObjectVisitor {
MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
if (mark_bit.data_only()) {
if (incremental_marking_->MarkBlackOrKeepGrey(mark_bit)) {
- MemoryChunk::IncrementLiveBytes(heap_object->address(),
- heap_object->Size());
+ MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(),
+ heap_object->Size());
}
} else {
if (Marking::IsWhite(mark_bit)) {
@@ -395,7 +407,7 @@ bool IncrementalMarking::WorthActivating() {
return !FLAG_expose_gc &&
FLAG_incremental_marking &&
!Serializer::enabled() &&
- heap_->PromotedSpaceSize() > kActivationThreshold;
+ heap_->PromotedSpaceSizeOfObjects() > kActivationThreshold;
}
@@ -417,7 +429,7 @@ void IncrementalMarking::ActivateGeneratedStub(Code* stub) {
static void PatchIncrementalMarkingRecordWriteStubs(
Heap* heap, RecordWriteStub::Mode mode) {
- NumberDictionary* stubs = heap->code_stubs();
+ UnseededNumberDictionary* stubs = heap->code_stubs();
int capacity = stubs->Capacity();
for (int i = 0; i < capacity; i++) {
@@ -440,10 +452,24 @@ static void PatchIncrementalMarkingRecordWriteStubs(
void IncrementalMarking::EnsureMarkingDequeIsCommitted() {
if (marking_deque_memory_ == NULL) {
marking_deque_memory_ = new VirtualMemory(4 * MB);
- marking_deque_memory_->Commit(
+ }
+ if (!marking_deque_memory_committed_) {
+ bool success = marking_deque_memory_->Commit(
reinterpret_cast<Address>(marking_deque_memory_->address()),
marking_deque_memory_->size(),
false); // Not executable.
+ CHECK(success);
+ marking_deque_memory_committed_ = true;
+ }
+}
+
+void IncrementalMarking::UncommitMarkingDeque() {
+ if (state_ == STOPPED && marking_deque_memory_committed_) {
+ bool success = marking_deque_memory_->Uncommit(
+ reinterpret_cast<Address>(marking_deque_memory_->address()),
+ marking_deque_memory_->size());
+ CHECK(success);
+ marking_deque_memory_committed_ = false;
}
}
@@ -476,8 +502,8 @@ static void MarkObjectGreyDoNotEnqueue(Object* obj) {
HeapObject* heap_obj = HeapObject::cast(obj);
MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::cast(obj));
if (Marking::IsBlack(mark_bit)) {
- MemoryChunk::IncrementLiveBytes(heap_obj->address(),
- -heap_obj->Size());
+ MemoryChunk::IncrementLiveBytesFromGC(heap_obj->address(),
+ -heap_obj->Size());
}
Marking::AnyToGrey(mark_bit);
}
@@ -490,7 +516,8 @@ void IncrementalMarking::StartMarking(CompactionFlag flag) {
}
is_compacting_ = !FLAG_never_compact && (flag == ALLOW_COMPACTION) &&
- heap_->mark_compact_collector()->StartCompaction();
+ heap_->mark_compact_collector()->StartCompaction(
+ MarkCompactCollector::INCREMENTAL_COMPACTION);
state_ = MARKING;
@@ -643,7 +670,7 @@ void IncrementalMarking::Hurry() {
MarkBit mark_bit = Marking::MarkBitFrom(obj);
ASSERT(!Marking::IsBlack(mark_bit));
Marking::MarkBlack(mark_bit);
- MemoryChunk::IncrementLiveBytes(obj->address(), obj->Size());
+ MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
}
state_ = COMPLETE;
if (FLAG_trace_incremental_marking) {
@@ -656,17 +683,22 @@ void IncrementalMarking::Hurry() {
if (FLAG_cleanup_code_caches_at_gc) {
PolymorphicCodeCache* poly_cache = heap_->polymorphic_code_cache();
Marking::GreyToBlack(Marking::MarkBitFrom(poly_cache));
- MemoryChunk::IncrementLiveBytes(poly_cache->address(),
- PolymorphicCodeCache::kSize);
+ MemoryChunk::IncrementLiveBytesFromGC(poly_cache->address(),
+ PolymorphicCodeCache::kSize);
}
Object* context = heap_->global_contexts_list();
while (!context->IsUndefined()) {
- NormalizedMapCache* cache = Context::cast(context)->normalized_map_cache();
- MarkBit mark_bit = Marking::MarkBitFrom(cache);
- if (Marking::IsGrey(mark_bit)) {
- Marking::GreyToBlack(mark_bit);
- MemoryChunk::IncrementLiveBytes(cache->address(), cache->Size());
+ // GC can happen when the context is not fully initialized,
+ // so the cache can be undefined.
+ HeapObject* cache = HeapObject::cast(
+ Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
+ if (!cache->IsUndefined()) {
+ MarkBit mark_bit = Marking::MarkBitFrom(cache);
+ if (Marking::IsGrey(mark_bit)) {
+ Marking::GreyToBlack(mark_bit);
+ MemoryChunk::IncrementLiveBytesFromGC(cache->address(), cache->Size());
+ }
}
context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
}
@@ -717,7 +749,7 @@ void IncrementalMarking::Finalize() {
}
-void IncrementalMarking::MarkingComplete() {
+void IncrementalMarking::MarkingComplete(CompletionAction action) {
state_ = COMPLETE;
// We will set the stack guard to request a GC now. This will mean the rest
// of the GC gets performed as soon as possible (we can't do a GC here in a
@@ -728,11 +760,14 @@ void IncrementalMarking::MarkingComplete() {
if (FLAG_trace_incremental_marking) {
PrintF("[IncrementalMarking] Complete (normal).\n");
}
- heap_->isolate()->stack_guard()->RequestGC();
+ if (action == GC_VIA_STACK_GUARD) {
+ heap_->isolate()->stack_guard()->RequestGC();
+ }
}
-void IncrementalMarking::Step(intptr_t allocated_bytes) {
+void IncrementalMarking::Step(intptr_t allocated_bytes,
+ CompletionAction action) {
if (heap_->gc_state() != Heap::NOT_IN_GC ||
!FLAG_incremental_marking ||
!FLAG_incremental_marking_steps ||
@@ -747,6 +782,7 @@ void IncrementalMarking::Step(intptr_t allocated_bytes) {
if (state_ == MARKING && no_marking_scope_depth_ > 0) return;
intptr_t bytes_to_process = allocated_ * allocation_marking_factor_;
+ bytes_scanned_ += bytes_to_process;
double start = 0;
@@ -755,8 +791,8 @@ void IncrementalMarking::Step(intptr_t allocated_bytes) {
}
if (state_ == SWEEPING) {
- if (heap_->old_pointer_space()->AdvanceSweeper(bytes_to_process) &&
- heap_->old_data_space()->AdvanceSweeper(bytes_to_process)) {
+ if (heap_->AdvanceSweepers(static_cast<int>(bytes_to_process))) {
+ bytes_scanned_ = 0;
StartMarking(PREVENT_COMPACTION);
}
} else if (state_ == MARKING) {
@@ -771,6 +807,12 @@ void IncrementalMarking::Step(intptr_t allocated_bytes) {
Map* map = obj->map();
if (map == filler_map) continue;
+ if (obj->IsMap()) {
+ Map* map = Map::cast(obj);
+ heap_->ClearCacheOnMap(map);
+ }
+
+
int size = obj->SizeFromMap(map);
bytes_to_process -= size;
MarkBit map_mark_bit = Marking::MarkBitFrom(map);
@@ -796,9 +838,9 @@ void IncrementalMarking::Step(intptr_t allocated_bytes) {
SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
(obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
Marking::MarkBlack(obj_mark_bit);
- MemoryChunk::IncrementLiveBytes(obj->address(), size);
+ MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
}
- if (marking_deque_.IsEmpty()) MarkingComplete();
+ if (marking_deque_.IsEmpty()) MarkingComplete(action);
}
allocated_ = 0;
@@ -808,35 +850,64 @@ void IncrementalMarking::Step(intptr_t allocated_bytes) {
bool speed_up = false;
- if (old_generation_space_available_at_start_of_incremental_ < 10 * MB ||
- SpaceLeftInOldSpace() <
- old_generation_space_available_at_start_of_incremental_ >> 1) {
- // Half of the space that was available is gone while we were
- // incrementally marking.
+ if ((steps_count_ % kAllocationMarkingFactorSpeedupInterval) == 0) {
+ if (FLAG_trace_gc) {
+ PrintF("Speed up marking after %d steps\n",
+ static_cast<int>(kAllocationMarkingFactorSpeedupInterval));
+ }
speed_up = true;
- old_generation_space_available_at_start_of_incremental_ =
- SpaceLeftInOldSpace();
}
- if (heap_->PromotedTotalSize() >
- old_generation_space_used_at_start_of_incremental_ << 1) {
- // Size of old space doubled while we were incrementally marking.
+ bool space_left_is_very_small =
+ (old_generation_space_available_at_start_of_incremental_ < 10 * MB);
+
+ bool only_1_nth_of_space_that_was_available_still_left =
+ (SpaceLeftInOldSpace() * (allocation_marking_factor_ + 1) <
+ old_generation_space_available_at_start_of_incremental_);
+
+ if (space_left_is_very_small ||
+ only_1_nth_of_space_that_was_available_still_left) {
+ if (FLAG_trace_gc) PrintF("Speed up marking because of low space left\n");
speed_up = true;
- old_generation_space_used_at_start_of_incremental_ =
- heap_->PromotedTotalSize();
}
- if ((steps_count_ % kAllocationMarkingFactorSpeedupInterval) == 0 &&
- allocation_marking_factor_ < kMaxAllocationMarkingFactor) {
+ bool size_of_old_space_multiplied_by_n_during_marking =
+ (heap_->PromotedTotalSize() >
+ (allocation_marking_factor_ + 1) *
+ old_generation_space_used_at_start_of_incremental_);
+ if (size_of_old_space_multiplied_by_n_during_marking) {
speed_up = true;
+ if (FLAG_trace_gc) {
+ PrintF("Speed up marking because of heap size increase\n");
+ }
}
- if (speed_up && 0) {
- allocation_marking_factor_ += kAllocationMarkingFactorSpeedup;
- allocation_marking_factor_ =
- static_cast<int>(allocation_marking_factor_ * 1.3);
+ int64_t promoted_during_marking = heap_->PromotedTotalSize()
+ - old_generation_space_used_at_start_of_incremental_;
+ intptr_t delay = allocation_marking_factor_ * MB;
+ intptr_t scavenge_slack = heap_->MaxSemiSpaceSize();
+
+ // We try to scan at at least twice the speed that we are allocating.
+ if (promoted_during_marking > bytes_scanned_ / 2 + scavenge_slack + delay) {
if (FLAG_trace_gc) {
- PrintF("Marking speed increased to %d\n", allocation_marking_factor_);
+ PrintF("Speed up marking because marker was not keeping up\n");
+ }
+ speed_up = true;
+ }
+
+ if (speed_up) {
+ if (state_ != MARKING) {
+ if (FLAG_trace_gc) {
+ PrintF("Postponing speeding up marking until marking starts\n");
+ }
+ } else {
+ allocation_marking_factor_ += kAllocationMarkingFactorSpeedup;
+ allocation_marking_factor_ = static_cast<int>(
+ Min(kMaxAllocationMarkingFactor,
+ static_cast<intptr_t>(allocation_marking_factor_ * 1.3)));
+ if (FLAG_trace_gc) {
+ PrintF("Marking speed increased to %d\n", allocation_marking_factor_);
+ }
}
}
@@ -862,6 +933,7 @@ void IncrementalMarking::ResetStepCounters() {
steps_took_since_last_gc_ = 0;
bytes_rescanned_ = 0;
allocation_marking_factor_ = kInitialAllocationMarkingFactor;
+ bytes_scanned_ = 0;
}
diff --git a/src/3rdparty/v8/src/incremental-marking.h b/src/3rdparty/v8/src/incremental-marking.h
index 4542fbd..8cbe6c1 100644
--- a/src/3rdparty/v8/src/incremental-marking.h
+++ b/src/3rdparty/v8/src/incremental-marking.h
@@ -46,6 +46,11 @@ class IncrementalMarking {
COMPLETE
};
+ enum CompletionAction {
+ GC_VIA_STACK_GUARD,
+ NO_GC_VIA_STACK_GUARD
+ };
+
explicit IncrementalMarking(Heap* heap);
void TearDown();
@@ -56,6 +61,7 @@ class IncrementalMarking {
}
bool should_hurry() { return should_hurry_; }
+ void set_should_hurry(bool val) { should_hurry_ = val; }
inline bool IsStopped() { return state() == STOPPED; }
@@ -63,6 +69,8 @@ class IncrementalMarking {
inline bool IsMarkingIncomplete() { return state() == MARKING; }
+ inline bool IsComplete() { return state() == COMPLETE; }
+
bool WorthActivating();
void Start();
@@ -79,7 +87,7 @@ class IncrementalMarking {
void Abort();
- void MarkingComplete();
+ void MarkingComplete(CompletionAction action);
// It's hard to know how much work the incremental marker should do to make
// progress in the face of the mutator creating new work for it. We start
@@ -96,12 +104,14 @@ class IncrementalMarking {
static const intptr_t kAllocationMarkingFactorSpeedupInterval = 1024;
// This is how much we increase the marking/allocating factor by.
static const intptr_t kAllocationMarkingFactorSpeedup = 2;
- static const intptr_t kMaxAllocationMarkingFactor = 1000000000;
+ static const intptr_t kMaxAllocationMarkingFactor = 1000;
void OldSpaceStep(intptr_t allocated) {
- Step(allocated * kFastMarking / kInitialAllocationMarkingFactor);
+ Step(allocated * kFastMarking / kInitialAllocationMarkingFactor,
+ GC_VIA_STACK_GUARD);
}
- void Step(intptr_t allocated);
+
+ void Step(intptr_t allocated, CompletionAction action);
inline void RestartIfNotMarking() {
if (state_ == COMPLETE) {
@@ -213,11 +223,9 @@ class IncrementalMarking {
no_marking_scope_depth_--;
}
- private:
- void set_should_hurry(bool val) {
- should_hurry_ = val;
- }
+ void UncommitMarkingDeque();
+ private:
int64_t SpaceLeftInOldSpace();
void ResetStepCounters();
@@ -250,6 +258,7 @@ class IncrementalMarking {
bool is_compacting_;
VirtualMemory* marking_deque_memory_;
+ bool marking_deque_memory_committed_;
MarkingDeque marking_deque_;
int steps_count_;
@@ -262,6 +271,7 @@ class IncrementalMarking {
int64_t bytes_rescanned_;
bool should_hurry_;
int allocation_marking_factor_;
+ intptr_t bytes_scanned_;
intptr_t allocated_;
int no_marking_scope_depth_;
diff --git a/src/3rdparty/v8/src/inspector.cc b/src/3rdparty/v8/src/inspector.cc
index 8fb80f1..833d338 100644
--- a/src/3rdparty/v8/src/inspector.cc
+++ b/src/3rdparty/v8/src/inspector.cc
@@ -38,11 +38,11 @@ namespace internal {
//============================================================================
// The Inspector.
-void Inspector::DumpObjectType(FILE* out, Object *obj, bool print_more) {
+void Inspector::DumpObjectType(FILE* out, Object* obj, bool print_more) {
// Dump the object pointer.
OS::FPrint(out, "%p:", reinterpret_cast<void*>(obj));
if (obj->IsHeapObject()) {
- HeapObject *hobj = HeapObject::cast(obj);
+ HeapObject* hobj = HeapObject::cast(obj);
OS::FPrint(out, " size %d :", hobj->Size());
}
diff --git a/src/3rdparty/v8/src/inspector.h b/src/3rdparty/v8/src/inspector.h
index e328bcd..6962e21 100644
--- a/src/3rdparty/v8/src/inspector.h
+++ b/src/3rdparty/v8/src/inspector.h
@@ -41,14 +41,14 @@ namespace internal {
class Inspector {
public:
- static void DumpObjectType(FILE* out, Object *obj, bool print_more);
- static void DumpObjectType(FILE* out, Object *obj) {
+ static void DumpObjectType(FILE* out, Object* obj, bool print_more);
+ static void DumpObjectType(FILE* out, Object* obj) {
DumpObjectType(out, obj, false);
}
- static void DumpObjectType(Object *obj, bool print_more) {
+ static void DumpObjectType(Object* obj, bool print_more) {
DumpObjectType(stdout, obj, print_more);
}
- static void DumpObjectType(Object *obj) {
+ static void DumpObjectType(Object* obj) {
DumpObjectType(stdout, obj, false);
}
};
diff --git a/src/3rdparty/v8/src/interface.cc b/src/3rdparty/v8/src/interface.cc
new file mode 100644
index 0000000..e344b86
--- /dev/null
+++ b/src/3rdparty/v8/src/interface.cc
@@ -0,0 +1,226 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "v8.h"
+
+#include "interface.h"
+
+namespace v8 {
+namespace internal {
+
+static bool Match(void* key1, void* key2) {
+ String* name1 = *static_cast<String**>(key1);
+ String* name2 = *static_cast<String**>(key2);
+ ASSERT(name1->IsSymbol());
+ ASSERT(name2->IsSymbol());
+ return name1 == name2;
+}
+
+
+Interface* Interface::Lookup(Handle<String> name) {
+ ASSERT(IsModule());
+ ZoneHashMap* map = Chase()->exports_;
+ if (map == NULL) return NULL;
+ ZoneHashMap::Entry* p = map->Lookup(name.location(), name->Hash(), false);
+ if (p == NULL) return NULL;
+ ASSERT(*static_cast<String**>(p->key) == *name);
+ ASSERT(p->value != NULL);
+ return static_cast<Interface*>(p->value);
+}
+
+
+#ifdef DEBUG
+// Current nesting depth for debug output.
+class Nesting {
+ public:
+ Nesting() { current_ += 2; }
+ ~Nesting() { current_ -= 2; }
+ static int current() { return current_; }
+ private:
+ static int current_;
+};
+
+int Nesting::current_ = 0;
+#endif
+
+
+void Interface::DoAdd(
+ void* name, uint32_t hash, Interface* interface, bool* ok) {
+ MakeModule(ok);
+ if (!*ok) return;
+
+#ifdef DEBUG
+ if (FLAG_print_interface_details) {
+ PrintF("%*s# Adding...\n", Nesting::current(), "");
+ PrintF("%*sthis = ", Nesting::current(), "");
+ this->Print(Nesting::current());
+ PrintF("%*s%s : ", Nesting::current(), "",
+ (*reinterpret_cast<String**>(name))->ToAsciiArray());
+ interface->Print(Nesting::current());
+ }
+#endif
+
+ ZoneHashMap** map = &Chase()->exports_;
+ if (*map == NULL) *map = new ZoneHashMap(Match, 8);
+
+ ZoneHashMap::Entry* p = (*map)->Lookup(name, hash, !IsFrozen());
+ if (p == NULL) {
+ // This didn't have name but was frozen already, that's an error.
+ *ok = false;
+ } else if (p->value == NULL) {
+ p->value = interface;
+ } else {
+#ifdef DEBUG
+ Nesting nested;
+#endif
+ reinterpret_cast<Interface*>(p->value)->Unify(interface, ok);
+ }
+
+#ifdef DEBUG
+ if (FLAG_print_interface_details) {
+ PrintF("%*sthis' = ", Nesting::current(), "");
+ this->Print(Nesting::current());
+ PrintF("%*s# Added.\n", Nesting::current(), "");
+ }
+#endif
+}
+
+
+void Interface::Unify(Interface* that, bool* ok) {
+ if (this->forward_) return this->Chase()->Unify(that, ok);
+ if (that->forward_) return this->Unify(that->Chase(), ok);
+ ASSERT(this->forward_ == NULL);
+ ASSERT(that->forward_ == NULL);
+
+ *ok = true;
+ if (this == that) return;
+ if (this->IsValue()) return that->MakeValue(ok);
+ if (that->IsValue()) return this->MakeValue(ok);
+
+#ifdef DEBUG
+ if (FLAG_print_interface_details) {
+ PrintF("%*s# Unifying...\n", Nesting::current(), "");
+ PrintF("%*sthis = ", Nesting::current(), "");
+ this->Print(Nesting::current());
+ PrintF("%*sthat = ", Nesting::current(), "");
+ that->Print(Nesting::current());
+ }
+#endif
+
+ // Merge the smaller interface into the larger, for performance.
+ if (this->exports_ != NULL && (that->exports_ == NULL ||
+ this->exports_->occupancy() >= that->exports_->occupancy())) {
+ this->DoUnify(that, ok);
+ } else {
+ that->DoUnify(this, ok);
+ }
+
+#ifdef DEBUG
+ if (FLAG_print_interface_details) {
+ PrintF("%*sthis' = ", Nesting::current(), "");
+ this->Print(Nesting::current());
+ PrintF("%*sthat' = ", Nesting::current(), "");
+ that->Print(Nesting::current());
+ PrintF("%*s# Unified.\n", Nesting::current(), "");
+ }
+#endif
+}
+
+
+void Interface::DoUnify(Interface* that, bool* ok) {
+ ASSERT(this->forward_ == NULL);
+ ASSERT(that->forward_ == NULL);
+ ASSERT(!this->IsValue());
+ ASSERT(!that->IsValue());
+ ASSERT(*ok);
+
+#ifdef DEBUG
+ Nesting nested;
+#endif
+
+ // Try to merge all members from that into this.
+ ZoneHashMap* map = that->exports_;
+ if (map != NULL) {
+ for (ZoneHashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p)) {
+ this->DoAdd(p->key, p->hash, static_cast<Interface*>(p->value), ok);
+ if (!*ok) return;
+ }
+ }
+
+ // If the new interface is larger than that's, then there were members in
+ // 'this' which 'that' didn't have. If 'that' was frozen that is an error.
+ int this_size = this->exports_ == NULL ? 0 : this->exports_->occupancy();
+ int that_size = map == NULL ? 0 : map->occupancy();
+ if (that->IsFrozen() && this_size > that_size) {
+ *ok = false;
+ return;
+ }
+
+ // Merge interfaces.
+ this->flags_ |= that->flags_;
+ that->forward_ = this;
+}
+
+
+#ifdef DEBUG
+void Interface::Print(int n) {
+ int n0 = n > 0 ? n : 0;
+
+ if (FLAG_print_interface_details) {
+ PrintF("%p", static_cast<void*>(this));
+ for (Interface* link = this->forward_; link != NULL; link = link->forward_)
+ PrintF("->%p", static_cast<void*>(link));
+ PrintF(" ");
+ }
+
+ if (IsUnknown()) {
+ PrintF("unknown\n");
+ } else if (IsValue()) {
+ PrintF("value\n");
+ } else if (IsModule()) {
+ PrintF("module %s{", IsFrozen() ? "" : "(unresolved) ");
+ ZoneHashMap* map = Chase()->exports_;
+ if (map == NULL || map->occupancy() == 0) {
+ PrintF("}\n");
+ } else if (n < 0 || n0 >= 2 * FLAG_print_interface_depth) {
+ // Avoid infinite recursion on cyclic types.
+ PrintF("...}\n");
+ } else {
+ PrintF("\n");
+ for (ZoneHashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p)) {
+ String* name = *static_cast<String**>(p->key);
+ Interface* interface = static_cast<Interface*>(p->value);
+ PrintF("%*s%s : ", n0 + 2, "", name->ToAsciiArray());
+ interface->Print(n0 + 2);
+ }
+ PrintF("%*s}\n", n0, "");
+ }
+ }
+}
+#endif
+
+} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/interface.h b/src/3rdparty/v8/src/interface.h
new file mode 100644
index 0000000..c2991cb
--- /dev/null
+++ b/src/3rdparty/v8/src/interface.h
@@ -0,0 +1,156 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_INTERFACE_H_
+#define V8_INTERFACE_H_
+
+#include "zone-inl.h" // For operator new.
+
+namespace v8 {
+namespace internal {
+
+
+// This class implements the following abstract grammar of interfaces
+// (i.e. module types):
+// interface ::= UNDETERMINED | VALUE | MODULE(exports)
+// exports ::= {name : interface, ...}
+// A frozen module type is one that is fully determined. Unification does not
+// allow adding additional exports to frozen interfaces.
+// Otherwise, unifying modules merges their exports.
+// Undetermined types are unification variables that can be unified freely.
+
+class Interface : public ZoneObject {
+ public:
+ // ---------------------------------------------------------------------------
+ // Factory methods.
+
+ static Interface* NewValue() {
+ static Interface value_interface(VALUE + FROZEN); // Cached.
+ return &value_interface;
+ }
+
+ static Interface* NewUnknown() {
+ return new Interface(NONE);
+ }
+
+ static Interface* NewModule() {
+ return new Interface(MODULE);
+ }
+
+ // ---------------------------------------------------------------------------
+ // Mutators.
+
+ // Add a name to the list of exports. If it already exists, unify with
+ // interface, otherwise insert unless this is closed.
+ void Add(Handle<String> name, Interface* interface, bool* ok) {
+ DoAdd(name.location(), name->Hash(), interface, ok);
+ }
+
+ // Unify with another interface. If successful, both interface objects will
+ // represent the same type, and changes to one are reflected in the other.
+ void Unify(Interface* that, bool* ok);
+
+ // Determine this interface to be a value interface.
+ void MakeValue(bool* ok) {
+ *ok = !IsModule();
+ if (*ok) Chase()->flags_ |= VALUE;
+ }
+
+ // Determine this interface to be a module interface.
+ void MakeModule(bool* ok) {
+ *ok = !IsValue();
+ if (*ok) Chase()->flags_ |= MODULE;
+ }
+
+ // Do not allow any further refinements, directly or through unification.
+ void Freeze(bool* ok) {
+ *ok = IsValue() || IsModule();
+ if (*ok) Chase()->flags_ |= FROZEN;
+ }
+
+ // ---------------------------------------------------------------------------
+ // Accessors.
+
+ // Look up an exported name. Returns NULL if not (yet) defined.
+ Interface* Lookup(Handle<String> name);
+
+ // Check whether this is still a fully undetermined type.
+ bool IsUnknown() { return Chase()->flags_ == NONE; }
+
+ // Check whether this is a value type.
+ bool IsValue() { return Chase()->flags_ & VALUE; }
+
+ // Check whether this is a module type.
+ bool IsModule() { return Chase()->flags_ & MODULE; }
+
+ // Check whether this is closed (i.e. fully determined).
+ bool IsFrozen() { return Chase()->flags_ & FROZEN; }
+
+ // ---------------------------------------------------------------------------
+ // Debugging.
+#ifdef DEBUG
+ void Print(int n = 0); // n = indentation; n < 0 => don't print recursively
+#endif
+
+ // ---------------------------------------------------------------------------
+ // Implementation.
+ private:
+ enum Flags { // All flags are monotonic
+ NONE = 0,
+ VALUE = 1, // This type describes a value
+ MODULE = 2, // This type describes a module
+ FROZEN = 4 // This type is fully determined
+ };
+
+ int flags_;
+ Interface* forward_; // Unification link
+ ZoneHashMap* exports_; // Module exports and their types (allocated lazily)
+
+ explicit Interface(int flags)
+ : flags_(flags),
+ forward_(NULL),
+ exports_(NULL) {
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Creating %p\n", static_cast<void*>(this));
+#endif
+ }
+
+ Interface* Chase() {
+ Interface* result = this;
+ while (result->forward_ != NULL) result = result->forward_;
+ if (result != this) forward_ = result; // On-the-fly path compression.
+ return result;
+ }
+
+ void DoAdd(void* name, uint32_t hash, Interface* interface, bool* ok);
+ void DoUnify(Interface* that, bool* ok);
+};
+
+} } // namespace v8::internal
+
+#endif // V8_INTERFACE_H_
diff --git a/src/3rdparty/v8/src/interpreter-irregexp.cc b/src/3rdparty/v8/src/interpreter-irregexp.cc
index b337e88..3a92b84 100644
--- a/src/3rdparty/v8/src/interpreter-irregexp.cc
+++ b/src/3rdparty/v8/src/interpreter-irregexp.cc
@@ -33,8 +33,9 @@
#include "utils.h"
#include "ast.h"
#include "bytecodes-irregexp.h"
-#include "jsregexp.h"
#include "interpreter-irregexp.h"
+#include "jsregexp.h"
+#include "regexp-macro-assembler.h"
namespace v8 {
namespace internal {
@@ -449,6 +450,37 @@ static RegExpImpl::IrregexpResult RawMatch(Isolate* isolate,
}
break;
}
+ BYTECODE(CHECK_CHAR_IN_RANGE) {
+ uint32_t from = Load16Aligned(pc + 4);
+ uint32_t to = Load16Aligned(pc + 6);
+ if (from <= current_char && current_char <= to) {
+ pc = code_base + Load32Aligned(pc + 8);
+ } else {
+ pc += BC_CHECK_CHAR_IN_RANGE_LENGTH;
+ }
+ break;
+ }
+ BYTECODE(CHECK_CHAR_NOT_IN_RANGE) {
+ uint32_t from = Load16Aligned(pc + 4);
+ uint32_t to = Load16Aligned(pc + 6);
+ if (from > current_char || current_char > to) {
+ pc = code_base + Load32Aligned(pc + 8);
+ } else {
+ pc += BC_CHECK_CHAR_NOT_IN_RANGE_LENGTH;
+ }
+ break;
+ }
+ BYTECODE(CHECK_BIT_IN_TABLE) {
+ int mask = RegExpMacroAssembler::kTableMask;
+ byte b = pc[8 + ((current_char & mask) >> kBitsPerByteLog2)];
+ int bit = (current_char & (kBitsPerByte - 1));
+ if ((b & (1 << bit)) != 0) {
+ pc = code_base + Load32Aligned(pc + 4);
+ } else {
+ pc += BC_CHECK_BIT_IN_TABLE_LENGTH;
+ }
+ break;
+ }
BYTECODE(CHECK_LT) {
uint32_t limit = (insn >> BYTECODE_SHIFT);
if (current_char < limit) {
@@ -488,59 +520,6 @@ static RegExpImpl::IrregexpResult RawMatch(Isolate* isolate,
pc += BC_CHECK_REGISTER_EQ_POS_LENGTH;
}
break;
- BYTECODE(LOOKUP_MAP1) {
- // Look up character in a bitmap. If we find a 0, then jump to the
- // location at pc + 8. Otherwise fall through!
- int index = current_char - (insn >> BYTECODE_SHIFT);
- byte map = code_base[Load32Aligned(pc + 4) + (index >> 3)];
- map = ((map >> (index & 7)) & 1);
- if (map == 0) {
- pc = code_base + Load32Aligned(pc + 8);
- } else {
- pc += BC_LOOKUP_MAP1_LENGTH;
- }
- break;
- }
- BYTECODE(LOOKUP_MAP2) {
- // Look up character in a half-nibble map. If we find 00, then jump to
- // the location at pc + 8. If we find 01 then jump to location at
- // pc + 11, etc.
- int index = (current_char - (insn >> BYTECODE_SHIFT)) << 1;
- byte map = code_base[Load32Aligned(pc + 3) + (index >> 3)];
- map = ((map >> (index & 7)) & 3);
- if (map < 2) {
- if (map == 0) {
- pc = code_base + Load32Aligned(pc + 8);
- } else {
- pc = code_base + Load32Aligned(pc + 12);
- }
- } else {
- if (map == 2) {
- pc = code_base + Load32Aligned(pc + 16);
- } else {
- pc = code_base + Load32Aligned(pc + 20);
- }
- }
- break;
- }
- BYTECODE(LOOKUP_MAP8) {
- // Look up character in a byte map. Use the byte as an index into a
- // table that follows this instruction immediately.
- int index = current_char - (insn >> BYTECODE_SHIFT);
- byte map = code_base[Load32Aligned(pc + 4) + index];
- const byte* new_pc = code_base + Load32Aligned(pc + 8) + (map << 2);
- pc = code_base + Load32Aligned(new_pc);
- break;
- }
- BYTECODE(LOOKUP_HI_MAP8) {
- // Look up high byte of this character in a byte map. Use the byte as
- // an index into a table that follows this instruction immediately.
- int index = (current_char >> 8) - (insn >> BYTECODE_SHIFT);
- byte map = code_base[Load32Aligned(pc + 4) + index];
- const byte* new_pc = code_base + Load32Aligned(pc + 8) + (map << 2);
- pc = code_base + Load32Aligned(new_pc);
- break;
- }
BYTECODE(CHECK_NOT_REGS_EQUAL)
if (registers[insn >> BYTECODE_SHIFT] ==
registers[Load32Aligned(pc + 4)]) {
diff --git a/src/3rdparty/v8/src/isolate-inl.h b/src/3rdparty/v8/src/isolate-inl.h
index 0a2c174..9fb16fb 100644
--- a/src/3rdparty/v8/src/isolate-inl.h
+++ b/src/3rdparty/v8/src/isolate-inl.h
@@ -49,6 +49,16 @@ SaveContext::SaveContext(Isolate* isolate) : prev_(isolate->save_context()) {
}
+bool Isolate::IsDebuggerActive() {
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ if (!NoBarrier_Load(&debugger_initialized_)) return false;
+ return debugger()->IsDebuggerActive();
+#else
+ return false;
+#endif
+}
+
+
bool Isolate::DebuggerHasBreakPoints() {
#ifdef ENABLE_DEBUGGER_SUPPORT
return debug()->has_break_points();
diff --git a/src/3rdparty/v8/src/isolate.cc b/src/3rdparty/v8/src/isolate.cc
index 36c1dfd..bf9b345 100644
--- a/src/3rdparty/v8/src/isolate.cc
+++ b/src/3rdparty/v8/src/isolate.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -41,6 +41,7 @@
#include "lithium-allocator.h"
#include "log.h"
#include "messages.h"
+#include "platform.h"
#include "regexp-stack.h"
#include "runtime-profiler.h"
#include "scopeinfo.h"
@@ -96,7 +97,6 @@ void ThreadLocalTop::InitializeInternal() {
thread_id_ = ThreadId::Invalid();
external_caught_exception_ = false;
failed_access_check_callback_ = NULL;
- user_object_comparison_callback_ = NULL;
save_context_ = NULL;
catcher_ = NULL;
top_lookup_result_ = NULL;
@@ -312,7 +312,6 @@ void Isolate::PreallocatedStorageDelete(void* p) {
storage->LinkTo(&free_list_);
}
-
Isolate* Isolate::default_isolate_ = NULL;
Thread::LocalStorageKey Isolate::isolate_key_;
Thread::LocalStorageKey Isolate::thread_id_key_;
@@ -321,26 +320,6 @@ Mutex* Isolate::process_wide_mutex_ = OS::CreateMutex();
Isolate::ThreadDataTable* Isolate::thread_data_table_ = NULL;
-class IsolateInitializer {
- public:
- IsolateInitializer() {
- Isolate::EnsureDefaultIsolate();
- }
-};
-
-static IsolateInitializer* EnsureDefaultIsolateAllocated() {
- // TODO(isolates): Use the system threading API to do this once?
- static IsolateInitializer static_initializer;
- return &static_initializer;
-}
-
-// This variable only needed to trigger static intialization.
-static IsolateInitializer* static_initializer = EnsureDefaultIsolateAllocated();
-
-
-
-
-
Isolate::PerIsolateThreadData* Isolate::AllocatePerIsolateThreadData(
ThreadId thread_id) {
ASSERT(!thread_id.Equals(ThreadId::Invalid()));
@@ -391,12 +370,17 @@ void Isolate::EnsureDefaultIsolate() {
default_isolate_ = new Isolate();
}
// Can't use SetIsolateThreadLocals(default_isolate_, NULL) here
- // becase a non-null thread data may be already set.
+ // because a non-null thread data may be already set.
if (Thread::GetThreadLocal(isolate_key_) == NULL) {
Thread::SetThreadLocal(isolate_key_, default_isolate_);
}
}
+struct StaticInitializer {
+ StaticInitializer() {
+ Isolate::EnsureDefaultIsolate();
+ }
+} static_initializer;
#ifdef ENABLE_DEBUGGER_SUPPORT
Debugger* Isolate::GetDefaultIsolateDebugger() {
@@ -543,6 +527,18 @@ Handle<String> Isolate::StackTraceString() {
}
+void Isolate::CaptureAndSetCurrentStackTraceFor(Handle<JSObject> error_object) {
+ if (capture_stack_trace_for_uncaught_exceptions_) {
+ // Capture stack trace for a detailed exception message.
+ Handle<String> key = factory()->hidden_stack_trace_symbol();
+ Handle<JSArray> stack_trace = CaptureCurrentStackTrace(
+ stack_trace_for_uncaught_exceptions_frame_limit_,
+ stack_trace_for_uncaught_exceptions_options_);
+ JSObject::SetHiddenProperty(error_object, key, stack_trace);
+ }
+}
+
+
Handle<JSArray> Isolate::CaptureCurrentStackTrace(
int frame_limit, StackTrace::StackTraceOptions options) {
// Ensure no negative values.
@@ -571,7 +567,7 @@ Handle<JSArray> Isolate::CaptureCurrentStackTrace(
frame->Summarize(&frames);
for (int i = frames.length() - 1; i >= 0 && frames_seen < limit; i--) {
// Create a JSObject to hold the information for the StackFrame.
- Handle<JSObject> stackFrame = factory()->NewJSObject(object_function());
+ Handle<JSObject> stack_frame = factory()->NewJSObject(object_function());
Handle<JSFunction> fun = frames[i].function();
Handle<Script> script(Script::cast(fun->shared()->script()));
@@ -592,16 +588,24 @@ Handle<JSArray> Isolate::CaptureCurrentStackTrace(
// tag.
column_offset += script->column_offset()->value();
}
- SetLocalPropertyNoThrow(stackFrame, column_key,
- Handle<Smi>(Smi::FromInt(column_offset + 1)));
+ CHECK_NOT_EMPTY_HANDLE(
+ this,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ stack_frame, column_key,
+ Handle<Smi>(Smi::FromInt(column_offset + 1)), NONE));
}
- SetLocalPropertyNoThrow(stackFrame, line_key,
- Handle<Smi>(Smi::FromInt(line_number + 1)));
+ CHECK_NOT_EMPTY_HANDLE(
+ this,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ stack_frame, line_key,
+ Handle<Smi>(Smi::FromInt(line_number + 1)), NONE));
}
if (options & StackTrace::kScriptName) {
Handle<Object> script_name(script->name(), this);
- SetLocalPropertyNoThrow(stackFrame, script_key, script_name);
+ CHECK_NOT_EMPTY_HANDLE(this,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ stack_frame, script_key, script_name, NONE));
}
if (options & StackTrace::kScriptNameOrSourceURL) {
@@ -617,8 +621,10 @@ Handle<JSArray> Isolate::CaptureCurrentStackTrace(
if (caught_exception) {
result = factory()->undefined_value();
}
- SetLocalPropertyNoThrow(stackFrame, script_name_or_source_url_key,
- result);
+ CHECK_NOT_EMPTY_HANDLE(this,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ stack_frame, script_name_or_source_url_key,
+ result, NONE));
}
if (options & StackTrace::kFunctionName) {
@@ -626,23 +632,30 @@ Handle<JSArray> Isolate::CaptureCurrentStackTrace(
if (fun_name->ToBoolean()->IsFalse()) {
fun_name = Handle<Object>(fun->shared()->inferred_name(), this);
}
- SetLocalPropertyNoThrow(stackFrame, function_key, fun_name);
+ CHECK_NOT_EMPTY_HANDLE(this,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ stack_frame, function_key, fun_name, NONE));
}
if (options & StackTrace::kIsEval) {
int type = Smi::cast(script->compilation_type())->value();
Handle<Object> is_eval = (type == Script::COMPILATION_TYPE_EVAL) ?
factory()->true_value() : factory()->false_value();
- SetLocalPropertyNoThrow(stackFrame, eval_key, is_eval);
+ CHECK_NOT_EMPTY_HANDLE(this,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ stack_frame, eval_key, is_eval, NONE));
}
if (options & StackTrace::kIsConstructor) {
Handle<Object> is_constructor = (frames[i].is_constructor()) ?
factory()->true_value() : factory()->false_value();
- SetLocalPropertyNoThrow(stackFrame, constructor_key, is_constructor);
+ CHECK_NOT_EMPTY_HANDLE(this,
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ stack_frame, constructor_key,
+ is_constructor, NONE));
}
- FixedArray::cast(stack_trace->elements())->set(frames_seen, *stackFrame);
+ FixedArray::cast(stack_trace->elements())->set(frames_seen, *stack_frame);
frames_seen++;
}
it.Advance();
@@ -730,12 +743,6 @@ void Isolate::SetFailedAccessCheckCallback(
thread_local_top()->failed_access_check_callback_ = callback;
}
-
-void Isolate::SetUserObjectComparisonCallback(
- v8::UserObjectComparisonCallback callback) {
- thread_local_top()->user_object_comparison_callback_ = callback;
-}
-
void Isolate::ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type) {
if (!thread_local_top()->failed_access_check_callback_) return;
@@ -753,10 +760,12 @@ void Isolate::ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type) {
HandleScope scope;
Handle<JSObject> receiver_handle(receiver);
Handle<Object> data(AccessCheckInfo::cast(data_obj)->data());
- thread_local_top()->failed_access_check_callback_(
- v8::Utils::ToLocal(receiver_handle),
- type,
- v8::Utils::ToLocal(data));
+ { VMState state(this, EXTERNAL);
+ thread_local_top()->failed_access_check_callback_(
+ v8::Utils::ToLocal(receiver_handle),
+ type,
+ v8::Utils::ToLocal(data));
+ }
}
@@ -1001,7 +1010,7 @@ bool Isolate::ShouldReportException(bool* can_be_caught_externally,
// Find the top-most try-catch handler.
StackHandler* handler =
StackHandler::FromAddress(Isolate::handler(thread_local_top()));
- while (handler != NULL && !handler->is_try_catch()) {
+ while (handler != NULL && !handler->is_catch()) {
handler = handler->next();
}
@@ -1027,22 +1036,39 @@ bool Isolate::ShouldReportException(bool* can_be_caught_externally,
}
-void Isolate::DoThrow(MaybeObject* exception, MessageLocation* location) {
+bool Isolate::IsErrorObject(Handle<Object> obj) {
+ if (!obj->IsJSObject()) return false;
+
+ String* error_key = *(factory()->LookupAsciiSymbol("$Error"));
+ Object* error_constructor =
+ js_builtins_object()->GetPropertyNoExceptionThrown(error_key);
+
+ for (Object* prototype = *obj; !prototype->IsNull();
+ prototype = prototype->GetPrototype()) {
+ if (!prototype->IsJSObject()) return false;
+ if (JSObject::cast(prototype)->map()->constructor() == error_constructor) {
+ return true;
+ }
+ }
+ return false;
+}
+
+
+void Isolate::DoThrow(Object* exception, MessageLocation* location) {
ASSERT(!has_pending_exception());
HandleScope scope;
- Object* exception_object = Smi::FromInt(0);
- bool is_object = exception->ToObject(&exception_object);
- Handle<Object> exception_handle(exception_object);
+ Handle<Object> exception_handle(exception);
// Determine reporting and whether the exception is caught externally.
bool catchable_by_javascript = is_catchable_by_javascript(exception);
- // Only real objects can be caught by JS.
- ASSERT(!catchable_by_javascript || is_object);
bool can_be_caught_externally = false;
bool should_report_exception =
ShouldReportException(&can_be_caught_externally, catchable_by_javascript);
bool report_exception = catchable_by_javascript && should_report_exception;
+ bool try_catch_needs_message =
+ can_be_caught_externally && try_catch_handler()->capture_message_;
+ bool bootstrapping = bootstrapper()->IsActive();
#ifdef ENABLE_DEBUGGER_SUPPORT
// Notify debugger of exception.
@@ -1051,34 +1077,52 @@ void Isolate::DoThrow(MaybeObject* exception, MessageLocation* location) {
}
#endif
- // Generate the message.
- Handle<Object> message_obj;
- MessageLocation potential_computed_location;
- bool try_catch_needs_message =
- can_be_caught_externally &&
- try_catch_handler()->capture_message_;
+ // Generate the message if required.
if (report_exception || try_catch_needs_message) {
+ MessageLocation potential_computed_location;
if (location == NULL) {
- // If no location was specified we use a computed one instead
+ // If no location was specified we use a computed one instead.
ComputeLocation(&potential_computed_location);
location = &potential_computed_location;
}
- if (!bootstrapper()->IsActive()) {
- // It's not safe to try to make message objects or collect stack
- // traces while the bootstrapper is active since the infrastructure
- // may not have been properly initialized.
+ // It's not safe to try to make message objects or collect stack traces
+ // while the bootstrapper is active since the infrastructure may not have
+ // been properly initialized.
+ if (!bootstrapping) {
Handle<String> stack_trace;
if (FLAG_trace_exception) stack_trace = StackTraceString();
Handle<JSArray> stack_trace_object;
- if (report_exception && capture_stack_trace_for_uncaught_exceptions_) {
+ if (capture_stack_trace_for_uncaught_exceptions_) {
+ if (IsErrorObject(exception_handle)) {
+ // We fetch the stack trace that corresponds to this error object.
+ String* key = heap()->hidden_stack_trace_symbol();
+ Object* stack_property =
+ JSObject::cast(*exception_handle)->GetHiddenProperty(key);
+ // Property lookup may have failed. In this case it's probably not
+ // a valid Error object.
+ if (stack_property->IsJSArray()) {
+ stack_trace_object = Handle<JSArray>(JSArray::cast(stack_property));
+ }
+ }
+ if (stack_trace_object.is_null()) {
+ // Not an error object, we capture at throw site.
stack_trace_object = CaptureCurrentStackTrace(
stack_trace_for_uncaught_exceptions_frame_limit_,
stack_trace_for_uncaught_exceptions_options_);
+ }
}
- ASSERT(is_object); // Can't use the handle unless there's a real object.
- message_obj = MessageHandler::MakeMessageObject("uncaught_exception",
- location, HandleVector<Object>(&exception_handle, 1), stack_trace,
+ Handle<Object> message_obj = MessageHandler::MakeMessageObject(
+ "uncaught_exception",
+ location,
+ HandleVector<Object>(&exception_handle, 1),
+ stack_trace,
stack_trace_object);
+ thread_local_top()->pending_message_obj_ = *message_obj;
+ if (location != NULL) {
+ thread_local_top()->pending_message_script_ = *location->script();
+ thread_local_top()->pending_message_start_pos_ = location->start_pos();
+ thread_local_top()->pending_message_end_pos_ = location->end_pos();
+ }
} else if (location != NULL && !location->script().is_null()) {
// We are bootstrapping and caught an error where the location is set
// and we have a script for the location.
@@ -1094,30 +1138,13 @@ void Isolate::DoThrow(MaybeObject* exception, MessageLocation* location) {
// Save the message for reporting if the the exception remains uncaught.
thread_local_top()->has_pending_message_ = report_exception;
- if (!message_obj.is_null()) {
- thread_local_top()->pending_message_obj_ = *message_obj;
- if (location != NULL) {
- thread_local_top()->pending_message_script_ = *location->script();
- thread_local_top()->pending_message_start_pos_ = location->start_pos();
- thread_local_top()->pending_message_end_pos_ = location->end_pos();
- }
- }
// Do not forget to clean catcher_ if currently thrown exception cannot
// be caught. If necessary, ReThrow will update the catcher.
thread_local_top()->catcher_ = can_be_caught_externally ?
try_catch_handler() : NULL;
- // NOTE: Notifying the debugger or generating the message
- // may have caused new exceptions. For now, we just ignore
- // that and set the pending exception to the original one.
- if (is_object) {
- set_pending_exception(*exception_handle);
- } else {
- // Failures are not on the heap so they neither need nor work with handles.
- ASSERT(exception_handle->IsFailure());
- set_pending_exception(exception);
- }
+ set_pending_exception(*exception_handle);
}
@@ -1153,8 +1180,8 @@ bool Isolate::IsExternallyCaught() {
StackHandler* handler =
StackHandler::FromAddress(Isolate::handler(thread_local_top()));
while (handler != NULL && handler->address() < external_handler_address) {
- ASSERT(!handler->is_try_catch());
- if (handler->is_try_finally()) return false;
+ ASSERT(!handler->is_catch());
+ if (handler->is_finally()) return false;
handler = handler->next();
}
@@ -1209,7 +1236,7 @@ bool Isolate::OptionalRescheduleException(bool is_bottom_call) {
ASSERT(has_pending_exception());
PropagatePendingExceptionToExternalTryCatch();
- // Allways reschedule out of memory exceptions.
+ // Always reschedule out of memory exceptions.
if (!is_out_of_memory()) {
bool is_termination_exception =
pending_exception() == heap_.termination_exception();
@@ -1441,9 +1468,12 @@ Isolate::Isolate()
context_switcher_(NULL),
thread_manager_(NULL),
fp_stubs_generated_(false),
+ has_installed_extensions_(false),
string_tracker_(NULL),
regexp_stack_(NULL),
- embedder_data_(NULL) {
+ date_cache_(NULL),
+ embedder_data_(NULL),
+ context_exit_happened_(false) {
TRACE_ISOLATE(constructor);
memset(isolate_addresses_, 0,
@@ -1574,6 +1604,9 @@ Isolate::~Isolate() {
delete unicode_cache_;
unicode_cache_ = NULL;
+ delete date_cache_;
+ date_cache_ = NULL;
+
delete regexp_stack_;
regexp_stack_ = NULL;
@@ -1738,12 +1771,13 @@ bool Isolate::Init(Deserializer* des) {
stub_cache_ = new StubCache(this);
regexp_stack_ = new RegExpStack();
regexp_stack_->isolate_ = this;
+ date_cache_ = new DateCache();
// Enable logging before setting up the heap
- logger_->Setup();
+ logger_->SetUp();
- CpuProfiler::Setup();
- HeapProfiler::Setup();
+ CpuProfiler::SetUp();
+ HeapProfiler::SetUp();
// Initialize other runtime facilities
#if defined(USE_SIMULATOR)
@@ -1760,10 +1794,10 @@ bool Isolate::Init(Deserializer* des) {
stack_guard_.InitThread(lock);
}
- // Setup the object heap.
+ // SetUp the object heap.
const bool create_heap_objects = (des == NULL);
- ASSERT(!heap_.HasBeenSetup());
- if (!heap_.Setup(create_heap_objects)) {
+ ASSERT(!heap_.HasBeenSetUp());
+ if (!heap_.SetUp(create_heap_objects)) {
V8::SetFatalError();
return false;
}
@@ -1771,7 +1805,7 @@ bool Isolate::Init(Deserializer* des) {
InitializeThreadLocal();
bootstrapper_->Initialize(create_heap_objects);
- builtins_.Setup(create_heap_objects);
+ builtins_.SetUp(create_heap_objects);
// Only preallocate on the first initialization.
if (FLAG_preallocate_message_memory && preallocated_message_space_ == NULL) {
@@ -1790,15 +1824,14 @@ bool Isolate::Init(Deserializer* des) {
}
#ifdef ENABLE_DEBUGGER_SUPPORT
- debug_->Setup(create_heap_objects);
+ debug_->SetUp(create_heap_objects);
#endif
- stub_cache_->Initialize(create_heap_objects);
// If we are deserializing, read the state into the now-empty heap.
if (des != NULL) {
des->Deserialize();
- stub_cache_->Initialize(true);
}
+ stub_cache_->Initialize();
// Finish initialization of ThreadLocal after deserialization is done.
clear_pending_exception();
@@ -1811,7 +1844,7 @@ bool Isolate::Init(Deserializer* des) {
deoptimizer_data_ = new DeoptimizerData;
runtime_profiler_ = new RuntimeProfiler(this);
- runtime_profiler_->Setup();
+ runtime_profiler_->SetUp();
// If we are deserializing, log non-function code objects and compiled
// functions found in the snapshot.
@@ -1822,6 +1855,7 @@ bool Isolate::Init(Deserializer* des) {
}
state_ = INITIALIZED;
+ time_millis_at_init_ = OS::TimeCurrentMillis();
return true;
}
diff --git a/src/3rdparty/v8/src/isolate.h b/src/3rdparty/v8/src/isolate.h
index 17461cf..2ff1318 100644
--- a/src/3rdparty/v8/src/isolate.h
+++ b/src/3rdparty/v8/src/isolate.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -36,8 +36,10 @@
#include "contexts.h"
#include "execution.h"
#include "frames.h"
+#include "date.h"
#include "global-handles.h"
#include "handles.h"
+#include "hashmap.h"
#include "heap.h"
#include "regexp-stack.h"
#include "runtime-profiler.h"
@@ -106,15 +108,28 @@ class Simulator;
// of handles to the actual constants.
typedef ZoneList<Handle<Object> > ZoneObjectList;
-#define RETURN_IF_SCHEDULED_EXCEPTION(isolate) \
- if (isolate->has_scheduled_exception()) \
- return isolate->PromoteScheduledException()
+#define RETURN_IF_SCHEDULED_EXCEPTION(isolate) \
+ do { \
+ Isolate* __isolate__ = (isolate); \
+ if (__isolate__->has_scheduled_exception()) { \
+ return __isolate__->PromoteScheduledException(); \
+ } \
+ } while (false)
#define RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, value) \
- if (call.is_null()) { \
- ASSERT(isolate->has_pending_exception()); \
- return value; \
- }
+ do { \
+ if ((call).is_null()) { \
+ ASSERT((isolate)->has_pending_exception()); \
+ return (value); \
+ } \
+ } while (false)
+
+#define CHECK_NOT_EMPTY_HANDLE(isolate, call) \
+ do { \
+ ASSERT(!(isolate)->has_pending_exception()); \
+ CHECK(!(call).is_null()); \
+ CHECK(!(isolate)->has_pending_exception()); \
+ } while (false)
#define RETURN_IF_EMPTY_HANDLE(isolate, call) \
RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, Failure::Exception())
@@ -245,7 +260,7 @@ class ThreadLocalTop BASE_EMBEDDED {
#endif
#endif // USE_SIMULATOR
- Address js_entry_sp_; // the stack pointer of the bottom js entry frame
+ Address js_entry_sp_; // the stack pointer of the bottom JS entry frame
Address external_callback_; // the external callback we're currently in
StateTag current_vm_state_;
@@ -258,9 +273,6 @@ class ThreadLocalTop BASE_EMBEDDED {
// Head of the list of live LookupResults.
LookupResult* top_lookup_result_;
- // Call back function for user object comparisons
- v8::UserObjectComparisonCallback user_object_comparison_callback_;
-
// Whether out of memory exceptions should be ignored.
bool ignore_out_of_memory_;
@@ -270,23 +282,6 @@ class ThreadLocalTop BASE_EMBEDDED {
Address try_catch_handler_address_;
};
-#if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS)
-
-#define ISOLATE_PLATFORM_INIT_LIST(V) \
- /* VirtualFrame::SpilledScope state */ \
- V(bool, is_virtual_frame_in_spilled_scope, false) \
- /* CodeGenerator::EmitNamedStore state */ \
- V(int, inlined_write_barrier_size, -1)
-
-#if !defined(__arm__) && !defined(__mips__)
-class HashMap;
-#endif
-
-#else
-
-#define ISOLATE_PLATFORM_INIT_LIST(V)
-
-#endif
#ifdef ENABLE_DEBUGGER_SUPPORT
@@ -323,8 +318,6 @@ class HashMap;
typedef List<HeapObject*, PreallocatedStorage> DebugObjectCache;
#define ISOLATE_INIT_LIST(V) \
- /* AssertNoZoneAllocation state. */ \
- V(bool, zone_allow_allocation, true) \
/* SerializerDeserializer state. */ \
V(int, serialize_partial_snapshot_cache_length, 0) \
/* Assembler state. */ \
@@ -352,14 +345,13 @@ typedef List<HeapObject*, PreallocatedStorage> DebugObjectCache;
/* Serializer state. */ \
V(ExternalReferenceTable*, external_reference_table, NULL) \
/* AstNode state. */ \
- V(unsigned, ast_node_id, 0) \
+ V(int, ast_node_id, 0) \
V(unsigned, ast_node_count, 0) \
/* SafeStackFrameIterator activations count. */ \
V(int, safe_stack_iterator_counter, 0) \
V(uint64_t, enabled_cpu_features, 0) \
V(CpuProfiler*, cpu_profiler, NULL) \
V(HeapProfiler*, heap_profiler, NULL) \
- ISOLATE_PLATFORM_INIT_LIST(V) \
ISOLATE_DEBUGGER_INIT_LIST(V)
class Isolate {
@@ -475,7 +467,7 @@ class Isolate {
bool IsDefaultIsolate() const { return this == default_isolate_; }
// Ensures that process-wide resources and the default isolate have been
- // allocated. It is only necessary to call this method in rare casses, for
+ // allocated. It is only necessary to call this method in rare cases, for
// example if you are using V8 from within the body of a static initializer.
// Safe to call multiple times.
static void EnsureDefaultIsolate();
@@ -506,6 +498,8 @@ class Isolate {
return thread_id_key_;
}
+ static Thread::LocalStorageKey per_isolate_thread_data_key();
+
// If a client attempts to create a Locker without specifying an isolate,
// we assume that the client is using legacy behavior. Set up the current
// thread to be inside the implicit isolate (or fail a check if we have
@@ -625,7 +619,7 @@ class Isolate {
void* formal_count_address() { return &thread_local_top_.formal_count_; }
// Returns the global object of the current context. It could be
- // a builtin object, or a js global object.
+ // a builtin object, or a JS global object.
Handle<GlobalObject> global() {
return Handle<GlobalObject>(context()->global());
}
@@ -693,6 +687,8 @@ class Isolate {
int frame_limit,
StackTrace::StackTraceOptions options);
+ void CaptureAndSetCurrentStackTraceFor(Handle<JSObject> error_object);
+
// Returns if the top context may access the given global object. If
// the result is false, the pending exception is guaranteed to be
// set.
@@ -706,11 +702,6 @@ class Isolate {
void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback);
void ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type);
- void SetUserObjectComparisonCallback(v8::UserObjectComparisonCallback callback);
- inline v8::UserObjectComparisonCallback UserObjectComparisonCallback() {
- return thread_local_top()->user_object_comparison_callback_;
- }
-
// Exception throwing support. The caller should use the result
// of Throw() as its return value.
Failure* Throw(Object* exception, MessageLocation* location = NULL);
@@ -724,7 +715,7 @@ class Isolate {
// Promote a scheduled exception to pending. Asserts has_scheduled_exception.
Failure* PromoteScheduledException();
- void DoThrow(MaybeObject* exception, MessageLocation* location);
+ void DoThrow(Object* exception, MessageLocation* location);
// Checks if exception should be reported and finds out if it's
// caught externally.
bool ShouldReportException(bool* can_be_caught_externally,
@@ -903,6 +894,12 @@ class Isolate {
Builtins* builtins() { return &builtins_; }
+ void NotifyExtensionInstalled() {
+ has_installed_extensions_ = true;
+ }
+
+ bool has_installed_extensions() { return has_installed_extensions_; }
+
unibrow::Mapping<unibrow::Ecma262Canonicalize>*
regexp_macro_assembler_canonicalize() {
return &regexp_macro_assembler_canonicalize_;
@@ -930,6 +927,7 @@ class Isolate {
}
#endif
+ inline bool IsDebuggerActive();
inline bool DebuggerHasBreakPoints();
#ifdef DEBUG
@@ -1012,9 +1010,34 @@ class Isolate {
thread_local_top_.top_lookup_result_ = top;
}
+ bool context_exit_happened() {
+ return context_exit_happened_;
+ }
+ void set_context_exit_happened(bool context_exit_happened) {
+ context_exit_happened_ = context_exit_happened;
+ }
+
+ double time_millis_since_init() {
+ return OS::TimeCurrentMillis() - time_millis_at_init_;
+ }
+
+ DateCache* date_cache() {
+ return date_cache_;
+ }
+
+ void set_date_cache(DateCache* date_cache) {
+ if (date_cache != date_cache_) {
+ delete date_cache_;
+ }
+ date_cache_ = date_cache;
+ }
+
private:
Isolate();
+ friend struct GlobalState;
+ friend struct InitializeGlobalState;
+
// The per-process lock should be acquired before the ThreadDataTable is
// modified.
class ThreadDataTable {
@@ -1053,6 +1076,7 @@ class Isolate {
Isolate* previous_isolate;
EntryStackItem* previous_item;
+ private:
DISALLOW_COPY_AND_ASSIGN(EntryStackItem);
};
@@ -1087,7 +1111,7 @@ class Isolate {
// If one does not yet exist, allocate a new one.
PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();
-// PreInits and returns a default isolate. Needed when a new thread tries
+ // PreInits and returns a default isolate. Needed when a new thread tries
// to create a Locker for the first time (the lock itself is in the isolate).
static Isolate* GetDefaultIsolateForLocking();
@@ -1118,6 +1142,10 @@ class Isolate {
void InitializeDebugger();
+ // Traverse prototype chain to find out whether the object is derived from
+ // the Error object.
+ bool IsErrorObject(Handle<Object> obj);
+
int stack_trace_nesting_level_;
StringStream* incomplete_message_;
// The preallocated memory thread singleton.
@@ -1164,6 +1192,7 @@ class Isolate {
bool fp_stubs_generated_;
StaticResource<SafeStringInputBuffer> compiler_safe_string_input_buffer_;
Builtins builtins_;
+ bool has_installed_extensions_;
StringTracker* string_tracker_;
unibrow::Mapping<unibrow::Ecma262UnCanonicalize> jsregexp_uncanonicalize_;
unibrow::Mapping<unibrow::CanonicalizationRange> jsregexp_canonrange_;
@@ -1173,9 +1202,19 @@ class Isolate {
unibrow::Mapping<unibrow::Ecma262Canonicalize>
regexp_macro_assembler_canonicalize_;
RegExpStack* regexp_stack_;
+
+ DateCache* date_cache_;
+
unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
void* embedder_data_;
+ // The garbage collector should be a little more aggressive when it knows
+ // that a context was recently exited.
+ bool context_exit_happened_;
+
+ // Time stamp at initialization.
+ double time_millis_at_init_;
+
#if defined(V8_TARGET_ARCH_ARM) && !defined(__arm__) || \
defined(V8_TARGET_ARCH_MIPS) && !defined(__mips__)
bool simulator_initialized_;
diff --git a/src/3rdparty/v8/src/json-parser.h b/src/3rdparty/v8/src/json-parser.h
index ca796a6..d22cd0d 100644
--- a/src/3rdparty/v8/src/json-parser.h
+++ b/src/3rdparty/v8/src/json-parser.h
@@ -130,7 +130,7 @@ class JsonParser BASE_EMBEDDED {
// An object literal is a squiggly-braced and comma separated sequence
// (possibly empty) of key/value pairs, where the key is a JSON string
// literal, the value is a JSON value, and the two are separated by a colon.
- // A JSON array dosn't allow numbers and identifiers as keys, like a
+ // A JSON array doesn't allow numbers and identifiers as keys, like a
// JavaScript array.
Handle<Object> ParseJsonObject();
@@ -177,7 +177,7 @@ Handle<Object> JsonParser<seq_ascii>::ParseJson(Handle<String> source) {
// Set initial position right before the string.
position_ = -1;
- // Advance to the first character (posibly EOS)
+ // Advance to the first character (possibly EOS)
AdvanceSkipWhitespace();
Handle<Object> result = ParseJsonValue();
if (result.is_null() || c0_ != kEndOfString) {
@@ -303,11 +303,12 @@ Handle<Object> JsonParser<seq_ascii>::ParseJsonObject() {
uint32_t index;
if (key->AsArrayIndex(&index)) {
- SetOwnElement(json_object, index, value, kNonStrictMode);
+ JSObject::SetOwnElement(json_object, index, value, kNonStrictMode);
} else if (key->Equals(isolate()->heap()->Proto_symbol())) {
SetPrototype(json_object, value);
} else {
- SetLocalPropertyIgnoreAttributes(json_object, key, value, NONE);
+ JSObject::SetLocalPropertyIgnoreAttributes(
+ json_object, key, value, NONE);
}
} while (MatchSkipWhiteSpace(','));
if (c0_ != '}') {
diff --git a/src/3rdparty/v8/src/json.js b/src/3rdparty/v8/src/json.js
index deba126..ccef445 100644
--- a/src/3rdparty/v8/src/json.js
+++ b/src/3rdparty/v8/src/json.js
@@ -345,4 +345,4 @@ function SetUpJSON() {
));
}
-SetUpJSON()
+SetUpJSON();
diff --git a/src/3rdparty/v8/src/jsregexp.cc b/src/3rdparty/v8/src/jsregexp.cc
index 18ff257..b7d0d30 100644
--- a/src/3rdparty/v8/src/jsregexp.cc
+++ b/src/3rdparty/v8/src/jsregexp.cc
@@ -108,6 +108,36 @@ static inline void ThrowRegExpException(Handle<JSRegExp> re,
}
+// More makes code generation slower, less makes V8 benchmark score lower.
+const int kMaxLookaheadForBoyerMoore = 8;
+// In a 3-character pattern you can maximally step forwards 3 characters
+// at a time, which is not always enough to pay for the extra logic.
+const int kPatternTooShortForBoyerMoore = 2;
+
+
+// Identifies the sort of regexps where the regexp engine is faster
+// than the code used for atom matches.
+static bool HasFewDifferentCharacters(Handle<String> pattern) {
+ int length = Min(kMaxLookaheadForBoyerMoore, pattern->length());
+ if (length <= kPatternTooShortForBoyerMoore) return false;
+ const int kMod = 128;
+ bool character_found[kMod];
+ int different = 0;
+ memset(&character_found[0], 0, sizeof(character_found));
+ for (int i = 0; i < length; i++) {
+ int ch = (pattern->Get(i) & (kMod - 1));
+ if (!character_found[ch]) {
+ character_found[ch] = true;
+ different++;
+ // We declare a regexp low-alphabet if it has at least 3 times as many
+ // characters as it has different characters.
+ if (different * 3 > length) return false;
+ }
+ }
+ return true;
+}
+
+
// Generic RegExp methods. Dispatches to implementation specific methods.
@@ -141,9 +171,14 @@ Handle<Object> RegExpImpl::Compile(Handle<JSRegExp> re,
return Handle<Object>::null();
}
- if (parse_result.simple && !flags.is_ignore_case()) {
+ bool has_been_compiled = false;
+
+ if (parse_result.simple &&
+ !flags.is_ignore_case() &&
+ !HasFewDifferentCharacters(pattern)) {
// Parse-tree is a single atom that is equal to the pattern.
AtomCompile(re, pattern, flags, pattern);
+ has_been_compiled = true;
} else if (parse_result.tree->IsAtom() &&
!flags.is_ignore_case() &&
parse_result.capture_count == 0) {
@@ -151,8 +186,12 @@ Handle<Object> RegExpImpl::Compile(Handle<JSRegExp> re,
Vector<const uc16> atom_pattern = atom->data();
Handle<String> atom_string =
isolate->factory()->NewStringFromTwoByte(atom_pattern);
- AtomCompile(re, pattern, flags, atom_string);
- } else {
+ if (!HasFewDifferentCharacters(atom_string)) {
+ AtomCompile(re, pattern, flags, atom_string);
+ has_been_compiled = true;
+ }
+ }
+ if (!has_been_compiled) {
IrregexpInitialize(re, pattern, flags, parse_result.capture_count);
}
ASSERT(re->data()->IsFixedArray());
@@ -175,7 +214,8 @@ Handle<Object> RegExpImpl::Exec(Handle<JSRegExp> regexp,
case JSRegExp::IRREGEXP: {
Handle<Object> result =
IrregexpExec(regexp, subject, index, last_match_info);
- ASSERT(!result.is_null() || Isolate::Current()->has_pending_exception());
+ ASSERT(!result.is_null() ||
+ regexp->GetIsolate()->has_pending_exception());
return result;
}
default:
@@ -279,7 +319,8 @@ Handle<Object> RegExpImpl::AtomExec(Handle<JSRegExp> re,
// from the source pattern.
// If compilation fails, an exception is thrown and this function
// returns false.
-bool RegExpImpl::EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii) {
+bool RegExpImpl::EnsureCompiledIrregexp(
+ Handle<JSRegExp> re, Handle<String> sample_subject, bool is_ascii) {
Object* compiled_code = re->DataAt(JSRegExp::code_index(is_ascii));
#ifdef V8_INTERPRETED_REGEXP
if (compiled_code->IsByteArray()) return true;
@@ -295,7 +336,7 @@ bool RegExpImpl::EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii) {
ASSERT(compiled_code->IsSmi());
return true;
}
- return CompileIrregexp(re, is_ascii);
+ return CompileIrregexp(re, sample_subject, is_ascii);
}
@@ -315,7 +356,9 @@ static bool CreateRegExpErrorObjectAndThrow(Handle<JSRegExp> re,
}
-bool RegExpImpl::CompileIrregexp(Handle<JSRegExp> re, bool is_ascii) {
+bool RegExpImpl::CompileIrregexp(Handle<JSRegExp> re,
+ Handle<String> sample_subject,
+ bool is_ascii) {
// Compile the RegExp.
Isolate* isolate = re->GetIsolate();
ZoneScope zone_scope(isolate, DELETE_ON_EXIT);
@@ -364,6 +407,7 @@ bool RegExpImpl::CompileIrregexp(Handle<JSRegExp> re, bool is_ascii) {
flags.is_ignore_case(),
flags.is_multiline(),
pattern,
+ sample_subject,
is_ascii);
if (result.error_message != NULL) {
// Unable to compile regexp.
@@ -434,7 +478,7 @@ int RegExpImpl::IrregexpPrepare(Handle<JSRegExp> regexp,
// Check the asciiness of the underlying storage.
bool is_ascii = subject->IsAsciiRepresentationUnderneath();
- if (!EnsureCompiledIrregexp(regexp, is_ascii)) return -1;
+ if (!EnsureCompiledIrregexp(regexp, subject, is_ascii)) return -1;
#ifdef V8_INTERPRETED_REGEXP
// Byte-code regexp needs space allocated for all its registers.
@@ -465,7 +509,7 @@ RegExpImpl::IrregexpResult RegExpImpl::IrregexpExecOnce(
#ifndef V8_INTERPRETED_REGEXP
ASSERT(output.length() >= (IrregexpNumberOfCaptures(*irregexp) + 1) * 2);
do {
- EnsureCompiledIrregexp(regexp, is_ascii);
+ EnsureCompiledIrregexp(regexp, subject, is_ascii);
Handle<Code> code(IrregexpNativeCode(*irregexp, is_ascii), isolate);
NativeRegExpMacroAssembler::Result res =
NativeRegExpMacroAssembler::Match(code,
@@ -527,6 +571,7 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp,
Handle<String> subject,
int previous_index,
Handle<JSArray> last_match_info) {
+ Isolate* isolate = jsregexp->GetIsolate();
ASSERT_EQ(jsregexp->TypeTag(), JSRegExp::IRREGEXP);
// Prepare space for the return values.
@@ -542,11 +587,11 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp,
int required_registers = RegExpImpl::IrregexpPrepare(jsregexp, subject);
if (required_registers < 0) {
// Compiling failed with an exception.
- ASSERT(Isolate::Current()->has_pending_exception());
+ ASSERT(isolate->has_pending_exception());
return Handle<Object>::null();
}
- OffsetsVector registers(required_registers);
+ OffsetsVector registers(required_registers, isolate);
IrregexpResult res = RegExpImpl::IrregexpExecOnce(
jsregexp, subject, previous_index, Vector<int>(registers.vector(),
@@ -568,11 +613,11 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp,
return last_match_info;
}
if (res == RE_EXCEPTION) {
- ASSERT(Isolate::Current()->has_pending_exception());
+ ASSERT(isolate->has_pending_exception());
return Handle<Object>::null();
}
ASSERT(res == RE_FAILURE);
- return Isolate::Current()->factory()->null_value();
+ return isolate->factory()->null_value();
}
@@ -704,7 +749,7 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp,
// the virtualized backtrack stack and some register changes. When a node is
// to be emitted it can flush the Trace or update it. Flushing the Trace
// will emit code to bring the actual state into line with the virtual state.
-// Avoiding flushing the state can postpone some work (eg updates of capture
+// Avoiding flushing the state can postpone some work (e.g. updates of capture
// registers). Postponing work can save time when executing the regular
// expression since it may be found that the work never has to be done as a
// failure to match can occur. In addition it is much faster to jump to a
@@ -782,6 +827,53 @@ DispatchTable* ChoiceNode::GetTable(bool ignore_case) {
}
+class FrequencyCollator {
+ public:
+ FrequencyCollator() : total_samples_(0) {
+ for (int i = 0; i < RegExpMacroAssembler::kTableSize; i++) {
+ frequencies_[i] = CharacterFrequency(i);
+ }
+ }
+
+ void CountCharacter(int character) {
+ int index = (character & RegExpMacroAssembler::kTableMask);
+ frequencies_[index].Increment();
+ total_samples_++;
+ }
+
+ // Does not measure in percent, but rather per-128 (the table size from the
+ // regexp macro assembler).
+ int Frequency(int in_character) {
+ ASSERT((in_character & RegExpMacroAssembler::kTableMask) == in_character);
+ if (total_samples_ < 1) return 1; // Division by zero.
+ int freq_in_per128 =
+ (frequencies_[in_character].counter() * 128) / total_samples_;
+ return freq_in_per128;
+ }
+
+ private:
+ class CharacterFrequency {
+ public:
+ CharacterFrequency() : counter_(0), character_(-1) { }
+ explicit CharacterFrequency(int character)
+ : counter_(0), character_(character) { }
+
+ void Increment() { counter_++; }
+ int counter() { return counter_; }
+ int character() { return character_; }
+
+ private:
+ int counter_;
+ int character_;
+ };
+
+
+ private:
+ CharacterFrequency frequencies_[RegExpMacroAssembler::kTableSize];
+ int total_samples_;
+};
+
+
class RegExpCompiler {
public:
RegExpCompiler(int capture_count, bool ignore_case, bool is_ascii);
@@ -817,6 +909,7 @@ class RegExpCompiler {
inline bool ignore_case() { return ignore_case_; }
inline bool ascii() { return ascii_; }
+ FrequencyCollator* frequency_collator() { return &frequency_collator_; }
int current_expansion_factor() { return current_expansion_factor_; }
void set_current_expansion_factor(int value) {
@@ -835,6 +928,7 @@ class RegExpCompiler {
bool ascii_;
bool reg_exp_too_big_;
int current_expansion_factor_;
+ FrequencyCollator frequency_collator_;
};
@@ -863,7 +957,8 @@ RegExpCompiler::RegExpCompiler(int capture_count, bool ignore_case, bool ascii)
ignore_case_(ignore_case),
ascii_(ascii),
reg_exp_too_big_(false),
- current_expansion_factor_(1) {
+ current_expansion_factor_(1),
+ frequency_collator_() {
accept_ = new EndNode(EndNode::ACCEPT);
ASSERT(next_register_ - 1 <= RegExpMacroAssembler::kMaxRegister);
}
@@ -1442,7 +1537,7 @@ static bool ShortCutEmitCharacterPair(RegExpMacroAssembler* macro_assembler,
if (ascii) {
char_mask = String::kMaxAsciiCharCode;
} else {
- char_mask = String::kMaxUC16CharCode;
+ char_mask = String::kMaxUtf16CodeUnit;
}
uc16 exor = c1 ^ c2;
// Check whether exor has only one bit set.
@@ -1532,6 +1627,357 @@ static inline bool EmitAtomLetter(Isolate* isolate,
}
+static void EmitBoundaryTest(RegExpMacroAssembler* masm,
+ int border,
+ Label* fall_through,
+ Label* above_or_equal,
+ Label* below) {
+ if (below != fall_through) {
+ masm->CheckCharacterLT(border, below);
+ if (above_or_equal != fall_through) masm->GoTo(above_or_equal);
+ } else {
+ masm->CheckCharacterGT(border - 1, above_or_equal);
+ }
+}
+
+
+static void EmitDoubleBoundaryTest(RegExpMacroAssembler* masm,
+ int first,
+ int last,
+ Label* fall_through,
+ Label* in_range,
+ Label* out_of_range) {
+ if (in_range == fall_through) {
+ if (first == last) {
+ masm->CheckNotCharacter(first, out_of_range);
+ } else {
+ masm->CheckCharacterNotInRange(first, last, out_of_range);
+ }
+ } else {
+ if (first == last) {
+ masm->CheckCharacter(first, in_range);
+ } else {
+ masm->CheckCharacterInRange(first, last, in_range);
+ }
+ if (out_of_range != fall_through) masm->GoTo(out_of_range);
+ }
+}
+
+
+// even_label is for ranges[i] to ranges[i + 1] where i - start_index is even.
+// odd_label is for ranges[i] to ranges[i + 1] where i - start_index is odd.
+static void EmitUseLookupTable(
+ RegExpMacroAssembler* masm,
+ ZoneList<int>* ranges,
+ int start_index,
+ int end_index,
+ int min_char,
+ Label* fall_through,
+ Label* even_label,
+ Label* odd_label) {
+ static const int kSize = RegExpMacroAssembler::kTableSize;
+ static const int kMask = RegExpMacroAssembler::kTableMask;
+
+ int base = (min_char & ~kMask);
+ USE(base);
+
+ // Assert that everything is on one kTableSize page.
+ for (int i = start_index; i <= end_index; i++) {
+ ASSERT_EQ(ranges->at(i) & ~kMask, base);
+ }
+ ASSERT(start_index == 0 || (ranges->at(start_index - 1) & ~kMask) <= base);
+
+ char templ[kSize];
+ Label* on_bit_set;
+ Label* on_bit_clear;
+ int bit;
+ if (even_label == fall_through) {
+ on_bit_set = odd_label;
+ on_bit_clear = even_label;
+ bit = 1;
+ } else {
+ on_bit_set = even_label;
+ on_bit_clear = odd_label;
+ bit = 0;
+ }
+ for (int i = 0; i < (ranges->at(start_index) & kMask) && i < kSize; i++) {
+ templ[i] = bit;
+ }
+ int j = 0;
+ bit ^= 1;
+ for (int i = start_index; i < end_index; i++) {
+ for (j = (ranges->at(i) & kMask); j < (ranges->at(i + 1) & kMask); j++) {
+ templ[j] = bit;
+ }
+ bit ^= 1;
+ }
+ for (int i = j; i < kSize; i++) {
+ templ[i] = bit;
+ }
+ // TODO(erikcorry): Cache these.
+ Handle<ByteArray> ba = FACTORY->NewByteArray(kSize, TENURED);
+ for (int i = 0; i < kSize; i++) {
+ ba->set(i, templ[i]);
+ }
+ masm->CheckBitInTable(ba, on_bit_set);
+ if (on_bit_clear != fall_through) masm->GoTo(on_bit_clear);
+}
+
+
+static void CutOutRange(RegExpMacroAssembler* masm,
+ ZoneList<int>* ranges,
+ int start_index,
+ int end_index,
+ int cut_index,
+ Label* even_label,
+ Label* odd_label) {
+ bool odd = (((cut_index - start_index) & 1) == 1);
+ Label* in_range_label = odd ? odd_label : even_label;
+ Label dummy;
+ EmitDoubleBoundaryTest(masm,
+ ranges->at(cut_index),
+ ranges->at(cut_index + 1) - 1,
+ &dummy,
+ in_range_label,
+ &dummy);
+ ASSERT(!dummy.is_linked());
+ // Cut out the single range by rewriting the array. This creates a new
+ // range that is a merger of the two ranges on either side of the one we
+ // are cutting out. The oddity of the labels is preserved.
+ for (int j = cut_index; j > start_index; j--) {
+ ranges->at(j) = ranges->at(j - 1);
+ }
+ for (int j = cut_index + 1; j < end_index; j++) {
+ ranges->at(j) = ranges->at(j + 1);
+ }
+}
+
+
+// Unicode case. Split the search space into kSize spaces that are handled
+// with recursion.
+static void SplitSearchSpace(ZoneList<int>* ranges,
+ int start_index,
+ int end_index,
+ int* new_start_index,
+ int* new_end_index,
+ int* border) {
+ static const int kSize = RegExpMacroAssembler::kTableSize;
+ static const int kMask = RegExpMacroAssembler::kTableMask;
+
+ int first = ranges->at(start_index);
+ int last = ranges->at(end_index) - 1;
+
+ *new_start_index = start_index;
+ *border = (ranges->at(start_index) & ~kMask) + kSize;
+ while (*new_start_index < end_index) {
+ if (ranges->at(*new_start_index) > *border) break;
+ (*new_start_index)++;
+ }
+ // new_start_index is the index of the first edge that is beyond the
+ // current kSize space.
+
+ // For very large search spaces we do a binary chop search of the non-ASCII
+ // space instead of just going to the end of the current kSize space. The
+ // heuristics are complicated a little by the fact that any 128-character
+ // encoding space can be quickly tested with a table lookup, so we don't
+ // wish to do binary chop search at a smaller granularity than that. A
+ // 128-character space can take up a lot of space in the ranges array if,
+ // for example, we only want to match every second character (eg. the lower
+ // case characters on some Unicode pages).
+ int binary_chop_index = (end_index + start_index) / 2;
+ // The first test ensures that we get to the code that handles the ASCII
+ // range with a single not-taken branch, speeding up this important
+ // character range (even non-ASCII charset-based text has spaces and
+ // punctuation).
+ if (*border - 1 > String::kMaxAsciiCharCode && // ASCII case.
+ end_index - start_index > (*new_start_index - start_index) * 2 &&
+ last - first > kSize * 2 &&
+ binary_chop_index > *new_start_index &&
+ ranges->at(binary_chop_index) >= first + 2 * kSize) {
+ int scan_forward_for_section_border = binary_chop_index;;
+ int new_border = (ranges->at(binary_chop_index) | kMask) + 1;
+
+ while (scan_forward_for_section_border < end_index) {
+ if (ranges->at(scan_forward_for_section_border) > new_border) {
+ *new_start_index = scan_forward_for_section_border;
+ *border = new_border;
+ break;
+ }
+ scan_forward_for_section_border++;
+ }
+ }
+
+ ASSERT(*new_start_index > start_index);
+ *new_end_index = *new_start_index - 1;
+ if (ranges->at(*new_end_index) == *border) {
+ (*new_end_index)--;
+ }
+ if (*border >= ranges->at(end_index)) {
+ *border = ranges->at(end_index);
+ *new_start_index = end_index; // Won't be used.
+ *new_end_index = end_index - 1;
+ }
+}
+
+
+// Gets a series of segment boundaries representing a character class. If the
+// character is in the range between an even and an odd boundary (counting from
+// start_index) then go to even_label, otherwise go to odd_label. We already
+// know that the character is in the range of min_char to max_char inclusive.
+// Either label can be NULL indicating backtracking. Either label can also be
+// equal to the fall_through label.
+static void GenerateBranches(RegExpMacroAssembler* masm,
+ ZoneList<int>* ranges,
+ int start_index,
+ int end_index,
+ uc16 min_char,
+ uc16 max_char,
+ Label* fall_through,
+ Label* even_label,
+ Label* odd_label) {
+ int first = ranges->at(start_index);
+ int last = ranges->at(end_index) - 1;
+
+ ASSERT_LT(min_char, first);
+
+ // Just need to test if the character is before or on-or-after
+ // a particular character.
+ if (start_index == end_index) {
+ EmitBoundaryTest(masm, first, fall_through, even_label, odd_label);
+ return;
+ }
+
+ // Another almost trivial case: There is one interval in the middle that is
+ // different from the end intervals.
+ if (start_index + 1 == end_index) {
+ EmitDoubleBoundaryTest(
+ masm, first, last, fall_through, even_label, odd_label);
+ return;
+ }
+
+ // It's not worth using table lookup if there are very few intervals in the
+ // character class.
+ if (end_index - start_index <= 6) {
+ // It is faster to test for individual characters, so we look for those
+ // first, then try arbitrary ranges in the second round.
+ static int kNoCutIndex = -1;
+ int cut = kNoCutIndex;
+ for (int i = start_index; i < end_index; i++) {
+ if (ranges->at(i) == ranges->at(i + 1) - 1) {
+ cut = i;
+ break;
+ }
+ }
+ if (cut == kNoCutIndex) cut = start_index;
+ CutOutRange(
+ masm, ranges, start_index, end_index, cut, even_label, odd_label);
+ ASSERT_GE(end_index - start_index, 2);
+ GenerateBranches(masm,
+ ranges,
+ start_index + 1,
+ end_index - 1,
+ min_char,
+ max_char,
+ fall_through,
+ even_label,
+ odd_label);
+ return;
+ }
+
+ // If there are a lot of intervals in the regexp, then we will use tables to
+ // determine whether the character is inside or outside the character class.
+ static const int kBits = RegExpMacroAssembler::kTableSizeBits;
+
+ if ((max_char >> kBits) == (min_char >> kBits)) {
+ EmitUseLookupTable(masm,
+ ranges,
+ start_index,
+ end_index,
+ min_char,
+ fall_through,
+ even_label,
+ odd_label);
+ return;
+ }
+
+ if ((min_char >> kBits) != (first >> kBits)) {
+ masm->CheckCharacterLT(first, odd_label);
+ GenerateBranches(masm,
+ ranges,
+ start_index + 1,
+ end_index,
+ first,
+ max_char,
+ fall_through,
+ odd_label,
+ even_label);
+ return;
+ }
+
+ int new_start_index = 0;
+ int new_end_index = 0;
+ int border = 0;
+
+ SplitSearchSpace(ranges,
+ start_index,
+ end_index,
+ &new_start_index,
+ &new_end_index,
+ &border);
+
+ Label handle_rest;
+ Label* above = &handle_rest;
+ if (border == last + 1) {
+ // We didn't find any section that started after the limit, so everything
+ // above the border is one of the terminal labels.
+ above = (end_index & 1) != (start_index & 1) ? odd_label : even_label;
+ ASSERT(new_end_index == end_index - 1);
+ }
+
+ ASSERT_LE(start_index, new_end_index);
+ ASSERT_LE(new_start_index, end_index);
+ ASSERT_LT(start_index, new_start_index);
+ ASSERT_LT(new_end_index, end_index);
+ ASSERT(new_end_index + 1 == new_start_index ||
+ (new_end_index + 2 == new_start_index &&
+ border == ranges->at(new_end_index + 1)));
+ ASSERT_LT(min_char, border - 1);
+ ASSERT_LT(border, max_char);
+ ASSERT_LT(ranges->at(new_end_index), border);
+ ASSERT(border < ranges->at(new_start_index) ||
+ (border == ranges->at(new_start_index) &&
+ new_start_index == end_index &&
+ new_end_index == end_index - 1 &&
+ border == last + 1));
+ ASSERT(new_start_index == 0 || border >= ranges->at(new_start_index - 1));
+
+ masm->CheckCharacterGT(border - 1, above);
+ Label dummy;
+ GenerateBranches(masm,
+ ranges,
+ start_index,
+ new_end_index,
+ min_char,
+ border - 1,
+ &dummy,
+ even_label,
+ odd_label);
+ if (handle_rest.is_linked()) {
+ masm->Bind(&handle_rest);
+ bool flip = (new_start_index & 1) != (start_index & 1);
+ GenerateBranches(masm,
+ ranges,
+ new_start_index,
+ end_index,
+ border,
+ max_char,
+ &dummy,
+ flip ? odd_label : even_label,
+ flip ? even_label : odd_label);
+ }
+}
+
+
static void EmitCharClass(RegExpMacroAssembler* macro_assembler,
RegExpCharacterClass* cc,
bool ascii,
@@ -1540,18 +1986,17 @@ static void EmitCharClass(RegExpMacroAssembler* macro_assembler,
bool check_offset,
bool preloaded) {
ZoneList<CharacterRange>* ranges = cc->ranges();
+ if (!CharacterRange::IsCanonical(ranges)) {
+ CharacterRange::Canonicalize(ranges);
+ }
+
int max_char;
if (ascii) {
max_char = String::kMaxAsciiCharCode;
} else {
- max_char = String::kMaxUC16CharCode;
+ max_char = String::kMaxUtf16CodeUnit;
}
- Label success;
-
- Label* char_is_in_class =
- cc->is_negated() ? on_failure : &success;
-
int range_count = ranges->length();
int last_valid_range = range_count - 1;
@@ -1565,8 +2010,6 @@ static void EmitCharClass(RegExpMacroAssembler* macro_assembler,
if (last_valid_range < 0) {
if (!cc->is_negated()) {
- // TODO(plesner): We can remove this when the node level does our
- // ASCII optimizations for us.
macro_assembler->GoTo(on_failure);
}
if (check_offset) {
@@ -1576,6 +2019,18 @@ static void EmitCharClass(RegExpMacroAssembler* macro_assembler,
}
if (last_valid_range == 0 &&
+ ranges->at(0).IsEverything(max_char)) {
+ if (cc->is_negated()) {
+ macro_assembler->GoTo(on_failure);
+ } else {
+ // This is a common case hit by non-anchored expressions.
+ if (check_offset) {
+ macro_assembler->CheckPosition(cp_offset, on_failure);
+ }
+ }
+ return;
+ }
+ if (last_valid_range == 0 &&
!cc->is_negated() &&
ranges->at(0).IsEverything(max_char)) {
// This is a common case hit by non-anchored expressions.
@@ -1595,64 +2050,43 @@ static void EmitCharClass(RegExpMacroAssembler* macro_assembler,
return;
}
- for (int i = 0; i < last_valid_range; i++) {
- CharacterRange& range = ranges->at(i);
- Label next_range;
- uc16 from = range.from();
- uc16 to = range.to();
- if (from > max_char) {
- continue;
- }
- if (to > max_char) to = max_char;
- if (to == from) {
- macro_assembler->CheckCharacter(to, char_is_in_class);
- } else {
- if (from != 0) {
- macro_assembler->CheckCharacterLT(from, &next_range);
- }
- if (to != max_char) {
- macro_assembler->CheckCharacterLT(to + 1, char_is_in_class);
- } else {
- macro_assembler->GoTo(char_is_in_class);
- }
- }
- macro_assembler->Bind(&next_range);
- }
- CharacterRange& range = ranges->at(last_valid_range);
- uc16 from = range.from();
- uc16 to = range.to();
+ // A new list with ascending entries. Each entry is a code unit
+ // where there is a boundary between code units that are part of
+ // the class and code units that are not. Normally we insert an
+ // entry at zero which goes to the failure label, but if there
+ // was already one there we fall through for success on that entry.
+ // Subsequent entries have alternating meaning (success/failure).
+ ZoneList<int>* range_boundaries = new ZoneList<int>(last_valid_range);
- if (to > max_char) to = max_char;
- ASSERT(to >= from);
+ bool zeroth_entry_is_failure = !cc->is_negated();
- if (to == from) {
- if (cc->is_negated()) {
- macro_assembler->CheckCharacter(to, on_failure);
- } else {
- macro_assembler->CheckNotCharacter(to, on_failure);
- }
- } else {
- if (from != 0) {
- if (cc->is_negated()) {
- macro_assembler->CheckCharacterLT(from, &success);
- } else {
- macro_assembler->CheckCharacterLT(from, on_failure);
- }
- }
- if (to != String::kMaxUC16CharCode) {
- if (cc->is_negated()) {
- macro_assembler->CheckCharacterLT(to + 1, on_failure);
- } else {
- macro_assembler->CheckCharacterGT(to, on_failure);
- }
+ for (int i = 0; i <= last_valid_range; i++) {
+ CharacterRange& range = ranges->at(i);
+ if (range.from() == 0) {
+ ASSERT_EQ(i, 0);
+ zeroth_entry_is_failure = !zeroth_entry_is_failure;
} else {
- if (cc->is_negated()) {
- macro_assembler->GoTo(on_failure);
- }
+ range_boundaries->Add(range.from());
}
+ range_boundaries->Add(range.to() + 1);
+ }
+ int end_index = range_boundaries->length() - 1;
+ if (range_boundaries->at(end_index) > max_char) {
+ end_index--;
}
- macro_assembler->Bind(&success);
+
+ Label fall_through;
+ GenerateBranches(macro_assembler,
+ range_boundaries,
+ 0, // start_index.
+ end_index,
+ 0, // min_char.
+ max_char,
+ &fall_through,
+ zeroth_entry_is_failure ? &fall_through : on_failure,
+ zeroth_entry_is_failure ? on_failure : &fall_through);
+ macro_assembler->Bind(&fall_through);
}
@@ -1715,6 +2149,17 @@ int ActionNode::EatsAtLeast(int still_to_find,
}
+void ActionNode::FillInBMInfo(int offset,
+ BoyerMooreLookahead* bm,
+ bool not_at_start) {
+ if (type_ == BEGIN_SUBMATCH) {
+ bm->SetRest(offset);
+ } else if (type_ != POSITIVE_SUBMATCH_SUCCESS) {
+ on_success()->FillInBMInfo(offset, bm, not_at_start);
+ }
+}
+
+
int AssertionNode::EatsAtLeast(int still_to_find,
int recursion_depth,
bool not_at_start) {
@@ -1731,6 +2176,14 @@ int AssertionNode::EatsAtLeast(int still_to_find,
}
+void AssertionNode::FillInBMInfo(
+ int offset, BoyerMooreLookahead* bm, bool not_at_start) {
+ // Match the behaviour of EatsAtLeast on this node.
+ if (type() == AT_START && not_at_start) return;
+ on_success()->FillInBMInfo(offset, bm, not_at_start);
+}
+
+
int BackReferenceNode::EatsAtLeast(int still_to_find,
int recursion_depth,
bool not_at_start) {
@@ -1833,7 +2286,7 @@ bool QuickCheckDetails::Rationalize(bool asc) {
if (asc) {
char_mask = String::kMaxAsciiCharCode;
} else {
- char_mask = String::kMaxUC16CharCode;
+ char_mask = String::kMaxUtf16CodeUnit;
}
mask_ = 0;
value_ = 0;
@@ -1885,7 +2338,7 @@ bool RegExpNode::EmitQuickCheck(RegExpCompiler* compiler,
if (compiler->ascii()) {
char_mask = String::kMaxAsciiCharCode;
} else {
- char_mask = String::kMaxUC16CharCode;
+ char_mask = String::kMaxUtf16CodeUnit;
}
if ((mask & char_mask) == char_mask) need_mask = false;
mask &= char_mask;
@@ -1937,7 +2390,7 @@ void TextNode::GetQuickCheckDetails(QuickCheckDetails* details,
if (compiler->ascii()) {
char_mask = String::kMaxAsciiCharCode;
} else {
- char_mask = String::kMaxUC16CharCode;
+ char_mask = String::kMaxUtf16CodeUnit;
}
for (int k = 0; k < elms_->length(); k++) {
TextElement elm = elms_->at(k);
@@ -2163,6 +2616,16 @@ void LoopChoiceNode::GetQuickCheckDetails(QuickCheckDetails* details,
}
+void LoopChoiceNode::FillInBMInfo(
+ int offset, BoyerMooreLookahead* bm, bool nas) {
+ if (body_can_be_zero_length_) {
+ bm->SetRest(offset);
+ return;
+ }
+ ChoiceNode::FillInBMInfo(offset, bm, nas);
+}
+
+
void ChoiceNode::GetQuickCheckDetails(QuickCheckDetails* details,
RegExpCompiler* compiler,
int characters_filled_in,
@@ -2636,7 +3099,7 @@ void TextNode::MakeCaseIndependent(bool is_ascii) {
TextElement elm = elms_->at(i);
if (elm.type == TextElement::CHAR_CLASS) {
RegExpCharacterClass* cc = elm.data.u_char_class;
- // None of the standard character classses is different in the case
+ // None of the standard character classes is different in the case
// independent case and it slows us down if we don't know that.
if (cc->is_standard()) continue;
ZoneList<CharacterRange>* ranges = cc->ranges();
@@ -2659,6 +3122,30 @@ int TextNode::GreedyLoopTextLength() {
}
+RegExpNode* TextNode::GetSuccessorOfOmnivorousTextNode(
+ RegExpCompiler* compiler) {
+ if (elms_->length() != 1) return NULL;
+ TextElement elm = elms_->at(0);
+ if (elm.type != TextElement::CHAR_CLASS) return NULL;
+ RegExpCharacterClass* node = elm.data.u_char_class;
+ ZoneList<CharacterRange>* ranges = node->ranges();
+ if (!CharacterRange::IsCanonical(ranges)) {
+ CharacterRange::Canonicalize(ranges);
+ }
+ if (node->is_negated()) {
+ return ranges->length() == 0 ? on_success() : NULL;
+ }
+ if (ranges->length() != 1) return NULL;
+ uint32_t max_char;
+ if (compiler->ascii()) {
+ max_char = String::kMaxAsciiCharCode;
+ } else {
+ max_char = String::kMaxUtf16CodeUnit;
+ }
+ return ranges->at(0).IsEverything(max_char) ? on_success() : NULL;
+}
+
+
// Finds the fixed match length of a sequence of nodes that goes from
// this alternative and back to this choice node. If there are variable
// length nodes or other complications in the way then return a sentinel
@@ -2723,8 +3210,8 @@ void LoopChoiceNode::Emit(RegExpCompiler* compiler, Trace* trace) {
int ChoiceNode::CalculatePreloadCharacters(RegExpCompiler* compiler,
- bool not_at_start) {
- int preload_characters = EatsAtLeast(4, 0, not_at_start);
+ int eats_at_least) {
+ int preload_characters = Min(4, eats_at_least);
if (compiler->macro_assembler()->CanReadUnaligned()) {
bool ascii = compiler->ascii();
if (ascii) {
@@ -2790,6 +3277,198 @@ class AlternativeGenerationList {
};
+BoyerMooreLookahead::BoyerMooreLookahead(
+ int length, int map_length, RegExpCompiler* compiler)
+ : length_(length),
+ map_length_(map_length),
+ compiler_(compiler) {
+ ASSERT(IsPowerOf2(map_length));
+ if (compiler->ascii()) {
+ max_char_ = String::kMaxAsciiCharCode;
+ } else {
+ max_char_ = String::kMaxUtf16CodeUnit;
+ }
+ bitmaps_ = new ZoneList<ZoneList<bool>*>(length);
+ for (int i = 0; i < length; i++) {
+ bitmaps_->Add(new ZoneList<bool>(map_length));
+ ZoneList<bool>* map = bitmaps_->at(i);
+ for (int i = 0; i < map_length; i++) {
+ map->Add(false);
+ }
+ }
+}
+
+
+// Find the longest range of lookahead that has the fewest different
+// characters that can occur at a given position. Since we are optimizing two
+// different parameters at once this is a tradeoff.
+bool BoyerMooreLookahead::FindWorthwhileInterval(int* from, int* to) {
+ int biggest_points = 0;
+ for (int max_number_of_chars = 4;
+ max_number_of_chars < kTooManyCharacters;
+ max_number_of_chars *= 2) {
+ biggest_points =
+ FindBestInterval(max_number_of_chars, biggest_points, from, to);
+ }
+ if (biggest_points == 0) return false;
+ return true;
+}
+
+
+// Find the highest-points range between 0 and length_ where the character
+// information is not too vague. 'Too vague' means that there are more than
+// max_number_of_chars that can occur at this position. Calculates the number
+// of points as the product of width-of-the-range and
+// probability-of-finding-one-of-the-characters, where the probability is
+// calculated using the frequency distribution of the sample subject string.
+int BoyerMooreLookahead::FindBestInterval(
+ int max_number_of_chars, int old_biggest_points, int* from, int* to) {
+ int biggest_points = old_biggest_points;
+ static const int kSize = RegExpMacroAssembler::kTableSize;
+ for (int i = 0; i < length_; ) {
+ while (i < length_ && Count(i) > max_number_of_chars) i++;
+ if (i == length_) break;
+ int remembered_from = i;
+ bool union_map[kSize];
+ for (int j = 0; j < kSize; j++) union_map[j] = false;
+ while (i < length_ && Count(i) <= max_number_of_chars) {
+ ZoneList<bool>* map = bitmaps_->at(i);
+ for (int j = 0; j < kSize; j++) union_map[j] |= map->at(j);
+ i++;
+ }
+ int frequency = 0;
+ for (int j = 0; j < kSize; j++) {
+ if (union_map[j]) {
+ // Add 1 to the frequency to give a small per-character boost for
+ // the cases where our sampling is not good enough and many
+ // characters have a frequency of zero. This means the frequency
+        // can theoretically be up to 2*kSize, though we treat it mostly as
+ // a fraction of kSize.
+ frequency += compiler_->frequency_collator()->Frequency(j) + 1;
+ }
+ }
+ // We use the probability of skipping times the distance we are skipping to
+ // judge the effectiveness of this. Actually we have a cut-off: By
+ // dividing by 2 we switch off the skipping if the probability of skipping
+ // is less than 50%. This is because the multibyte mask-and-compare
+ // skipping in quickcheck is more likely to do well on this case.
+ bool in_quickcheck_range = ((i - remembered_from < 4) ||
+ (compiler_->ascii() ? remembered_from <= 4 : remembered_from <= 2));
+ // Called 'probability' but it is only a rough estimate and can actually
+ // be outside the 0-kSize range.
+ int probability = (in_quickcheck_range ? kSize / 2 : kSize) - frequency;
+ int points = (i - remembered_from) * probability;
+ if (points > biggest_points) {
+ *from = remembered_from;
+ *to = i - 1;
+ biggest_points = points;
+ }
+ }
+ return biggest_points;
+}
+
+
+// Take all the characters that will not prevent a successful match if they
+// occur in the subject string in the range between min_lookahead and
+// max_lookahead (inclusive) measured from the current position. If the
+// character at max_lookahead offset is not one of these characters, then we
+// can safely skip forwards by the number of characters in the range.
+int BoyerMooreLookahead::GetSkipTable(int min_lookahead,
+ int max_lookahead,
+ Handle<ByteArray> boolean_skip_table) {
+ const int kSize = RegExpMacroAssembler::kTableSize;
+
+ const int kSkipArrayEntry = 0;
+ const int kDontSkipArrayEntry = 1;
+
+ for (int i = 0; i < kSize; i++) {
+ boolean_skip_table->set(i, kSkipArrayEntry);
+ }
+ int skip = max_lookahead + 1 - min_lookahead;
+
+ for (int i = max_lookahead; i >= min_lookahead; i--) {
+ ZoneList<bool>* map = bitmaps_->at(i);
+ for (int j = 0; j < map_length_; j++) {
+ if (map->at(j)) {
+ boolean_skip_table->set(j, kDontSkipArrayEntry);
+ }
+ }
+ }
+
+ return skip;
+}
+
+
+// See comment above on the implementation of GetSkipTable.
+bool BoyerMooreLookahead::EmitSkipInstructions(RegExpMacroAssembler* masm) {
+ int min_lookahead = 0;
+ int max_lookahead = 0;
+
+ if (!FindWorthwhileInterval(&min_lookahead, &max_lookahead)) return false;
+
+ bool found_single_character = false;
+ bool abandoned_search_for_single_character = false;
+ int single_character = 0;
+ for (int i = max_lookahead; i >= min_lookahead; i--) {
+ ZoneList<bool>* map = bitmaps_->at(i);
+ for (int j = 0; j < map_length_; j++) {
+ if (map->at(j)) {
+ if (found_single_character) {
+ found_single_character = false; // Found two.
+ abandoned_search_for_single_character = true;
+ break;
+ } else {
+ found_single_character = true;
+ single_character = j;
+ }
+ }
+ }
+ if (abandoned_search_for_single_character) break;
+ }
+
+ int lookahead_width = max_lookahead + 1 - min_lookahead;
+
+ if (found_single_character && lookahead_width == 1 && max_lookahead < 3) {
+ // The mask-compare can probably handle this better.
+ return false;
+ }
+
+ if (found_single_character) {
+ Label cont, again;
+ masm->Bind(&again);
+ masm->LoadCurrentCharacter(max_lookahead, &cont, true);
+ if (max_char_ > map_length_) {
+ ASSERT(map_length_ == RegExpMacroAssembler::kTableSize);
+ masm->CheckCharacterAfterAnd(single_character,
+ RegExpMacroAssembler::kTableMask,
+ &cont);
+ } else {
+ masm->CheckCharacter(single_character, &cont);
+ }
+ masm->AdvanceCurrentPosition(lookahead_width);
+ masm->GoTo(&again);
+ masm->Bind(&cont);
+ return true;
+ }
+
+ Handle<ByteArray> boolean_skip_table =
+ FACTORY->NewByteArray(map_length_, TENURED);
+ int skip_distance = GetSkipTable(
+ min_lookahead, max_lookahead, boolean_skip_table);
+ ASSERT(skip_distance != 0);
+
+ Label cont, again;
+ masm->Bind(&again);
+ masm->LoadCurrentCharacter(max_lookahead, &cont, true);
+ masm->CheckBitInTable(boolean_skip_table, &cont);
+ masm->AdvanceCurrentPosition(skip_distance);
+ masm->GoTo(&again);
+ masm->Bind(&cont);
+
+ return true;
+}
+
+
/* Code generation for choice nodes.
*
* We generate quick checks that do a mask and compare to eliminate a
@@ -2868,7 +3547,6 @@ class AlternativeGenerationList {
* \______________/
*/
-
void ChoiceNode::Emit(RegExpCompiler* compiler, Trace* trace) {
RegExpMacroAssembler* macro_assembler = compiler->macro_assembler();
int choice_count = alternatives_->length();
@@ -2933,10 +3611,48 @@ void ChoiceNode::Emit(RegExpCompiler* compiler, Trace* trace) {
int first_normal_choice = greedy_loop ? 1 : 0;
- int preload_characters =
- CalculatePreloadCharacters(compiler,
- current_trace->at_start() == Trace::FALSE);
- bool preload_is_current =
+ bool not_at_start = current_trace->at_start() == Trace::FALSE;
+ const int kEatsAtLeastNotYetInitialized = -1;
+ int eats_at_least = kEatsAtLeastNotYetInitialized;
+
+ bool skip_was_emitted = false;
+
+ if (!greedy_loop && choice_count == 2) {
+ GuardedAlternative alt1 = alternatives_->at(1);
+ if (alt1.guards() == NULL || alt1.guards()->length() == 0) {
+ RegExpNode* eats_anything_node = alt1.node();
+ if (eats_anything_node->GetSuccessorOfOmnivorousTextNode(compiler) ==
+ this) {
+ // At this point we know that we are at a non-greedy loop that will eat
+ // any character one at a time. Any non-anchored regexp has such a
+ // loop prepended to it in order to find where it starts. We look for
+ // a pattern of the form ...abc... where we can look 6 characters ahead
+ // and step forwards 3 if the character is not one of abc. Abc need
+ // not be atoms, they can be any reasonably limited character class or
+ // small alternation.
+ ASSERT(trace->is_trivial()); // This is the case on LoopChoiceNodes.
+ eats_at_least =
+ Min(kMaxLookaheadForBoyerMoore,
+ EatsAtLeast(kMaxLookaheadForBoyerMoore, 0, not_at_start));
+ if (eats_at_least >= 1) {
+ BoyerMooreLookahead bm(eats_at_least,
+ RegExpMacroAssembler::kTableSize,
+ compiler);
+ GuardedAlternative alt0 = alternatives_->at(0);
+ alt0.node()->FillInBMInfo(0, &bm, not_at_start);
+ skip_was_emitted = bm.EmitSkipInstructions(macro_assembler);
+ }
+ }
+ }
+ }
+
+ if (eats_at_least == kEatsAtLeastNotYetInitialized) {
+ // Save some time by looking at most one machine word ahead.
+ eats_at_least = EatsAtLeast(compiler->ascii() ? 4 : 2, 0, not_at_start);
+ }
+ int preload_characters = CalculatePreloadCharacters(compiler, eats_at_least);
+
+ bool preload_is_current = !skip_was_emitted &&
(current_trace->characters_preloaded() == preload_characters);
bool preload_has_checked_bounds = preload_is_current;
@@ -3597,22 +4313,20 @@ void RegExpEngine::DotPrint(const char* label,
// -------------------------------------------------------------------
// Tree to graph conversion
-static const int kSpaceRangeCount = 20;
-static const int kSpaceRangeAsciiCount = 4;
-static const uc16 kSpaceRanges[kSpaceRangeCount] = { 0x0009, 0x000D, 0x0020,
- 0x0020, 0x00A0, 0x00A0, 0x1680, 0x1680, 0x180E, 0x180E, 0x2000, 0x200A,
- 0x2028, 0x2029, 0x202F, 0x202F, 0x205F, 0x205F, 0x3000, 0x3000 };
+static const uc16 kSpaceRanges[] = { 0x0009, 0x000D, 0x0020, 0x0020, 0x00A0,
+ 0x00A0, 0x1680, 0x1680, 0x180E, 0x180E, 0x2000, 0x200A, 0x2028, 0x2029,
+ 0x202F, 0x202F, 0x205F, 0x205F, 0x3000, 0x3000, 0xFEFF, 0xFEFF };
+static const int kSpaceRangeCount = ARRAY_SIZE(kSpaceRanges);
-static const int kWordRangeCount = 8;
-static const uc16 kWordRanges[kWordRangeCount] = { '0', '9', 'A', 'Z', '_',
- '_', 'a', 'z' };
+static const uc16 kWordRanges[] = { '0', '9', 'A', 'Z', '_', '_', 'a', 'z' };
+static const int kWordRangeCount = ARRAY_SIZE(kWordRanges);
-static const int kDigitRangeCount = 2;
-static const uc16 kDigitRanges[kDigitRangeCount] = { '0', '9' };
+static const uc16 kDigitRanges[] = { '0', '9' };
+static const int kDigitRangeCount = ARRAY_SIZE(kDigitRanges);
-static const int kLineTerminatorRangeCount = 6;
-static const uc16 kLineTerminatorRanges[kLineTerminatorRangeCount] = { 0x000A,
- 0x000A, 0x000D, 0x000D, 0x2028, 0x2029 };
+static const uc16 kLineTerminatorRanges[] = { 0x000A, 0x000A, 0x000D, 0x000D,
+ 0x2028, 0x2029 };
+static const int kLineTerminatorRangeCount = ARRAY_SIZE(kLineTerminatorRanges);
RegExpNode* RegExpAtom::ToNode(RegExpCompiler* compiler,
RegExpNode* on_success) {
@@ -4079,7 +4793,7 @@ static void AddClassNegated(const uc16 *elmv,
int elmc,
ZoneList<CharacterRange>* ranges) {
ASSERT(elmv[0] != 0x0000);
- ASSERT(elmv[elmc-1] != String::kMaxUC16CharCode);
+ ASSERT(elmv[elmc-1] != String::kMaxUtf16CodeUnit);
uc16 last = 0x0000;
for (int i = 0; i < elmc; i += 2) {
ASSERT(last <= elmv[i] - 1);
@@ -4087,7 +4801,7 @@ static void AddClassNegated(const uc16 *elmv,
ranges->Add(CharacterRange(last, elmv[i] - 1));
last = elmv[i + 1] + 1;
}
- ranges->Add(CharacterRange(last, String::kMaxUC16CharCode));
+ ranges->Add(CharacterRange(last, String::kMaxUtf16CodeUnit));
}
@@ -4226,7 +4940,7 @@ void CharacterRange::AddCaseEquivalents(ZoneList<CharacterRange>* ranges,
// as a "singleton block").
unibrow::uchar range[unibrow::Ecma262UnCanonicalize::kMaxWidth];
int pos = bottom;
- while (pos < top) {
+ while (pos <= top) {
int length = isolate->jsregexp_canonrange()->get(pos, '\0', range);
uc16 block_end;
if (length == 0) {
@@ -4633,8 +5347,8 @@ void CharacterRange::Negate(ZoneList<CharacterRange>* ranges,
from = range.to();
i++;
}
- if (from < String::kMaxUC16CharCode) {
- negated_ranges->Add(CharacterRange(from + 1, String::kMaxUC16CharCode));
+ if (from < String::kMaxUtf16CodeUnit) {
+ negated_ranges->Add(CharacterRange(from + 1, String::kMaxUtf16CodeUnit));
}
}
@@ -4797,7 +5511,7 @@ void DispatchTable::AddRange(CharacterRange full_range, int value) {
entry->AddValue(value);
// Bail out if the last interval ended at 0xFFFF since otherwise
// adding 1 will wrap around to 0.
- if (entry->to() == String::kMaxUC16CharCode)
+ if (entry->to() == String::kMaxUtf16CodeUnit)
break;
ASSERT(entry->to() + 1 > current.from());
current.set_from(entry->to() + 1);
@@ -5093,6 +5807,76 @@ int BackReferenceNode::ComputeFirstCharacterSet(int budget) {
}
+void ChoiceNode::FillInBMInfo(
+ int offset, BoyerMooreLookahead* bm, bool not_at_start) {
+ ZoneList<GuardedAlternative>* alts = alternatives();
+ for (int i = 0; i < alts->length(); i++) {
+ GuardedAlternative& alt = alts->at(i);
+ if (alt.guards() != NULL && alt.guards()->length() != 0) {
+ bm->SetRest(offset); // Give up trying to fill in info.
+ return;
+ }
+ alt.node()->FillInBMInfo(offset, bm, not_at_start);
+ }
+}
+
+
+void TextNode::FillInBMInfo(
+ int offset, BoyerMooreLookahead* bm, bool not_at_start) {
+ if (offset >= bm->length()) return;
+ int max_char = bm->max_char();
+ for (int i = 0; i < elements()->length(); i++) {
+ if (offset >= bm->length()) return;
+ TextElement text = elements()->at(i);
+ if (text.type == TextElement::ATOM) {
+ RegExpAtom* atom = text.data.u_atom;
+ for (int j = 0; j < atom->length(); j++, offset++) {
+ if (offset >= bm->length()) return;
+ uc16 character = atom->data()[j];
+ if (bm->compiler()->ignore_case()) {
+ unibrow::uchar chars[unibrow::Ecma262UnCanonicalize::kMaxWidth];
+ int length = GetCaseIndependentLetters(
+ ISOLATE,
+ character,
+ bm->max_char() == String::kMaxAsciiCharCode,
+ chars);
+ for (int j = 0; j < length; j++) {
+ bm->Set(offset, chars[j]);
+ }
+ } else {
+ if (character <= max_char) bm->Set(offset, character);
+ }
+ }
+ } else {
+ ASSERT(text.type == TextElement::CHAR_CLASS);
+ RegExpCharacterClass* char_class = text.data.u_char_class;
+ ZoneList<CharacterRange>* ranges = char_class->ranges();
+ if (char_class->is_negated()) {
+ bm->SetAll(offset);
+ } else {
+ for (int k = 0; k < ranges->length(); k++) {
+ CharacterRange& range = ranges->at(k);
+ if (range.from() > max_char) continue;
+ int to = Min(max_char, static_cast<int>(range.to()));
+ if (to - range.from() >= BoyerMooreLookahead::kTooManyCharacters) {
+ bm->SetAll(offset);
+ break;
+ }
+ for (int m = range.from(); m <= to; m++) {
+ bm->Set(offset, m);
+ }
+ }
+ }
+ offset++;
+ }
+ }
+ if (offset >= bm->length()) return;
+ on_success()->FillInBMInfo(offset,
+ bm,
+ true); // Not at start after a text node.
+}
+
+
int TextNode::ComputeFirstCharacterSet(int budget) {
budget--;
if (budget >= 0) {
@@ -5117,7 +5901,7 @@ int TextNode::ComputeFirstCharacterSet(int budget) {
int new_length = length + 1;
if (length > 0) {
if (ranges->at(0).from() == 0) new_length--;
- if (ranges->at(length - 1).to() == String::kMaxUC16CharCode) {
+ if (ranges->at(length - 1).to() == String::kMaxUtf16CodeUnit) {
new_length--;
}
}
@@ -5207,14 +5991,14 @@ void DispatchTableConstructor::AddInverse(ZoneList<CharacterRange>* ranges) {
if (last < range.from())
AddRange(CharacterRange(last, range.from() - 1));
if (range.to() >= last) {
- if (range.to() == String::kMaxUC16CharCode) {
+ if (range.to() == String::kMaxUtf16CodeUnit) {
return;
} else {
last = range.to() + 1;
}
}
}
- AddRange(CharacterRange(last, String::kMaxUC16CharCode));
+ AddRange(CharacterRange(last, String::kMaxUtf16CodeUnit));
}
@@ -5250,15 +6034,30 @@ void DispatchTableConstructor::VisitAction(ActionNode* that) {
}
-RegExpEngine::CompilationResult RegExpEngine::Compile(RegExpCompileData* data,
- bool ignore_case,
- bool is_multiline,
- Handle<String> pattern,
- bool is_ascii) {
+RegExpEngine::CompilationResult RegExpEngine::Compile(
+ RegExpCompileData* data,
+ bool ignore_case,
+ bool is_multiline,
+ Handle<String> pattern,
+ Handle<String> sample_subject,
+ bool is_ascii) {
if ((data->capture_count + 1) * 2 - 1 > RegExpMacroAssembler::kMaxRegister) {
return IrregexpRegExpTooBig();
}
RegExpCompiler compiler(data->capture_count, ignore_case, is_ascii);
+
+ // Sample some characters from the middle of the string.
+ static const int kSampleSize = 128;
+
+ FlattenString(sample_subject);
+ int chars_sampled = 0;
+ int half_way = (sample_subject->length() - kSampleSize) / 2;
+ for (int i = Max(0, half_way);
+ i < sample_subject->length() && chars_sampled < kSampleSize;
+ i++, chars_sampled++) {
+ compiler.frequency_collator()->CountCharacter(sample_subject->Get(i));
+ }
+
// Wrap the body of the regexp in capture #0.
RegExpNode* captured_body = RegExpCapture::ToNode(data->tree,
0,
diff --git a/src/3rdparty/v8/src/jsregexp.h b/src/3rdparty/v8/src/jsregexp.h
index df110d1..288e995 100644
--- a/src/3rdparty/v8/src/jsregexp.h
+++ b/src/3rdparty/v8/src/jsregexp.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -35,9 +35,11 @@
namespace v8 {
namespace internal {
-
+class NodeVisitor;
+class RegExpCompiler;
class RegExpMacroAssembler;
-
+class RegExpNode;
+class RegExpTree;
class RegExpImpl {
public:
@@ -188,8 +190,10 @@ class RegExpImpl {
static String* last_ascii_string_;
static String* two_byte_cached_string_;
- static bool CompileIrregexp(Handle<JSRegExp> re, bool is_ascii);
- static inline bool EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii);
+ static bool CompileIrregexp(
+ Handle<JSRegExp> re, Handle<String> sample_subject, bool is_ascii);
+ static inline bool EnsureCompiledIrregexp(
+ Handle<JSRegExp> re, Handle<String> sample_subject, bool is_ascii);
// Set the subject cache. The previous string buffer is not deleted, so the
@@ -417,6 +421,90 @@ class DispatchTable : public ZoneObject {
};
+// Improve the speed that we scan for an initial point where a non-anchored
+// regexp can match by using a Boyer-Moore-like table. This is done by
+// identifying non-greedy non-capturing loops in the nodes that eat any
+// character one at a time. For example in the middle of the regexp
+// /foo[\s\S]*?bar/ we find such a loop. There is also such a loop implicitly
+// inserted at the start of any non-anchored regexp.
+//
+// When we have found such a loop we look ahead in the nodes to find the set of
+// characters that can come at given distances. For example for the regexp
+// /.?foo/ we know that there are at least 3 characters ahead of us, and the
+// sets of characters that can occur are [any, [f, o], [o]]. We find a range in
+// the lookahead info where the set of characters is reasonably constrained. In
+// our example this is from index 1 to 2 (0 is not constrained). We can now
+// look 3 characters ahead and if we don't find one of [f, o] (the union of
+// [f, o] and [o]) then we can skip forwards by the range size (in this case 2).
+//
+// For Unicode input strings we do the same, but modulo 128.
+//
+// We also look at the first string fed to the regexp and use that to get a hint
+// of the character frequencies in the inputs. This affects the assessment of
+// whether the set of characters is 'reasonably constrained'.
+//
+// We also have another lookahead mechanism (called quick check in the code),
+// which uses a wide load of multiple characters followed by a mask and compare
+// to determine whether a match is possible at this point.
+class BoyerMooreLookahead {
+ public:
+ BoyerMooreLookahead(int length, int map_length, RegExpCompiler* compiler);
+
+ int length() { return length_; }
+ int max_char() { return max_char_; }
+ RegExpCompiler* compiler() { return compiler_; }
+
+ static const int kTooManyCharacters = 32;
+
+ int Count(int map_number) {
+ ZoneList<bool>* map = bitmaps_->at(map_number);
+ if (map == NULL) return map_length_;
+ int count = 0;
+ for (int i = 0; i < map_length_; i++) {
+ if (map->at(i)) count++;
+ }
+ return count;
+ }
+
+ void Set(int map_number, int character) {
+ if (character > max_char_) return;
+ ZoneList<bool>* map = bitmaps_->at(map_number);
+ if (map == NULL) return;
+ map->at(character & (map_length_ - 1)) = true;
+ }
+
+ void SetAll(int map_number) {
+ bitmaps_->at(map_number) = NULL;
+ }
+
+ void SetRest(int from_map) {
+ for (int i = from_map; i < length_; i++) SetAll(i);
+ }
+ bool EmitSkipInstructions(RegExpMacroAssembler* masm);
+
+ private:
+ // This is the value obtained by EatsAtLeast. If we do not have at least this
+ // many characters left in the sample string then the match is bound to fail.
+ // Therefore it is OK to read a character this far ahead of the current match
+ // point.
+ int length_;
+ // We conservatively consider all character values modulo this length. For
+ // ASCII there is no loss of precision, since this has a value of 128.
+ int map_length_;
+ RegExpCompiler* compiler_;
+ // 0x7f for ASCII, 0xffff for UTF-16.
+ int max_char_;
+ ZoneList<ZoneList<bool>*>* bitmaps_;
+
+ int GetSkipTable(int min_lookahead,
+ int max_lookahead,
+ Handle<ByteArray> boolean_skip_table);
+ bool FindWorthwhileInterval(int* from, int* to);
+ int FindBestInterval(
+ int max_number_of_chars, int old_biggest_points, int* from, int* to);
+};
+
+
#define FOR_EACH_NODE_TYPE(VISIT) \
VISIT(End) \
VISIT(Action) \
@@ -633,8 +721,24 @@ class RegExpNode: public ZoneObject {
bool not_at_start) = 0;
static const int kNodeIsTooComplexForGreedyLoops = -1;
virtual int GreedyLoopTextLength() { return kNodeIsTooComplexForGreedyLoops; }
+ // Only returns the successor for a text node of length 1 that matches any
+ // character and that has no guards on it.
+ virtual RegExpNode* GetSuccessorOfOmnivorousTextNode(
+ RegExpCompiler* compiler) {
+ return NULL;
+ }
+
+ // Collects information on the possible code units (mod 128) that can match if
+ // we look forward. This is used for a Boyer-Moore-like string searching
+ // implementation. TODO(erikcorry): This should share more code with
+ // EatsAtLeast, GetQuickCheckDetails and ComputeFirstCharacterSet.
+ virtual void FillInBMInfo(
+ int offset, BoyerMooreLookahead* bm, bool not_at_start) {
+ UNREACHABLE();
+ }
+
Label* label() { return &label_; }
- // If non-generic code is generated for a node (ie the node is not at the
+ // If non-generic code is generated for a node (i.e. the node is not at the
// start of the trace) then it cannot be reused. This variable sets a limit
// on how often we allow that to happen before we insist on starting a new
// trace and generating generic code for a node that can be reused by flushing
@@ -745,6 +849,10 @@ class SeqRegExpNode: public RegExpNode {
: on_success_(on_success) { }
RegExpNode* on_success() { return on_success_; }
void set_on_success(RegExpNode* node) { on_success_ = node; }
+ virtual void FillInBMInfo(
+ int offset, BoyerMooreLookahead* bm, bool not_at_start) {
+ on_success_->FillInBMInfo(offset, bm, not_at_start);
+ }
private:
RegExpNode* on_success_;
};
@@ -791,6 +899,8 @@ class ActionNode: public SeqRegExpNode {
return on_success()->GetQuickCheckDetails(
details, compiler, filled_in, not_at_start);
}
+ virtual void FillInBMInfo(
+ int offset, BoyerMooreLookahead* bm, bool not_at_start);
Type type() { return type_; }
// TODO(erikcorry): We should allow some action nodes in greedy loops.
virtual int GreedyLoopTextLength() { return kNodeIsTooComplexForGreedyLoops; }
@@ -858,6 +968,10 @@ class TextNode: public SeqRegExpNode {
ZoneList<TextElement>* elements() { return elms_; }
void MakeCaseIndependent(bool is_ascii);
virtual int GreedyLoopTextLength();
+ virtual RegExpNode* GetSuccessorOfOmnivorousTextNode(
+ RegExpCompiler* compiler);
+ virtual void FillInBMInfo(
+ int offset, BoyerMooreLookahead* bm, bool not_at_start);
virtual TextNode* Clone() {
TextNode* result = new TextNode(*this);
result->CalculateOffsets();
@@ -926,6 +1040,8 @@ class AssertionNode: public SeqRegExpNode {
RegExpCompiler* compiler,
int filled_in,
bool not_at_start);
+ virtual void FillInBMInfo(
+ int offset, BoyerMooreLookahead* bm, bool not_at_start);
virtual int ComputeFirstCharacterSet(int budget);
virtual AssertionNode* Clone() { return new AssertionNode(*this); }
AssertionNodeType type() { return type_; }
@@ -959,6 +1075,12 @@ class BackReferenceNode: public SeqRegExpNode {
bool not_at_start) {
return;
}
+ virtual void FillInBMInfo(
+ int offset, BoyerMooreLookahead* bm, bool not_at_start) {
+ // Working out the set of characters that a backreference can match is too
+ // hard, so we just say that any character can match.
+ bm->SetRest(offset);
+ }
virtual BackReferenceNode* Clone() { return new BackReferenceNode(*this); }
virtual int ComputeFirstCharacterSet(int budget);
@@ -984,6 +1106,11 @@ class EndNode: public RegExpNode {
// Returning 0 from EatsAtLeast should ensure we never get here.
UNREACHABLE();
}
+ virtual void FillInBMInfo(
+ int offset, BoyerMooreLookahead* bm, bool not_at_start) {
+ // Returning 0 from EatsAtLeast should ensure we never get here.
+ UNREACHABLE();
+ }
virtual EndNode* Clone() { return new EndNode(*this); }
private:
Action action_;
@@ -1069,6 +1196,8 @@ class ChoiceNode: public RegExpNode {
RegExpCompiler* compiler,
int characters_filled_in,
bool not_at_start);
+ virtual void FillInBMInfo(
+ int offset, BoyerMooreLookahead* bm, bool not_at_start);
virtual ChoiceNode* Clone() { return new ChoiceNode(*this); }
bool being_calculated() { return being_calculated_; }
@@ -1087,7 +1216,7 @@ class ChoiceNode: public RegExpNode {
void GenerateGuard(RegExpMacroAssembler* macro_assembler,
Guard* guard,
Trace* trace);
- int CalculatePreloadCharacters(RegExpCompiler* compiler, bool not_at_start);
+ int CalculatePreloadCharacters(RegExpCompiler* compiler, int eats_at_least);
void EmitOutOfLineContinuation(RegExpCompiler* compiler,
Trace* trace,
GuardedAlternative alternative,
@@ -1117,6 +1246,10 @@ class NegativeLookaheadChoiceNode: public ChoiceNode {
RegExpCompiler* compiler,
int characters_filled_in,
bool not_at_start);
+ virtual void FillInBMInfo(
+ int offset, BoyerMooreLookahead* bm, bool not_at_start) {
+ alternatives_->at(1).node()->FillInBMInfo(offset, bm, not_at_start);
+ }
// For a negative lookahead we don't emit the quick check for the
// alternative that is expected to fail. This is because quick check code
// starts by loading enough characters for the alternative that takes fewest
@@ -1144,6 +1277,8 @@ class LoopChoiceNode: public ChoiceNode {
RegExpCompiler* compiler,
int characters_filled_in,
bool not_at_start);
+ virtual void FillInBMInfo(
+ int offset, BoyerMooreLookahead* bm, bool not_at_start);
virtual int ComputeFirstCharacterSet(int budget);
virtual LoopChoiceNode* Clone() { return new LoopChoiceNode(*this); }
RegExpNode* loop_node() { return loop_node_; }
@@ -1456,6 +1591,7 @@ class RegExpEngine: public AllStatic {
bool ignore_case,
bool multiline,
Handle<String> pattern,
+ Handle<String> sample_subject,
bool is_ascii);
static void DotPrint(const char* label, RegExpNode* node, bool ignore_case);
@@ -1464,12 +1600,12 @@ class RegExpEngine: public AllStatic {
class OffsetsVector {
public:
- explicit inline OffsetsVector(int num_registers)
+ inline OffsetsVector(int num_registers, Isolate* isolate)
: offsets_vector_length_(num_registers) {
if (offsets_vector_length_ > Isolate::kJSRegexpStaticOffsetsVectorSize) {
vector_ = NewArray<int>(offsets_vector_length_);
} else {
- vector_ = Isolate::Current()->jsregexp_static_offsets_vector();
+ vector_ = isolate->jsregexp_static_offsets_vector();
}
}
inline ~OffsetsVector() {
diff --git a/src/3rdparty/v8/src/lazy-instance.h b/src/3rdparty/v8/src/lazy-instance.h
new file mode 100644
index 0000000..4c09b0d
--- /dev/null
+++ b/src/3rdparty/v8/src/lazy-instance.h
@@ -0,0 +1,263 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// The LazyInstance<Type, Traits> class manages a single instance of Type,
+// which will be lazily created on the first time it's accessed. This class is
+// useful for places you would normally use a function-level static, but you
+// need to have guaranteed thread-safety. The Type constructor will only ever
+// be called once, even if two threads are racing to create the object. Get()
+// and Pointer() will always return the same, completely initialized instance.
+//
+// LazyInstance is completely thread safe, assuming that you create it safely.
+// The class was designed to be POD initialized, so it shouldn't require a
+// static constructor. It really only makes sense to declare a LazyInstance as
+// a global variable using the LAZY_INSTANCE_INITIALIZER initializer.
+//
+// LazyInstance is similar to Singleton, except it does not have the singleton
+// property. You can have multiple LazyInstance's of the same type, and each
+// will manage a unique instance. It also preallocates the space for Type, as
+// to avoid allocating the Type instance on the heap. This may help with the
+// performance of creating the instance, and reducing heap fragmentation. This
+// requires that Type be a complete type so we can determine the size. See
+// notes for advanced users below for more explanations.
+//
+// Example usage:
+// static LazyInstance<MyClass>::type my_instance = LAZY_INSTANCE_INITIALIZER;
+// void SomeMethod() {
+// my_instance.Get().SomeMethod(); // MyClass::SomeMethod()
+//
+// MyClass* ptr = my_instance.Pointer();
+// ptr->DoDoDo(); // MyClass::DoDoDo
+// }
+//
+// Additionally you can override the way your instance is constructed by
+// providing your own trait:
+// Example usage:
+// struct MyCreateTrait {
+// static void Construct(MyClass* allocated_ptr) {
+// new (allocated_ptr) MyClass(/* extra parameters... */);
+// }
+// };
+// static LazyInstance<MyClass, MyCreateTrait>::type my_instance =
+// LAZY_INSTANCE_INITIALIZER;
+//
+// WARNINGS:
+// - This implementation of LazyInstance is NOT THREAD-SAFE by default. See
+// ThreadSafeInitOnceTrait declared below for that.
+// - Lazy initialization comes with a cost. Make sure that you don't use it on
+// critical path. Consider adding your initialization code to a function
+// which is explicitly called once.
+//
+// Notes for advanced users:
+// LazyInstance can actually be used in two different ways:
+//
+// - "Static mode" which is the default mode since it is the most efficient
+// (no extra heap allocation). In this mode, the instance is statically
+// allocated (stored in the global data section at compile time).
+// The macro LAZY_STATIC_INSTANCE_INITIALIZER (= LAZY_INSTANCE_INITIALIZER)
+// must be used to initialize static lazy instances.
+//
+// - "Dynamic mode". In this mode, the instance is dynamically allocated and
+// constructed (using new) by default. This mode is useful if you have to
+// deal with some code already allocating the instance for you (e.g.
+// OS::Mutex() which returns a new private OS-dependent subclass of Mutex).
+// The macro LAZY_DYNAMIC_INSTANCE_INITIALIZER must be used to initialize
+// dynamic lazy instances.
+
+#ifndef V8_LAZY_INSTANCE_H_
+#define V8_LAZY_INSTANCE_H_
+
+#include "once.h"
+
+namespace v8 {
+namespace internal {
+
+#define LAZY_STATIC_INSTANCE_INITIALIZER { V8_ONCE_INIT, {} }
+#define LAZY_DYNAMIC_INSTANCE_INITIALIZER { V8_ONCE_INIT, 0 }
+
+// Default to static mode.
+#define LAZY_INSTANCE_INITIALIZER LAZY_STATIC_INSTANCE_INITIALIZER
+
+
+template <typename T>
+struct LeakyInstanceTrait {
+ static void Destroy(T* /* instance */) {}
+};
+
+
+// Traits that define how an instance is allocated and accessed.
+
+// TODO(kalmard): __alignof__ is only defined for GCC > 4.2. Fix alignment issue
+// on MIPS with other compilers.
+#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ > 2))
+#define LAZY_ALIGN(x) __attribute__((aligned(__alignof__(x))))
+#else
+#define LAZY_ALIGN(x)
+#endif
+
+template <typename T>
+struct StaticallyAllocatedInstanceTrait {
+ typedef char StorageType[sizeof(T)] LAZY_ALIGN(T);
+
+ static T* MutableInstance(StorageType* storage) {
+ return reinterpret_cast<T*>(storage);
+ }
+
+ template <typename ConstructTrait>
+ static void InitStorageUsingTrait(StorageType* storage) {
+ ConstructTrait::Construct(MutableInstance(storage));
+ }
+};
+
+#undef LAZY_ALIGN
+
+
+template <typename T>
+struct DynamicallyAllocatedInstanceTrait {
+ typedef T* StorageType;
+
+ static T* MutableInstance(StorageType* storage) {
+ return *storage;
+ }
+
+ template <typename CreateTrait>
+ static void InitStorageUsingTrait(StorageType* storage) {
+ *storage = CreateTrait::Create();
+ }
+};
+
+
+template <typename T>
+struct DefaultConstructTrait {
+ // Constructs the provided object which was already allocated.
+ static void Construct(T* allocated_ptr) {
+ new(allocated_ptr) T();
+ }
+};
+
+
+template <typename T>
+struct DefaultCreateTrait {
+ static T* Create() {
+ return new T();
+ }
+};
+
+
+struct ThreadSafeInitOnceTrait {
+ template <typename Function, typename Storage>
+ static void Init(OnceType* once, Function function, Storage storage) {
+ CallOnce(once, function, storage);
+ }
+};
+
+
+// Initialization trait for users who don't care about thread-safety.
+struct SingleThreadInitOnceTrait {
+ template <typename Function, typename Storage>
+ static void Init(OnceType* once, Function function, Storage storage) {
+ if (*once == ONCE_STATE_UNINITIALIZED) {
+ function(storage);
+ *once = ONCE_STATE_DONE;
+ }
+ }
+};
+
+
+// TODO(pliard): Handle instances destruction (using global destructors).
+template <typename T, typename AllocationTrait, typename CreateTrait,
+ typename InitOnceTrait, typename DestroyTrait /* not used yet. */>
+struct LazyInstanceImpl {
+ public:
+ typedef typename AllocationTrait::StorageType StorageType;
+
+ private:
+ static void InitInstance(StorageType* storage) {
+ AllocationTrait::template InitStorageUsingTrait<CreateTrait>(storage);
+ }
+
+ void Init() const {
+ InitOnceTrait::Init(
+ &once_,
+ // Casts to void* are needed here to avoid breaking strict aliasing
+ // rules.
+ reinterpret_cast<void(*)(void*)>(&InitInstance), // NOLINT
+ reinterpret_cast<void*>(&storage_));
+ }
+
+ public:
+ T* Pointer() {
+ Init();
+ return AllocationTrait::MutableInstance(&storage_);
+ }
+
+ const T& Get() const {
+ Init();
+ return *AllocationTrait::MutableInstance(&storage_);
+ }
+
+ mutable OnceType once_;
+ // Note that the previous field, OnceType, is an AtomicWord which guarantees
+ // 4-byte alignment of the storage field below. If compiling with GCC (>4.2),
+ // the LAZY_ALIGN macro above will guarantee correctness for any alignment.
+ mutable StorageType storage_;
+};
+
+
+template <typename T,
+ typename CreateTrait = DefaultConstructTrait<T>,
+ typename InitOnceTrait = SingleThreadInitOnceTrait,
+ typename DestroyTrait = LeakyInstanceTrait<T> >
+struct LazyStaticInstance {
+ typedef LazyInstanceImpl<T, StaticallyAllocatedInstanceTrait<T>,
+ CreateTrait, InitOnceTrait, DestroyTrait> type;
+};
+
+
+template <typename T,
+ typename CreateTrait = DefaultConstructTrait<T>,
+ typename InitOnceTrait = SingleThreadInitOnceTrait,
+ typename DestroyTrait = LeakyInstanceTrait<T> >
+struct LazyInstance {
+ // A LazyInstance is a LazyStaticInstance.
+ typedef typename LazyStaticInstance<T, CreateTrait, InitOnceTrait,
+ DestroyTrait>::type type;
+};
+
+
+template <typename T,
+ typename CreateTrait = DefaultConstructTrait<T>,
+ typename InitOnceTrait = SingleThreadInitOnceTrait,
+ typename DestroyTrait = LeakyInstanceTrait<T> >
+struct LazyDynamicInstance {
+ typedef LazyInstanceImpl<T, DynamicallyAllocatedInstanceTrait<T>,
+ CreateTrait, InitOnceTrait, DestroyTrait> type;
+};
+
+} } // namespace v8::internal
+
+#endif // V8_LAZY_INSTANCE_H_
diff --git a/src/3rdparty/v8/src/list-inl.h b/src/3rdparty/v8/src/list-inl.h
index e2c358c..7c2c83f 100644
--- a/src/3rdparty/v8/src/list-inl.h
+++ b/src/3rdparty/v8/src/list-inl.h
@@ -72,9 +72,9 @@ void List<T, P>::ResizeAdd(const T& element) {
template<typename T, class P>
void List<T, P>::ResizeAddInternal(const T& element) {
ASSERT(length_ >= capacity_);
- // Grow the list capacity by 50%, but make sure to let it grow
+ // Grow the list capacity by 100%, but make sure to let it grow
// even when the capacity is zero (possible initial case).
- int new_capacity = 1 + capacity_ + (capacity_ >> 1);
+ int new_capacity = 1 + 2 * capacity_;
// Since the element reference could be an element of the list, copy
// it out of the old backing storage before resizing.
T temp = element;
diff --git a/src/3rdparty/v8/src/list.h b/src/3rdparty/v8/src/list.h
index 57504e0..adddea4 100644
--- a/src/3rdparty/v8/src/list.h
+++ b/src/3rdparty/v8/src/list.h
@@ -67,7 +67,7 @@ class List {
// Returns a reference to the element at index i. This reference is
// not safe to use after operations that can change the list's
- // backing store (eg, Add).
+ // backing store (e.g. Add).
inline T& operator[](int i) const {
ASSERT(0 <= i);
ASSERT(i < length_);
diff --git a/src/3rdparty/v8/src/lithium-allocator.cc b/src/3rdparty/v8/src/lithium-allocator.cc
index 35281eb..4396c73 100644
--- a/src/3rdparty/v8/src/lithium-allocator.cc
+++ b/src/3rdparty/v8/src/lithium-allocator.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -46,29 +46,6 @@
namespace v8 {
namespace internal {
-
-#define DEFINE_OPERAND_CACHE(name, type) \
- name name::cache[name::kNumCachedOperands]; \
- void name::SetupCache() { \
- for (int i = 0; i < kNumCachedOperands; i++) { \
- cache[i].ConvertTo(type, i); \
- } \
- } \
- static bool name##_initialize() { \
- name::SetupCache(); \
- return true; \
- } \
- static bool name##_cache_initialized = name##_initialize();
-
-DEFINE_OPERAND_CACHE(LConstantOperand, CONSTANT_OPERAND)
-DEFINE_OPERAND_CACHE(LStackSlot, STACK_SLOT)
-DEFINE_OPERAND_CACHE(LDoubleStackSlot, DOUBLE_STACK_SLOT)
-DEFINE_OPERAND_CACHE(LRegister, REGISTER)
-DEFINE_OPERAND_CACHE(LDoubleRegister, DOUBLE_REGISTER)
-
-#undef DEFINE_OPERAND_CACHE
-
-
static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
return a.Value() < b.Value() ? a : b;
}
@@ -110,9 +87,9 @@ bool UsePosition::RegisterIsBeneficial() const {
}
-void UseInterval::SplitAt(LifetimePosition pos) {
+void UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
ASSERT(Contains(pos) && pos.Value() != start().Value());
- UseInterval* after = new UseInterval(pos, end_);
+ UseInterval* after = new(zone) UseInterval(pos, end_);
after->next_ = next_;
next_ = after;
end_ = pos;
@@ -149,11 +126,11 @@ bool LiveRange::HasOverlap(UseInterval* target) const {
#endif
-LiveRange::LiveRange(int id)
+LiveRange::LiveRange(int id, Zone* zone)
: id_(id),
spilled_(false),
+ is_double_(false),
assigned_register_(kInvalidAssignment),
- assigned_register_kind_(NONE),
last_interval_(NULL),
first_interval_(NULL),
first_pos_(NULL),
@@ -161,37 +138,39 @@ LiveRange::LiveRange(int id)
next_(NULL),
current_interval_(NULL),
last_processed_use_(NULL),
- spill_start_index_(kMaxInt) {
- spill_operand_ = new LUnallocated(LUnallocated::IGNORE);
-}
+ spill_operand_(new(zone) LOperand()),
+ spill_start_index_(kMaxInt) { }
-void LiveRange::set_assigned_register(int reg, RegisterKind register_kind) {
+void LiveRange::set_assigned_register(int reg,
+ RegisterKind register_kind,
+ Zone* zone) {
ASSERT(!HasRegisterAssigned() && !IsSpilled());
assigned_register_ = reg;
- assigned_register_kind_ = register_kind;
- ConvertOperands();
+ is_double_ = (register_kind == DOUBLE_REGISTERS);
+ ConvertOperands(zone);
}
-void LiveRange::MakeSpilled() {
+void LiveRange::MakeSpilled(Zone* zone) {
ASSERT(!IsSpilled());
ASSERT(TopLevel()->HasAllocatedSpillOperand());
spilled_ = true;
assigned_register_ = kInvalidAssignment;
- ConvertOperands();
+ ConvertOperands(zone);
}
bool LiveRange::HasAllocatedSpillOperand() const {
- return spill_operand_ != NULL && !spill_operand_->IsUnallocated();
+ ASSERT(spill_operand_ != NULL);
+ return !spill_operand_->IsIgnored();
}
void LiveRange::SetSpillOperand(LOperand* operand) {
ASSERT(!operand->IsUnallocated());
ASSERT(spill_operand_ != NULL);
- ASSERT(spill_operand_->IsUnallocated());
+ ASSERT(spill_operand_->IsIgnored());
spill_operand_->ConvertTo(operand->kind(), operand->index());
}
@@ -234,7 +213,8 @@ bool LiveRange::CanBeSpilled(LifetimePosition pos) {
// at the current or the immediate next position.
UsePosition* use_pos = NextRegisterPosition(pos);
if (use_pos == NULL) return true;
- return use_pos->pos().Value() > pos.NextInstruction().Value();
+ return
+ use_pos->pos().Value() > pos.NextInstruction().InstructionEnd().Value();
}
@@ -245,7 +225,7 @@ UsePosition* LiveRange::FirstPosWithHint() const {
}
-LOperand* LiveRange::CreateAssignedOperand() {
+LOperand* LiveRange::CreateAssignedOperand(Zone* zone) {
LOperand* op = NULL;
if (HasRegisterAssigned()) {
ASSERT(!IsSpilled());
@@ -259,7 +239,7 @@ LOperand* LiveRange::CreateAssignedOperand() {
op = TopLevel()->GetSpillOperand();
ASSERT(!op->IsUnallocated());
} else {
- LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE);
+ LUnallocated* unalloc = new(zone) LUnallocated(LUnallocated::NONE);
unalloc->set_virtual_register(id_);
op = unalloc;
}
@@ -291,7 +271,9 @@ void LiveRange::AdvanceLastProcessedMarker(
}
-void LiveRange::SplitAt(LifetimePosition position, LiveRange* result) {
+void LiveRange::SplitAt(LifetimePosition position,
+ LiveRange* result,
+ Zone* zone) {
ASSERT(Start().Value() < position.Value());
ASSERT(result->IsEmpty());
// Find the last interval that ends before the position. If the
@@ -310,7 +292,7 @@ void LiveRange::SplitAt(LifetimePosition position, LiveRange* result) {
while (current != NULL) {
if (current->Contains(position)) {
- current->SplitAt(position);
+ current->SplitAt(position, zone);
break;
}
UseInterval* next = current->next();
@@ -403,7 +385,9 @@ void LiveRange::ShortenTo(LifetimePosition start) {
}
-void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end) {
+void LiveRange::EnsureInterval(LifetimePosition start,
+ LifetimePosition end,
+ Zone* zone) {
LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
id_,
start.Value(),
@@ -417,7 +401,7 @@ void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end) {
first_interval_ = first_interval_->next();
}
- UseInterval* new_interval = new UseInterval(start, new_end);
+ UseInterval* new_interval = new(zone) UseInterval(start, new_end);
new_interval->next_ = first_interval_;
first_interval_ = new_interval;
if (new_interval->next() == NULL) {
@@ -426,20 +410,22 @@ void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end) {
}
-void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end) {
+void LiveRange::AddUseInterval(LifetimePosition start,
+ LifetimePosition end,
+ Zone* zone) {
LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
id_,
start.Value(),
end.Value());
if (first_interval_ == NULL) {
- UseInterval* interval = new UseInterval(start, end);
+ UseInterval* interval = new(zone) UseInterval(start, end);
first_interval_ = interval;
last_interval_ = interval;
} else {
if (end.Value() == first_interval_->start().Value()) {
first_interval_->set_start(start);
} else if (end.Value() < first_interval_->start().Value()) {
- UseInterval* interval = new UseInterval(start, end);
+ UseInterval* interval = new(zone) UseInterval(start, end);
interval->set_next(first_interval_);
first_interval_ = interval;
} else {
@@ -455,11 +441,12 @@ void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end) {
UsePosition* LiveRange::AddUsePosition(LifetimePosition pos,
- LOperand* operand) {
+ LOperand* operand,
+ Zone* zone) {
LAllocator::TraceAlloc("Add to live range %d use position %d\n",
id_,
pos.Value());
- UsePosition* use_pos = new UsePosition(pos, operand);
+ UsePosition* use_pos = new(zone) UsePosition(pos, operand);
UsePosition* prev = NULL;
UsePosition* current = first_pos_;
while (current != NULL && current->pos().Value() < pos.Value()) {
@@ -479,8 +466,8 @@ UsePosition* LiveRange::AddUsePosition(LifetimePosition pos,
}
-void LiveRange::ConvertOperands() {
- LOperand* op = CreateAssignedOperand();
+void LiveRange::ConvertOperands(Zone* zone) {
+ LOperand* op = CreateAssignedOperand(zone);
UsePosition* use_pos = first_pos();
while (use_pos != NULL) {
ASSERT(Start().Value() <= use_pos->pos().Value() &&
@@ -544,7 +531,8 @@ LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
LAllocator::LAllocator(int num_values, HGraph* graph)
- : chunk_(NULL),
+ : zone_(graph->zone()),
+ chunk_(NULL),
live_in_sets_(graph->blocks()->length()),
live_ranges_(num_values * 2),
fixed_live_ranges_(NULL),
@@ -555,10 +543,11 @@ LAllocator::LAllocator(int num_values, HGraph* graph)
reusable_slots_(8),
next_virtual_register_(num_values),
first_artificial_register_(num_values),
- mode_(NONE),
+ mode_(GENERAL_REGISTERS),
num_registers_(-1),
graph_(graph),
- has_osr_entry_(false) {}
+ has_osr_entry_(false),
+ allocation_ok_(true) { }
void LAllocator::InitializeLivenessAnalysis() {
@@ -572,7 +561,7 @@ void LAllocator::InitializeLivenessAnalysis() {
BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
// Compute live out for the given block, except not including backward
// successor edges.
- BitVector* live_out = new BitVector(next_virtual_register_);
+ BitVector* live_out = new(zone_) BitVector(next_virtual_register_, zone_);
// Process all successor blocks.
for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
@@ -610,7 +599,7 @@ void LAllocator::AddInitialIntervals(HBasicBlock* block,
while (!iterator.Done()) {
int operand_index = iterator.Current();
LiveRange* range = LiveRangeFor(operand_index);
- range->AddUseInterval(start, end);
+ range->AddUseInterval(start, end, zone_);
iterator.Advance();
}
}
@@ -652,9 +641,9 @@ LiveRange* LAllocator::FixedLiveRangeFor(int index) {
ASSERT(index < Register::kNumAllocatableRegisters);
LiveRange* result = fixed_live_ranges_[index];
if (result == NULL) {
- result = new LiveRange(FixedLiveRangeID(index));
+ result = new(zone_) LiveRange(FixedLiveRangeID(index), zone_);
ASSERT(result->IsFixed());
- result->set_assigned_register(index, GENERAL_REGISTERS);
+ result->set_assigned_register(index, GENERAL_REGISTERS, zone_);
fixed_live_ranges_[index] = result;
}
return result;
@@ -665,9 +654,9 @@ LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
ASSERT(index < DoubleRegister::kNumAllocatableRegisters);
LiveRange* result = fixed_double_live_ranges_[index];
if (result == NULL) {
- result = new LiveRange(FixedDoubleLiveRangeID(index));
+ result = new(zone_) LiveRange(FixedDoubleLiveRangeID(index), zone_);
ASSERT(result->IsFixed());
- result->set_assigned_register(index, DOUBLE_REGISTERS);
+ result->set_assigned_register(index, DOUBLE_REGISTERS, zone_);
fixed_double_live_ranges_[index] = result;
}
return result;
@@ -680,7 +669,7 @@ LiveRange* LAllocator::LiveRangeFor(int index) {
}
LiveRange* result = live_ranges_[index];
if (result == NULL) {
- result = new LiveRange(index);
+ result = new(zone_) LiveRange(index, zone_);
live_ranges_[index] = result;
}
return result;
@@ -696,7 +685,7 @@ LGap* LAllocator::GetLastGap(HBasicBlock* block) {
HPhi* LAllocator::LookupPhi(LOperand* operand) const {
if (!operand->IsUnallocated()) return NULL;
- int index = operand->VirtualRegister();
+ int index = LUnallocated::cast(operand)->virtual_register();
HValue* instr = graph_->LookupValue(index);
if (instr != NULL && instr->IsPhi()) {
return HPhi::cast(instr);
@@ -726,15 +715,15 @@ void LAllocator::Define(LifetimePosition position,
if (range->IsEmpty() || range->Start().Value() > position.Value()) {
// Can happen if there is a definition without use.
- range->AddUseInterval(position, position.NextInstruction());
- range->AddUsePosition(position.NextInstruction(), NULL);
+ range->AddUseInterval(position, position.NextInstruction(), zone_);
+ range->AddUsePosition(position.NextInstruction(), NULL, zone_);
} else {
range->ShortenTo(position);
}
if (operand->IsUnallocated()) {
LUnallocated* unalloc_operand = LUnallocated::cast(operand);
- range->AddUsePosition(position, unalloc_operand)->set_hint(hint);
+ range->AddUsePosition(position, unalloc_operand, zone_)->set_hint(hint);
}
}
@@ -747,9 +736,9 @@ void LAllocator::Use(LifetimePosition block_start,
if (range == NULL) return;
if (operand->IsUnallocated()) {
LUnallocated* unalloc_operand = LUnallocated::cast(operand);
- range->AddUsePosition(position, unalloc_operand)->set_hint(hint);
+ range->AddUsePosition(position, unalloc_operand, zone_)->set_hint(hint);
}
- range->AddUseInterval(block_start, position);
+ range->AddUseInterval(block_start, position, zone_);
}
@@ -764,7 +753,8 @@ void LAllocator::AddConstraintsGapMove(int index,
LMoveOperands cur = move_operands->at(i);
LOperand* cur_to = cur.destination();
if (cur_to->IsUnallocated()) {
- if (cur_to->VirtualRegister() == from->VirtualRegister()) {
+ if (LUnallocated::cast(cur_to)->virtual_register() ==
+ LUnallocated::cast(from)->virtual_register()) {
move->AddMove(cur.source(), to);
return;
}
@@ -785,6 +775,7 @@ void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
if (i < end) instr = InstructionAt(i + 1);
if (i > start) prev_instr = InstructionAt(i - 1);
MeetConstraintsBetween(prev_instr, instr, i);
+ if (!AllocationOk()) return;
}
}
}
@@ -806,11 +797,11 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
// Handle fixed output operand.
if (first != NULL && first->Output() != NULL) {
LUnallocated* first_output = LUnallocated::cast(first->Output());
- LiveRange* range = LiveRangeFor(first_output->VirtualRegister());
+ LiveRange* range = LiveRangeFor(first_output->virtual_register());
bool assigned = false;
if (first_output->HasFixedPolicy()) {
LUnallocated* output_copy = first_output->CopyUnconstrained();
- bool is_tagged = HasTaggedValue(first_output->VirtualRegister());
+ bool is_tagged = HasTaggedValue(first_output->virtual_register());
AllocateFixed(first_output, gap_index, is_tagged);
// This value is produced on the stack, we never need to spill it.
@@ -841,7 +832,7 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
LUnallocated* cur_input = LUnallocated::cast(it.Current());
if (cur_input->HasFixedPolicy()) {
LUnallocated* input_copy = cur_input->CopyUnconstrained();
- bool is_tagged = HasTaggedValue(cur_input->VirtualRegister());
+ bool is_tagged = HasTaggedValue(cur_input->virtual_register());
AllocateFixed(cur_input, gap_index + 1, is_tagged);
AddConstraintsGapMove(gap_index, input_copy, cur_input);
} else if (cur_input->policy() == LUnallocated::WRITABLE_REGISTER) {
@@ -850,12 +841,14 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
ASSERT(!cur_input->IsUsedAtStart());
LUnallocated* input_copy = cur_input->CopyUnconstrained();
- cur_input->set_virtual_register(next_virtual_register_++);
+ cur_input->set_virtual_register(GetVirtualRegister());
+ if (!AllocationOk()) return;
if (RequiredRegisterKind(input_copy->virtual_register()) ==
DOUBLE_REGISTERS) {
double_artificial_registers_.Add(
- cur_input->virtual_register() - first_artificial_register_);
+ cur_input->virtual_register() - first_artificial_register_,
+ zone_);
}
AddConstraintsGapMove(gap_index, input_copy, cur_input);
@@ -868,8 +861,8 @@ void LAllocator::MeetConstraintsBetween(LInstruction* first,
LUnallocated* second_output = LUnallocated::cast(second->Output());
if (second_output->HasSameAsInputPolicy()) {
LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
- int output_vreg = second_output->VirtualRegister();
- int input_vreg = cur_input->VirtualRegister();
+ int output_vreg = second_output->virtual_register();
+ int input_vreg = cur_input->virtual_register();
LUnallocated* input_copy = cur_input->CopyUnconstrained();
cur_input->set_virtual_register(second_output->virtual_register());
@@ -924,9 +917,9 @@ void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
}
} else {
if (to->IsUnallocated()) {
- if (live->Contains(to->VirtualRegister())) {
+ if (live->Contains(LUnallocated::cast(to)->virtual_register())) {
Define(curr_position, to, from);
- live->Remove(to->VirtualRegister());
+ live->Remove(LUnallocated::cast(to)->virtual_register());
} else {
cur->Eliminate();
continue;
@@ -937,7 +930,7 @@ void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
}
Use(block_start_position, curr_position, from, hint);
if (from->IsUnallocated()) {
- live->Add(from->VirtualRegister());
+ live->Add(LUnallocated::cast(from)->virtual_register());
}
}
} else {
@@ -947,7 +940,9 @@ void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
if (instr != NULL) {
LOperand* output = instr->Output();
if (output != NULL) {
- if (output->IsUnallocated()) live->Remove(output->VirtualRegister());
+ if (output->IsUnallocated()) {
+ live->Remove(LUnallocated::cast(output)->virtual_register());
+ }
Define(curr_position, output, NULL);
}
@@ -957,7 +952,8 @@ void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
output->index() != i) {
LiveRange* range = FixedLiveRangeFor(i);
range->AddUseInterval(curr_position,
- curr_position.InstructionEnd());
+ curr_position.InstructionEnd(),
+ zone_);
}
}
}
@@ -968,7 +964,8 @@ void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
output->index() != i) {
LiveRange* range = FixedDoubleLiveRangeFor(i);
range->AddUseInterval(curr_position,
- curr_position.InstructionEnd());
+ curr_position.InstructionEnd(),
+ zone_);
}
}
}
@@ -985,7 +982,9 @@ void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
}
Use(block_start_position, use_pos, input, NULL);
- if (input->IsUnallocated()) live->Add(input->VirtualRegister());
+ if (input->IsUnallocated()) {
+ live->Add(LUnallocated::cast(input)->virtual_register());
+ }
}
for (TempIterator it(instr); !it.Done(); it.Advance()) {
@@ -1014,7 +1013,7 @@ void LAllocator::ResolvePhis(HBasicBlock* block) {
const ZoneList<HPhi*>* phis = block->phis();
for (int i = 0; i < phis->length(); ++i) {
HPhi* phi = phis->at(i);
- LUnallocated* phi_operand = new LUnallocated(LUnallocated::NONE);
+ LUnallocated* phi_operand = new(zone_) LUnallocated(LUnallocated::NONE);
phi_operand->set_virtual_register(phi->id());
for (int j = 0; j < phi->OperandCount(); ++j) {
HValue* op = phi->OperandAt(j);
@@ -1024,7 +1023,7 @@ void LAllocator::ResolvePhis(HBasicBlock* block) {
operand = chunk_->DefineConstantOperand(constant);
} else {
ASSERT(!op->EmitAtUses());
- LUnallocated* unalloc = new LUnallocated(LUnallocated::ANY);
+ LUnallocated* unalloc = new(zone_) LUnallocated(LUnallocated::ANY);
unalloc->set_virtual_register(op->id());
operand = unalloc;
}
@@ -1063,34 +1062,39 @@ void LAllocator::ResolvePhis(HBasicBlock* block) {
}
-void LAllocator::Allocate(LChunk* chunk) {
+bool LAllocator::Allocate(LChunk* chunk) {
ASSERT(chunk_ == NULL);
chunk_ = chunk;
MeetRegisterConstraints();
+ if (!AllocationOk()) return false;
ResolvePhis();
BuildLiveRanges();
AllocateGeneralRegisters();
+ if (!AllocationOk()) return false;
AllocateDoubleRegisters();
+ if (!AllocationOk()) return false;
PopulatePointerMaps();
if (has_osr_entry_) ProcessOsrEntry();
ConnectRanges();
ResolveControlFlow();
+ return true;
}
void LAllocator::MeetRegisterConstraints() {
- HPhase phase("Register constraints", chunk_);
+ HPhase phase("L_Register constraints", chunk_);
first_artificial_register_ = next_virtual_register_;
const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
for (int i = 0; i < blocks->length(); ++i) {
HBasicBlock* block = blocks->at(i);
MeetRegisterConstraints(block);
+ if (!AllocationOk()) return;
}
}
void LAllocator::ResolvePhis() {
- HPhase phase("Resolve phis", chunk_);
+ HPhase phase("L_Resolve phis", chunk_);
// Process the blocks in reverse order.
const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
@@ -1126,8 +1130,8 @@ void LAllocator::ResolveControlFlow(LiveRange* range,
if (cur_cover->IsSpilled()) return;
ASSERT(pred_cover != NULL && cur_cover != NULL);
if (pred_cover != cur_cover) {
- LOperand* pred_op = pred_cover->CreateAssignedOperand();
- LOperand* cur_op = cur_cover->CreateAssignedOperand();
+ LOperand* pred_op = pred_cover->CreateAssignedOperand(zone_);
+ LOperand* cur_op = cur_cover->CreateAssignedOperand(zone_);
if (!pred_op->Equals(cur_op)) {
LGap* gap = NULL;
if (block->predecessors()->length() == 1) {
@@ -1180,7 +1184,7 @@ HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
void LAllocator::ConnectRanges() {
- HPhase phase("Connect ranges", this);
+ HPhase phase("L_Connect ranges", this);
for (int i = 0; i < live_ranges()->length(); ++i) {
LiveRange* first_range = live_ranges()->at(i);
if (first_range == NULL || first_range->parent() != NULL) continue;
@@ -1199,8 +1203,8 @@ void LAllocator::ConnectRanges() {
}
if (should_insert) {
LParallelMove* move = GetConnectingParallelMove(pos);
- LOperand* prev_operand = first_range->CreateAssignedOperand();
- LOperand* cur_operand = second_range->CreateAssignedOperand();
+ LOperand* prev_operand = first_range->CreateAssignedOperand(zone_);
+ LOperand* cur_operand = second_range->CreateAssignedOperand(zone_);
move->AddMove(prev_operand, cur_operand);
}
}
@@ -1220,7 +1224,7 @@ bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
void LAllocator::ResolveControlFlow() {
- HPhase phase("Resolve control flow", this);
+ HPhase phase("L_Resolve control flow", this);
const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
for (int block_id = 1; block_id < blocks->length(); ++block_id) {
HBasicBlock* block = blocks->at(block_id);
@@ -1241,7 +1245,7 @@ void LAllocator::ResolveControlFlow() {
void LAllocator::BuildLiveRanges() {
- HPhase phase("Build live ranges", this);
+ HPhase phase("L_Build live ranges", this);
InitializeLivenessAnalysis();
// Process the blocks in reverse order.
const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
@@ -1269,7 +1273,8 @@ void LAllocator::BuildLiveRanges() {
LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
for (int j = 0; j < move->move_operands()->length(); ++j) {
LOperand* to = move->move_operands()->at(j).destination();
- if (to->IsUnallocated() && to->VirtualRegister() == phi->id()) {
+ if (to->IsUnallocated() &&
+ LUnallocated::cast(to)->virtual_register() == phi->id()) {
hint = move->move_operands()->at(j).source();
phi_operand = to;
break;
@@ -1302,7 +1307,7 @@ void LAllocator::BuildLiveRanges() {
while (!iterator.Done()) {
int operand_index = iterator.Current();
LiveRange* range = LiveRangeFor(operand_index);
- range->EnsureInterval(start, end);
+ range->EnsureInterval(start, end, zone_);
iterator.Advance();
}
@@ -1345,7 +1350,7 @@ bool LAllocator::SafePointsAreInOrder() const {
void LAllocator::PopulatePointerMaps() {
- HPhase phase("Populate pointer maps", this);
+ HPhase phase("L_Populate pointer maps", this);
const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
ASSERT(SafePointsAreInOrder());
@@ -1423,7 +1428,7 @@ void LAllocator::PopulatePointerMaps() {
TraceAlloc("Pointer in register for range %d (start at %d) "
"at safe point %d\n",
cur->id(), cur->Start().Value(), safe_point);
- LOperand* operand = cur->CreateAssignedOperand();
+ LOperand* operand = cur->CreateAssignedOperand(zone_);
ASSERT(!operand->IsStackSlot());
map->RecordPointer(operand);
}
@@ -1464,15 +1469,14 @@ void LAllocator::ProcessOsrEntry() {
void LAllocator::AllocateGeneralRegisters() {
- HPhase phase("Allocate general registers", this);
+ HPhase phase("L_Allocate general registers", this);
num_registers_ = Register::kNumAllocatableRegisters;
- mode_ = GENERAL_REGISTERS;
AllocateRegisters();
}
void LAllocator::AllocateDoubleRegisters() {
- HPhase phase("Allocate double registers", this);
+ HPhase phase("L_Allocate double registers", this);
num_registers_ = DoubleRegister::kNumAllocatableRegisters;
mode_ = DOUBLE_REGISTERS;
AllocateRegisters();
@@ -1480,7 +1484,6 @@ void LAllocator::AllocateDoubleRegisters() {
void LAllocator::AllocateRegisters() {
- ASSERT(mode_ != NONE);
ASSERT(unhandled_live_ranges_.is_empty());
for (int i = 0; i < live_ranges_.length(); ++i) {
@@ -1539,6 +1542,7 @@ void LAllocator::AllocateRegisters() {
// Do not spill live range eagerly if use position that can benefit from
// the register is too close to the start of live range.
SpillBetween(current, current->Start(), pos->pos());
+ if (!AllocationOk()) return;
ASSERT(UnhandledIsSorted());
continue;
}
@@ -1569,9 +1573,10 @@ void LAllocator::AllocateRegisters() {
ASSERT(!current->HasRegisterAssigned() && !current->IsSpilled());
bool result = TryAllocateFreeReg(current);
- if (!result) {
- AllocateBlockedReg(current);
- }
+ if (!AllocationOk()) return;
+
+ if (!result) AllocateBlockedReg(current);
+ if (!AllocationOk()) return;
if (current->HasRegisterAssigned()) {
AddToActive(current);
@@ -1585,7 +1590,6 @@ void LAllocator::AllocateRegisters() {
const char* LAllocator::RegisterName(int allocation_index) {
- ASSERT(mode_ != NONE);
if (mode_ == GENERAL_REGISTERS) {
return Register::AllocationIndexToString(allocation_index);
} else {
@@ -1626,29 +1630,6 @@ RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
}
-void LAllocator::RecordDefinition(HInstruction* instr, LUnallocated* operand) {
- operand->set_virtual_register(instr->id());
-}
-
-
-void LAllocator::RecordTemporary(LUnallocated* operand) {
- ASSERT(next_virtual_register_ < LUnallocated::kMaxVirtualRegisters);
- if (!operand->HasFixedPolicy()) {
- operand->set_virtual_register(next_virtual_register_++);
- }
-}
-
-
-void LAllocator::RecordUse(HValue* value, LUnallocated* operand) {
- operand->set_virtual_register(value->id());
-}
-
-
-int LAllocator::max_initial_value_ids() {
- return LUnallocated::kMaxVirtualRegisters / 32;
-}
-
-
void LAllocator::AddToActive(LiveRange* range) {
TraceAlloc("Add live range %d to active\n", range->id());
active_live_ranges_.Add(range);
@@ -1819,7 +1800,7 @@ bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
TraceAlloc("Assigning preferred reg %s to live range %d\n",
RegisterName(register_index),
current->id());
- current->set_assigned_register(register_index, mode_);
+ current->set_assigned_register(register_index, mode_, zone_);
return true;
}
}
@@ -1843,7 +1824,8 @@ bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
if (pos.Value() < current->End().Value()) {
// Register reg is available at the range start but becomes blocked before
// the range end. Split current at position where it becomes blocked.
- LiveRange* tail = SplitAt(current, pos);
+ LiveRange* tail = SplitRangeAt(current, pos);
+ if (!AllocationOk()) return false;
AddToUnhandledSorted(tail);
}
@@ -1854,7 +1836,7 @@ bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
TraceAlloc("Assigning free reg %s to live range %d\n",
RegisterName(reg),
current->id());
- current->set_assigned_register(reg, mode_);
+ current->set_assigned_register(reg, mode_, zone_);
return true;
}
@@ -1944,7 +1926,7 @@ void LAllocator::AllocateBlockedReg(LiveRange* current) {
TraceAlloc("Assigning blocked reg %s to live range %d\n",
RegisterName(reg),
current->id());
- current->set_assigned_register(reg, mode_);
+ current->set_assigned_register(reg, mode_, zone_);
// This register was not free. Thus we need to find and spill
// parts of active and inactive live regions that use the same register
@@ -1998,7 +1980,7 @@ bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
}
-LiveRange* LAllocator::SplitAt(LiveRange* range, LifetimePosition pos) {
+LiveRange* LAllocator::SplitRangeAt(LiveRange* range, LifetimePosition pos) {
ASSERT(!range->IsFixed());
TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
@@ -2009,8 +1991,9 @@ LiveRange* LAllocator::SplitAt(LiveRange* range, LifetimePosition pos) {
ASSERT(pos.IsInstructionStart() ||
!chunk_->instructions()->at(pos.InstructionIndex())->IsControl());
- LiveRange* result = LiveRangeFor(next_virtual_register_++);
- range->SplitAt(pos, result);
+ LiveRange* result = LiveRangeFor(GetVirtualRegister());
+ if (!AllocationOk()) return NULL;
+ range->SplitAt(pos, result, zone_);
return result;
}
@@ -2026,7 +2009,7 @@ LiveRange* LAllocator::SplitBetween(LiveRange* range,
LifetimePosition split_pos = FindOptimalSplitPos(start, end);
ASSERT(split_pos.Value() >= start.Value());
- return SplitAt(range, split_pos);
+ return SplitRangeAt(range, split_pos);
}
@@ -2065,7 +2048,8 @@ LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
- LiveRange* second_part = SplitAt(range, pos);
+ LiveRange* second_part = SplitRangeAt(range, pos);
+ if (!AllocationOk()) return;
Spill(second_part);
}
@@ -2074,7 +2058,8 @@ void LAllocator::SpillBetween(LiveRange* range,
LifetimePosition start,
LifetimePosition end) {
ASSERT(start.Value() < end.Value());
- LiveRange* second_part = SplitAt(range, start);
+ LiveRange* second_part = SplitRangeAt(range, start);
+ if (!AllocationOk()) return;
if (second_part->Start().Value() < end.Value()) {
// The split result intersects with [start, end[.
@@ -2107,7 +2092,7 @@ void LAllocator::Spill(LiveRange* range) {
if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == DOUBLE_REGISTERS);
first->SetSpillOperand(op);
}
- range->MakeSpilled();
+ range->MakeSpilled(zone_);
}
diff --git a/src/3rdparty/v8/src/lithium-allocator.h b/src/3rdparty/v8/src/lithium-allocator.h
index e4e6497..f5ab055 100644
--- a/src/3rdparty/v8/src/lithium-allocator.h
+++ b/src/3rdparty/v8/src/lithium-allocator.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -146,7 +146,6 @@ class LifetimePosition {
enum RegisterKind {
- NONE,
GENERAL_REGISTERS,
DOUBLE_REGISTERS
};
@@ -217,7 +216,7 @@ class UseInterval: public ZoneObject {
// Split this interval at the given position without effecting the
// live range that owns it. The interval must contain the position.
- void SplitAt(LifetimePosition pos);
+ void SplitAt(LifetimePosition pos, Zone* zone);
// If this interval intersects with other return smallest position
// that belongs to both of them.
@@ -278,7 +277,7 @@ class LiveRange: public ZoneObject {
public:
static const int kInvalidAssignment = 0x7fffffff;
- explicit LiveRange(int id);
+ LiveRange(int id, Zone* zone);
UseInterval* first_interval() const { return first_interval_; }
UsePosition* first_pos() const { return first_pos_; }
@@ -289,11 +288,13 @@ class LiveRange: public ZoneObject {
int id() const { return id_; }
bool IsFixed() const { return id_ < 0; }
bool IsEmpty() const { return first_interval() == NULL; }
- LOperand* CreateAssignedOperand();
+ LOperand* CreateAssignedOperand(Zone* zone);
int assigned_register() const { return assigned_register_; }
int spill_start_index() const { return spill_start_index_; }
- void set_assigned_register(int reg, RegisterKind register_kind);
- void MakeSpilled();
+ void set_assigned_register(int reg,
+ RegisterKind register_kind,
+ Zone* zone);
+ void MakeSpilled(Zone* zone);
// Returns use position in this live range that follows both start
// and last processed use position.
@@ -317,9 +318,9 @@ class LiveRange: public ZoneObject {
// the range.
// All uses following the given position will be moved from this
// live range to the result live range.
- void SplitAt(LifetimePosition position, LiveRange* result);
+ void SplitAt(LifetimePosition position, LiveRange* result, Zone* zone);
- bool IsDouble() const { return assigned_register_kind_ == DOUBLE_REGISTERS; }
+ bool IsDouble() const { return is_double_; }
bool HasRegisterAssigned() const {
return assigned_register_ != kInvalidAssignment;
}
@@ -356,9 +357,15 @@ class LiveRange: public ZoneObject {
LifetimePosition FirstIntersection(LiveRange* other);
// Add a new interval or a new use position to this live range.
- void EnsureInterval(LifetimePosition start, LifetimePosition end);
- void AddUseInterval(LifetimePosition start, LifetimePosition end);
- UsePosition* AddUsePosition(LifetimePosition pos, LOperand* operand);
+ void EnsureInterval(LifetimePosition start,
+ LifetimePosition end,
+ Zone* zone);
+ void AddUseInterval(LifetimePosition start,
+ LifetimePosition end,
+ Zone* zone);
+ UsePosition* AddUsePosition(LifetimePosition pos,
+ LOperand* operand,
+ Zone* zone);
// Shorten the most recently added interval by setting a new start.
void ShortenTo(LifetimePosition start);
@@ -370,15 +377,15 @@ class LiveRange: public ZoneObject {
#endif
private:
- void ConvertOperands();
+ void ConvertOperands(Zone* zone);
UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
void AdvanceLastProcessedMarker(UseInterval* to_start_of,
LifetimePosition but_not_past) const;
int id_;
bool spilled_;
+ bool is_double_;
int assigned_register_;
- RegisterKind assigned_register_kind_;
UseInterval* last_interval_;
UseInterval* first_interval_;
UsePosition* first_pos_;
@@ -401,8 +408,8 @@ class GrowableBitVector BASE_EMBEDDED {
return bits_->Contains(value);
}
- void Add(int value) {
- EnsureCapacity(value);
+ void Add(int value, Zone* zone) {
+ EnsureCapacity(value, zone);
bits_->Add(value);
}
@@ -413,11 +420,11 @@ class GrowableBitVector BASE_EMBEDDED {
return bits_ != NULL && bits_->length() > value;
}
- void EnsureCapacity(int value) {
+ void EnsureCapacity(int value, Zone* zone) {
if (InBitsRange(value)) return;
int new_length = bits_ == NULL ? kInitialLength : bits_->length();
while (new_length <= value) new_length *= 2;
- BitVector* new_bits = new BitVector(new_length);
+ BitVector* new_bits = new(zone) BitVector(new_length, zone);
if (bits_ != NULL) new_bits->CopyFrom(*bits_);
bits_ = new_bits;
}
@@ -432,24 +439,13 @@ class LAllocator BASE_EMBEDDED {
static void TraceAlloc(const char* msg, ...);
- // Lithium translation support.
- // Record a use of an input operand in the current instruction.
- void RecordUse(HValue* value, LUnallocated* operand);
- // Record the definition of the output operand.
- void RecordDefinition(HInstruction* instr, LUnallocated* operand);
- // Record a temporary operand.
- void RecordTemporary(LUnallocated* operand);
-
// Checks whether the value of a given virtual register is tagged.
bool HasTaggedValue(int virtual_register) const;
// Returns the register kind required by the given virtual register.
RegisterKind RequiredRegisterKind(int virtual_register) const;
- // Control max function size.
- static int max_initial_value_ids();
-
- void Allocate(LChunk* chunk);
+ bool Allocate(LChunk* chunk);
const ZoneList<LiveRange*>* live_ranges() const { return &live_ranges_; }
const Vector<LiveRange*>* fixed_live_ranges() const {
@@ -462,6 +458,15 @@ class LAllocator BASE_EMBEDDED {
LChunk* chunk() const { return chunk_; }
HGraph* graph() const { return graph_; }
+ int GetVirtualRegister() {
+ if (next_virtual_register_ > LUnallocated::kMaxVirtualRegisters) {
+ allocation_ok_ = false;
+ }
+ return next_virtual_register_++;
+ }
+
+ bool AllocationOk() { return allocation_ok_; }
+
void MarkAsOsrEntry() {
// There can be only one.
ASSERT(!has_osr_entry_);
@@ -534,7 +539,7 @@ class LAllocator BASE_EMBEDDED {
// Otherwise returns the live range that starts at pos and contains
// all uses from the original range that follow pos. Uses at pos will
// still be owned by the original range after splitting.
- LiveRange* SplitAt(LiveRange* range, LifetimePosition pos);
+ LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);
// Split the given range in a position from the interval [start, end].
LiveRange* SplitBetween(LiveRange* range,
@@ -590,6 +595,8 @@ class LAllocator BASE_EMBEDDED {
inline LGap* GapAt(int index);
+ Zone* zone_;
+
LChunk* chunk_;
// During liveness analysis keep a mapping from block id to live_in sets
@@ -621,6 +628,9 @@ class LAllocator BASE_EMBEDDED {
bool has_osr_entry_;
+ // Indicates success or failure during register allocation.
+ bool allocation_ok_;
+
DISALLOW_COPY_AND_ASSIGN(LAllocator);
};
diff --git a/src/3rdparty/v8/src/lithium.cc b/src/3rdparty/v8/src/lithium.cc
index 31b1698..c41cce8 100644
--- a/src/3rdparty/v8/src/lithium.cc
+++ b/src/3rdparty/v8/src/lithium.cc
@@ -36,6 +36,7 @@ void LOperand::PrintTo(StringStream* stream) {
LUnallocated* unalloc = NULL;
switch (kind()) {
case INVALID:
+ stream->Add("(0)");
break;
case UNALLOCATED:
unalloc = LUnallocated::cast(this);
@@ -70,9 +71,6 @@ void LOperand::PrintTo(StringStream* stream) {
case LUnallocated::ANY:
stream->Add("(-)");
break;
- case LUnallocated::IGNORE:
- stream->Add("(0)");
- break;
}
break;
case CONSTANT_OPERAND:
@@ -96,10 +94,35 @@ void LOperand::PrintTo(StringStream* stream) {
}
}
+#define DEFINE_OPERAND_CACHE(name, type) \
+ L##name* L##name::cache = NULL; \
+ \
+ void L##name::SetUpCache() { \
+ if (cache) return; \
+ cache = new L##name[kNumCachedOperands]; \
+ for (int i = 0; i < kNumCachedOperands; i++) { \
+ cache[i].ConvertTo(type, i); \
+ } \
+ } \
+ \
+ void L##name::TearDownCache() { \
+ delete[] cache; \
+ }
+
+LITHIUM_OPERAND_LIST(DEFINE_OPERAND_CACHE)
+#undef DEFINE_OPERAND_CACHE
+
+void LOperand::SetUpCaches() {
+#define LITHIUM_OPERAND_SETUP(name, type) L##name::SetUpCache();
+ LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP)
+#undef LITHIUM_OPERAND_SETUP
+}
+
-int LOperand::VirtualRegister() {
- LUnallocated* unalloc = LUnallocated::cast(this);
- return unalloc->virtual_register();
+void LOperand::TearDownCaches() {
+#define LITHIUM_OPERAND_TEARDOWN(name, type) L##name::TearDownCache();
+ LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN)
+#undef LITHIUM_OPERAND_TEARDOWN
}
diff --git a/src/3rdparty/v8/src/lithium.h b/src/3rdparty/v8/src/lithium.h
index 1e90804..2ccbf56 100644
--- a/src/3rdparty/v8/src/lithium.h
+++ b/src/3rdparty/v8/src/lithium.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -35,6 +35,14 @@
namespace v8 {
namespace internal {
+#define LITHIUM_OPERAND_LIST(V) \
+ V(ConstantOperand, CONSTANT_OPERAND) \
+ V(StackSlot, STACK_SLOT) \
+ V(DoubleStackSlot, DOUBLE_STACK_SLOT) \
+ V(Register, REGISTER) \
+ V(DoubleRegister, DOUBLE_REGISTER)
+
+
class LOperand: public ZoneObject {
public:
enum Kind {
@@ -52,15 +60,14 @@ class LOperand: public ZoneObject {
Kind kind() const { return KindField::decode(value_); }
int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
- bool IsConstantOperand() const { return kind() == CONSTANT_OPERAND; }
- bool IsStackSlot() const { return kind() == STACK_SLOT; }
- bool IsDoubleStackSlot() const { return kind() == DOUBLE_STACK_SLOT; }
- bool IsRegister() const { return kind() == REGISTER; }
- bool IsDoubleRegister() const { return kind() == DOUBLE_REGISTER; }
- bool IsArgument() const { return kind() == ARGUMENT; }
- bool IsUnallocated() const { return kind() == UNALLOCATED; }
+#define LITHIUM_OPERAND_PREDICATE(name, type) \
+ bool Is##name() const { return kind() == type; }
+ LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_PREDICATE)
+ LITHIUM_OPERAND_PREDICATE(Argument, ARGUMENT)
+ LITHIUM_OPERAND_PREDICATE(Unallocated, UNALLOCATED)
+ LITHIUM_OPERAND_PREDICATE(Ignored, INVALID)
+#undef LITHIUM_OPERAND_PREDICATE
bool Equals(LOperand* other) const { return value_ == other->value_; }
- int VirtualRegister();
void PrintTo(StringStream* stream);
void ConvertTo(Kind kind, int index) {
@@ -69,6 +76,10 @@ class LOperand: public ZoneObject {
ASSERT(this->index() == index);
}
+ // Calls SetUpCache()/TearDownCache() for each subclass.
+ static void SetUpCaches();
+ static void TearDownCaches();
+
protected:
static const int kKindFieldWidth = 3;
class KindField : public BitField<Kind, 0, kKindFieldWidth> { };
@@ -89,8 +100,7 @@ class LUnallocated: public LOperand {
FIXED_SLOT,
MUST_HAVE_REGISTER,
WRITABLE_REGISTER,
- SAME_AS_FIRST_INPUT,
- IGNORE
+ SAME_AS_FIRST_INPUT
};
// Lifetime of operand inside the instruction.
@@ -121,9 +131,9 @@ class LUnallocated: public LOperand {
// The superclass has a KindField. Some policies have a signed fixed
// index in the upper bits.
- static const int kPolicyWidth = 4;
+ static const int kPolicyWidth = 3;
static const int kLifetimeWidth = 1;
- static const int kVirtualRegisterWidth = 17;
+ static const int kVirtualRegisterWidth = 18;
static const int kPolicyShift = kKindFieldWidth;
static const int kLifetimeShift = kPolicyShift + kPolicyWidth;
@@ -143,12 +153,10 @@ class LUnallocated: public LOperand {
kVirtualRegisterWidth> {
};
- static const int kMaxVirtualRegisters = 1 << (kVirtualRegisterWidth + 1);
+ static const int kMaxVirtualRegisters = 1 << kVirtualRegisterWidth;
static const int kMaxFixedIndex = 63;
static const int kMinFixedIndex = -64;
- bool HasIgnorePolicy() const { return policy() == IGNORE; }
- bool HasNoPolicy() const { return policy() == NONE; }
bool HasAnyPolicy() const {
return policy() == ANY;
}
@@ -171,7 +179,7 @@ class LUnallocated: public LOperand {
return static_cast<int>(value_) >> kFixedIndexShift;
}
- unsigned virtual_register() const {
+ int virtual_register() const {
return VirtualRegisterField::decode(value_);
}
@@ -234,9 +242,7 @@ class LMoveOperands BASE_EMBEDDED {
}
bool IsIgnored() const {
- return destination_ != NULL &&
- destination_->IsUnallocated() &&
- LUnallocated::cast(destination_)->HasIgnorePolicy();
+ return destination_ != NULL && destination_->IsIgnored();
}
// We clear both operands to indicate move that's been eliminated.
@@ -265,11 +271,12 @@ class LConstantOperand: public LOperand {
return reinterpret_cast<LConstantOperand*>(op);
}
- static void SetupCache();
+ static void SetUpCache();
+ static void TearDownCache();
private:
static const int kNumCachedOperands = 128;
- static LConstantOperand cache[];
+ static LConstantOperand* cache;
LConstantOperand() : LOperand() { }
explicit LConstantOperand(int index) : LOperand(CONSTANT_OPERAND, index) { }
@@ -300,11 +307,12 @@ class LStackSlot: public LOperand {
return reinterpret_cast<LStackSlot*>(op);
}
- static void SetupCache();
+ static void SetUpCache();
+ static void TearDownCache();
private:
static const int kNumCachedOperands = 128;
- static LStackSlot cache[];
+ static LStackSlot* cache;
LStackSlot() : LOperand() { }
explicit LStackSlot(int index) : LOperand(STACK_SLOT, index) { }
@@ -324,11 +332,12 @@ class LDoubleStackSlot: public LOperand {
return reinterpret_cast<LDoubleStackSlot*>(op);
}
- static void SetupCache();
+ static void SetUpCache();
+ static void TearDownCache();
private:
static const int kNumCachedOperands = 128;
- static LDoubleStackSlot cache[];
+ static LDoubleStackSlot* cache;
LDoubleStackSlot() : LOperand() { }
explicit LDoubleStackSlot(int index) : LOperand(DOUBLE_STACK_SLOT, index) { }
@@ -348,11 +357,12 @@ class LRegister: public LOperand {
return reinterpret_cast<LRegister*>(op);
}
- static void SetupCache();
+ static void SetUpCache();
+ static void TearDownCache();
private:
static const int kNumCachedOperands = 16;
- static LRegister cache[];
+ static LRegister* cache;
LRegister() : LOperand() { }
explicit LRegister(int index) : LOperand(REGISTER, index) { }
@@ -372,11 +382,12 @@ class LDoubleRegister: public LOperand {
return reinterpret_cast<LDoubleRegister*>(op);
}
- static void SetupCache();
+ static void SetUpCache();
+ static void TearDownCache();
private:
static const int kNumCachedOperands = 16;
- static LDoubleRegister cache[];
+ static LDoubleRegister* cache;
LDoubleRegister() : LOperand() { }
explicit LDoubleRegister(int index) : LOperand(DOUBLE_REGISTER, index) { }
@@ -443,30 +454,34 @@ class LPointerMap: public ZoneObject {
class LEnvironment: public ZoneObject {
public:
LEnvironment(Handle<JSFunction> closure,
+ FrameType frame_type,
int ast_id,
int parameter_count,
int argument_count,
int value_count,
LEnvironment* outer)
: closure_(closure),
+ frame_type_(frame_type),
arguments_stack_height_(argument_count),
deoptimization_index_(Safepoint::kNoDeoptimizationIndex),
translation_index_(-1),
ast_id_(ast_id),
parameter_count_(parameter_count),
+ pc_offset_(-1),
values_(value_count),
- representations_(value_count),
+ is_tagged_(value_count, closure->GetHeap()->isolate()->zone()),
spilled_registers_(NULL),
spilled_double_registers_(NULL),
- outer_(outer) {
- }
+ outer_(outer) { }
Handle<JSFunction> closure() const { return closure_; }
+ FrameType frame_type() const { return frame_type_; }
int arguments_stack_height() const { return arguments_stack_height_; }
int deoptimization_index() const { return deoptimization_index_; }
int translation_index() const { return translation_index_; }
int ast_id() const { return ast_id_; }
int parameter_count() const { return parameter_count_; }
+ int pc_offset() const { return pc_offset_; }
LOperand** spilled_registers() const { return spilled_registers_; }
LOperand** spilled_double_registers() const {
return spilled_double_registers_;
@@ -476,17 +491,22 @@ class LEnvironment: public ZoneObject {
void AddValue(LOperand* operand, Representation representation) {
values_.Add(operand);
- representations_.Add(representation);
+ if (representation.IsTagged()) {
+ is_tagged_.Add(values_.length() - 1);
+ }
}
bool HasTaggedValueAt(int index) const {
- return representations_[index].IsTagged();
+ return is_tagged_.Contains(index);
}
- void Register(int deoptimization_index, int translation_index) {
+ void Register(int deoptimization_index,
+ int translation_index,
+ int pc_offset) {
ASSERT(!HasBeenRegistered());
deoptimization_index_ = deoptimization_index;
translation_index_ = translation_index;
+ pc_offset_ = pc_offset;
}
bool HasBeenRegistered() const {
return deoptimization_index_ != Safepoint::kNoDeoptimizationIndex;
@@ -502,13 +522,15 @@ class LEnvironment: public ZoneObject {
private:
Handle<JSFunction> closure_;
+ FrameType frame_type_;
int arguments_stack_height_;
int deoptimization_index_;
int translation_index_;
int ast_id_;
int parameter_count_;
+ int pc_offset_;
ZoneList<LOperand*> values_;
- ZoneList<Representation> representations_;
+ BitVector is_tagged_;
// Allocation index indexed arrays of spill slot operands for registers
// that are also in spill slots at an OSR entry. NULL for environments
@@ -517,8 +539,6 @@ class LEnvironment: public ZoneObject {
LOperand** spilled_double_registers_;
LEnvironment* outer_;
-
- friend class LCodegen;
};
diff --git a/src/3rdparty/v8/src/liveedit-debugger.js b/src/3rdparty/v8/src/liveedit-debugger.js
index e05c53c..abfb0f6 100644
--- a/src/3rdparty/v8/src/liveedit-debugger.js
+++ b/src/3rdparty/v8/src/liveedit-debugger.js
@@ -325,9 +325,10 @@ Debug.LiveEdit = new function() {
if (old_node.children[i].live_shared_function_infos) {
old_node.children[i].live_shared_function_infos.
forEach(function (old_child_info) {
- %LiveEditReplaceRefToNestedFunction(old_info.info,
- corresponding_child_info,
- old_child_info.info);
+ %LiveEditReplaceRefToNestedFunction(
+ old_info.info,
+ corresponding_child_info,
+ old_child_info.info);
});
}
}
@@ -381,7 +382,7 @@ Debug.LiveEdit = new function() {
position: break_point_position,
line: break_point.line(),
column: break_point.column()
- }
+ };
break_point_old_positions.push(old_position_description);
}
@@ -418,7 +419,7 @@ Debug.LiveEdit = new function() {
position: updated_position,
line: new_location.line,
column: new_location.column
- }
+ };
break_point.set(original_script);
@@ -428,7 +429,7 @@ Debug.LiveEdit = new function() {
new_positions: new_position_description
} );
}
- }
+ };
}
@@ -465,7 +466,7 @@ Debug.LiveEdit = new function() {
}
PosTranslator.prototype.GetChunks = function() {
return this.chunks;
- }
+ };
PosTranslator.prototype.Translate = function(pos, inside_chunk_handler) {
var array = this.chunks;
@@ -492,18 +493,18 @@ Debug.LiveEdit = new function() {
inside_chunk_handler = PosTranslator.DefaultInsideChunkHandler;
}
return inside_chunk_handler(pos, chunk);
- }
+ };
PosTranslator.DefaultInsideChunkHandler = function(pos, diff_chunk) {
Assert(false, "Cannot translate position in changed area");
- }
+ };
PosTranslator.ShiftWithTopInsideChunkHandler =
function(pos, diff_chunk) {
// We carelessly do not check whether we stay inside the chunk after
// translation.
return pos - diff_chunk.pos1 + diff_chunk.pos2;
- }
+ };
var FunctionStatus = {
// No change to function or its inner functions; however its positions
@@ -517,7 +518,7 @@ Debug.LiveEdit = new function() {
CHANGED: "changed",
// Function is changed but cannot be patched.
DAMAGED: "damaged"
- }
+ };
function CodeInfoTreeNode(code_info, children, array_index) {
this.info = code_info;
@@ -580,19 +581,19 @@ Debug.LiveEdit = new function() {
// children of unchanged functions are ignored.
function MarkChangedFunctions(code_info_tree, chunks) {
- // A convenient interator over diff chunks that also translates
+ // A convenient iterator over diff chunks that also translates
// positions from old to new in a current non-changed part of script.
var chunk_it = new function() {
var chunk_index = 0;
var pos_diff = 0;
- this.current = function() { return chunks[chunk_index]; }
+ this.current = function() { return chunks[chunk_index]; };
this.next = function() {
var chunk = chunks[chunk_index];
pos_diff = chunk.pos2 + chunk.len2 - (chunk.pos1 + chunk.len1);
chunk_index++;
- }
- this.done = function() { return chunk_index >= chunks.length; }
- this.TranslatePos = function(pos) { return pos + pos_diff; }
+ };
+ this.done = function() { return chunk_index >= chunks.length; };
+ this.TranslatePos = function(pos) { return pos + pos_diff; };
};
// A recursive function that processes internals of a function and all its
@@ -946,16 +947,16 @@ Debug.LiveEdit = new function() {
BLOCKED_ON_OTHER_STACK: 3,
BLOCKED_UNDER_NATIVE_CODE: 4,
REPLACED_ON_ACTIVE_STACK: 5
- }
+ };
FunctionPatchabilityStatus.SymbolName = function(code) {
- var enum = FunctionPatchabilityStatus;
- for (name in enum) {
- if (enum[name] == code) {
+ var enumeration = FunctionPatchabilityStatus;
+ for (name in enumeration) {
+ if (enumeration[name] == code) {
return name;
}
}
- }
+ };
// A logical failure in liveedit process. This means that change_log
@@ -968,7 +969,7 @@ Debug.LiveEdit = new function() {
Failure.prototype.toString = function() {
return "LiveEdit Failure: " + this.message;
- }
+ };
// A testing entry.
function GetPcFromSourcePos(func, source_pos) {
@@ -1078,5 +1079,5 @@ Debug.LiveEdit = new function() {
PosTranslator: PosTranslator,
CompareStrings: CompareStrings,
ApplySingleChunkPatch: ApplySingleChunkPatch
- }
-}
+ };
+};
diff --git a/src/3rdparty/v8/src/liveedit.cc b/src/3rdparty/v8/src/liveedit.cc
index 6107cbf..9c5294a 100644
--- a/src/3rdparty/v8/src/liveedit.cc
+++ b/src/3rdparty/v8/src/liveedit.cc
@@ -53,8 +53,8 @@ void SetElementNonStrict(Handle<JSObject> object,
// Ignore return value from SetElement. It can only be a failure if there
// are element setters causing exceptions and the debugger context has none
// of these.
- Handle<Object> no_failure;
- no_failure = SetElement(object, index, value, kNonStrictMode);
+ Handle<Object> no_failure =
+ JSObject::SetElement(object, index, value, NONE, kNonStrictMode);
ASSERT(!no_failure.is_null());
USE(no_failure);
}
@@ -602,7 +602,8 @@ static void CompileScriptForTracker(Isolate* isolate, Handle<Script> script) {
// Build AST.
CompilationInfo info(script);
info.MarkAsGlobal();
- if (ParserApi::Parse(&info)) {
+ // Parse and don't allow skipping lazy functions.
+ if (ParserApi::Parse(&info, kNoParsingFlags)) {
// Compile the code.
LiveEditFunctionTracker tracker(info.isolate(), info.function());
if (Compiler::MakeCodeForLiveEdit(&info)) {
@@ -797,7 +798,7 @@ class FunctionInfoListener {
HandleScope scope;
FunctionInfoWrapper info = FunctionInfoWrapper::Create();
info.SetInitialProperties(fun->name(), fun->start_position(),
- fun->end_position(), fun->num_parameters(),
+ fun->end_position(), fun->parameter_count(),
current_parent_index_);
current_parent_index_ = len_;
SetElementNonStrict(result_, len_, info.GetJSArray());
@@ -855,38 +856,20 @@ class FunctionInfoListener {
return HEAP->undefined_value();
}
do {
- ZoneList<Variable*> list(10);
- outer_scope->CollectUsedVariables(&list);
- int j = 0;
- for (int i = 0; i < list.length(); i++) {
- Variable* var1 = list[i];
- if (var1->IsContextSlot()) {
- if (j != i) {
- list[j] = var1;
- }
- j++;
- }
- }
+ ZoneList<Variable*> stack_list(outer_scope->StackLocalCount());
+ ZoneList<Variable*> context_list(outer_scope->ContextLocalCount());
+ outer_scope->CollectStackAndContextLocals(&stack_list, &context_list);
+ context_list.Sort(&Variable::CompareIndex);
- // Sort it.
- for (int k = 1; k < j; k++) {
- int l = k;
- for (int m = k + 1; m < j; m++) {
- if (list[l]->index() > list[m]->index()) {
- l = m;
- }
- }
- list[k] = list[l];
- }
- for (int i = 0; i < j; i++) {
+ for (int i = 0; i < context_list.length(); i++) {
SetElementNonStrict(scope_info_list,
scope_info_length,
- list[i]->name());
+ context_list[i]->name());
scope_info_length++;
SetElementNonStrict(
scope_info_list,
scope_info_length,
- Handle<Smi>(Smi::FromInt(list[i]->index())));
+ Handle<Smi>(Smi::FromInt(context_list[i]->index())));
scope_info_length++;
}
SetElementNonStrict(scope_info_list,
@@ -1108,7 +1091,7 @@ MaybeObject* LiveEdit::ReplaceFunctionCode(
ReplaceCodeObject(shared_info->code(), *code);
Handle<Object> code_scope_info = compile_info_wrapper.GetCodeScopeInfo();
if (code_scope_info->IsFixedArray()) {
- shared_info->set_scope_info(SerializedScopeInfo::cast(*code_scope_info));
+ shared_info->set_scope_info(ScopeInfo::cast(*code_scope_info));
}
}
@@ -1245,7 +1228,7 @@ class RelocInfoBuffer {
V8::FatalProcessOutOfMemory("RelocInfoBuffer::GrowBuffer");
}
- // Setup new buffer.
+ // Set up new buffer.
byte* new_buffer = NewArray<byte>(new_buffer_size);
// Copy the data.
diff --git a/src/3rdparty/v8/src/liveobjectlist-inl.h b/src/3rdparty/v8/src/liveobjectlist-inl.h
index f742de3..2bc2296 100644
--- a/src/3rdparty/v8/src/liveobjectlist-inl.h
+++ b/src/3rdparty/v8/src/liveobjectlist-inl.h
@@ -59,7 +59,7 @@ void LiveObjectList::IterateElements(ObjectVisitor* v) {
}
-void LiveObjectList::ProcessNonLive(HeapObject *obj) {
+void LiveObjectList::ProcessNonLive(HeapObject* obj) {
// Only do work if we have at least one list to process.
if (last()) DoProcessNonLive(obj);
}
@@ -93,7 +93,7 @@ LiveObjectList* LiveObjectList::FindLolForId(int id,
template <typename T>
inline LiveObjectList::Element*
LiveObjectList::FindElementFor(T (*GetValue)(LiveObjectList::Element*), T key) {
- LiveObjectList *lol = last();
+ LiveObjectList* lol = last();
while (lol != NULL) {
Element* elements = lol->elements_;
for (int i = 0; i < lol->obj_count_; i++) {
diff --git a/src/3rdparty/v8/src/liveobjectlist.cc b/src/3rdparty/v8/src/liveobjectlist.cc
index 408e2a3..1aabc59 100644
--- a/src/3rdparty/v8/src/liveobjectlist.cc
+++ b/src/3rdparty/v8/src/liveobjectlist.cc
@@ -165,7 +165,7 @@ const char* GetObjectTypeDesc(HeapObject* heap_obj) {
}
-bool IsOfType(LiveObjectType type, HeapObject *obj) {
+bool IsOfType(LiveObjectType type, HeapObject* obj) {
// Note: there are types that are more general (e.g. JSObject) that would
// have passed the Is##type_() test for more specialized types (e.g.
// JSFunction). If we find a more specialized match but we're looking for
@@ -211,7 +211,7 @@ static AllocationSpace FindSpaceFor(String* space_str) {
}
-static bool InSpace(AllocationSpace space, HeapObject *heap_obj) {
+static bool InSpace(AllocationSpace space, HeapObject* heap_obj) {
Heap* heap = ISOLATE->heap();
if (space != LO_SPACE) {
return heap->InSpace(heap_obj, space);
@@ -462,7 +462,7 @@ static int CompactString(char* str) {
char prev_ch = 0;
while (*dst != '\0') {
char ch = *src++;
- // We will treat non-ascii chars as '?'.
+ // We will treat non-ASCII chars as '?'.
if ((ch & 0x80) != 0) {
ch = '?';
}
@@ -498,7 +498,7 @@ static void GenerateObjectDesc(HeapObject* obj,
length);
} else if (obj->IsString()) {
- String *str = String::cast(obj);
+ String* str = String::cast(obj);
// Only grab up to 160 chars in case they are double byte.
// We'll only dump 80 of them after we compact them.
const int kMaxCharToDump = 80;
@@ -842,7 +842,7 @@ class LiveObjectSummary {
bool found_root_;
bool found_weak_root_;
- LolFilter *filter_;
+ LolFilter* filter_;
};
@@ -857,8 +857,8 @@ class SummaryWriter {
// A summary writer for filling in a summary of lol lists and diffs.
class LolSummaryWriter: public SummaryWriter {
public:
- LolSummaryWriter(LiveObjectList *older_lol,
- LiveObjectList *newer_lol)
+ LolSummaryWriter(LiveObjectList* older_lol,
+ LiveObjectList* newer_lol)
: older_(older_lol), newer_(newer_lol) {
}
@@ -944,7 +944,7 @@ LiveObjectList::~LiveObjectList() {
int LiveObjectList::GetTotalObjCountAndSize(int* size_p) {
int size = 0;
int count = 0;
- LiveObjectList *lol = this;
+ LiveObjectList* lol = this;
do {
// Only compute total size if requested i.e. when size_p is not null.
if (size_p != NULL) {
@@ -1183,7 +1183,7 @@ MaybeObject* LiveObjectList::Capture() {
// only time we'll actually delete the lol is when we Reset() or if the lol is
// invisible, and its element count reaches 0.
bool LiveObjectList::Delete(int id) {
- LiveObjectList *lol = last();
+ LiveObjectList* lol = last();
while (lol != NULL) {
if (lol->id() == id) {
break;
@@ -1246,8 +1246,8 @@ MaybeObject* LiveObjectList::Dump(int older_id,
newer_id = temp;
}
- LiveObjectList *newer_lol = FindLolForId(newer_id, last());
- LiveObjectList *older_lol = FindLolForId(older_id, newer_lol);
+ LiveObjectList* newer_lol = FindLolForId(newer_id, last());
+ LiveObjectList* older_lol = FindLolForId(older_id, newer_lol);
// If the id is defined, and we can't find a LOL for it, then we have an
// invalid id.
@@ -1365,8 +1365,8 @@ MaybeObject* LiveObjectList::Summarize(int older_id,
newer_id = temp;
}
- LiveObjectList *newer_lol = FindLolForId(newer_id, last());
- LiveObjectList *older_lol = FindLolForId(older_id, newer_lol);
+ LiveObjectList* newer_lol = FindLolForId(newer_id, last());
+ LiveObjectList* older_lol = FindLolForId(older_id, newer_lol);
// If the id is defined, and we can't find a LOL for it, then we have an
// invalid id.
@@ -1626,7 +1626,7 @@ MaybeObject* LiveObjectList::Info(int start_idx, int dump_limit) {
// Deletes all captured lols.
void LiveObjectList::Reset() {
- LiveObjectList *lol = last();
+ LiveObjectList* lol = last();
// Just delete the last. Each lol will delete it's prev automatically.
delete lol;
@@ -1715,8 +1715,8 @@ class LolVisitor: public ObjectVisitor {
inline bool AddRootRetainerIfFound(const LolVisitor& visitor,
LolFilter* filter,
- LiveObjectSummary *summary,
- void (*SetRootFound)(LiveObjectSummary *s),
+ LiveObjectSummary* summary,
+ void (*SetRootFound)(LiveObjectSummary* s),
int start,
int dump_limit,
int* total_count,
@@ -1762,12 +1762,12 @@ inline bool AddRootRetainerIfFound(const LolVisitor& visitor,
}
-inline void SetFoundRoot(LiveObjectSummary *summary) {
+inline void SetFoundRoot(LiveObjectSummary* summary) {
summary->set_found_root();
}
-inline void SetFoundWeakRoot(LiveObjectSummary *summary) {
+inline void SetFoundWeakRoot(LiveObjectSummary* summary) {
summary->set_found_weak_root();
}
@@ -1779,7 +1779,7 @@ int LiveObjectList::GetRetainers(Handle<HeapObject> target,
int dump_limit,
int* total_count,
LolFilter* filter,
- LiveObjectSummary *summary,
+ LiveObjectSummary* summary,
JSFunction* arguments_function,
Handle<Object> error) {
HandleScope scope;
@@ -2267,7 +2267,7 @@ Object* LiveObjectList::GetPath(int obj_id1,
}
-void LiveObjectList::DoProcessNonLive(HeapObject *obj) {
+void LiveObjectList::DoProcessNonLive(HeapObject* obj) {
// We should only be called if we have at least one lol to search.
ASSERT(last() != NULL);
Element* element = last()->Find(obj);
@@ -2284,7 +2284,7 @@ void LiveObjectList::IterateElementsPrivate(ObjectVisitor* v) {
int count = lol->obj_count_;
for (int i = 0; i < count; i++) {
HeapObject** p = &elements[i].obj_;
- v->VisitPointer(reinterpret_cast<Object **>(p));
+ v->VisitPointer(reinterpret_cast<Object** >(p));
}
lol = lol->prev_;
}
@@ -2389,11 +2389,11 @@ void LiveObjectList::GCEpiloguePrivate() {
PurgeDuplicates();
// After the GC, sweep away all free'd Elements and compact.
- LiveObjectList *prev = NULL;
- LiveObjectList *next = NULL;
+ LiveObjectList* prev = NULL;
+ LiveObjectList* next = NULL;
// Iterating from the youngest lol to the oldest lol.
- for (LiveObjectList *lol = last(); lol; lol = prev) {
+ for (LiveObjectList* lol = last(); lol; lol = prev) {
Element* elements = lol->elements_;
prev = lol->prev(); // Save the prev.
@@ -2446,7 +2446,7 @@ void LiveObjectList::GCEpiloguePrivate() {
const int kMaxUnusedSpace = 64;
if (diff > kMaxUnusedSpace) { // Threshold for shrinking.
// Shrink the list.
- Element *new_elements = NewArray<Element>(new_count);
+ Element* new_elements = NewArray<Element>(new_count);
memcpy(new_elements, elements, new_count * sizeof(Element));
DeleteArray<Element>(elements);
diff --git a/src/3rdparty/v8/src/liveobjectlist.h b/src/3rdparty/v8/src/liveobjectlist.h
index 65470d7..1aa9196 100644
--- a/src/3rdparty/v8/src/liveobjectlist.h
+++ b/src/3rdparty/v8/src/liveobjectlist.h
@@ -77,7 +77,7 @@ class LiveObjectList {
inline static void GCEpilogue();
inline static void GCPrologue();
inline static void IterateElements(ObjectVisitor* v);
- inline static void ProcessNonLive(HeapObject *obj);
+ inline static void ProcessNonLive(HeapObject* obj);
inline static void UpdateReferencesForScavengeGC();
// Note: LOLs can be listed by calling Dump(0, <lol id>), and 2 LOLs can be
@@ -125,7 +125,7 @@ class LiveObjectList {
static void GCEpiloguePrivate();
static void IterateElementsPrivate(ObjectVisitor* v);
- static void DoProcessNonLive(HeapObject *obj);
+ static void DoProcessNonLive(HeapObject* obj);
static int CompareElement(const Element* a, const Element* b);
@@ -138,7 +138,7 @@ class LiveObjectList {
int dump_limit,
int* total_count,
LolFilter* filter,
- LiveObjectSummary *summary,
+ LiveObjectSummary* summary,
JSFunction* arguments_function,
Handle<Object> error);
@@ -151,7 +151,7 @@ class LiveObjectList {
bool is_tracking_roots);
static bool NeedLOLProcessing() { return (last() != NULL); }
- static void NullifyNonLivePointer(HeapObject **p) {
+ static void NullifyNonLivePointer(HeapObject** p) {
// Mask out the low bit that marks this as a heap object. We'll use this
// cleared bit as an indicator that this pointer needs to be collected.
//
@@ -202,7 +202,7 @@ class LiveObjectList {
int id_;
int capacity_;
int obj_count_;
- Element *elements_;
+ Element* elements_;
// Statics for managing all the lists.
static uint32_t next_element_id_;
diff --git a/src/3rdparty/v8/src/log.cc b/src/3rdparty/v8/src/log.cc
index bad5fdc..d93a9d8 100644
--- a/src/3rdparty/v8/src/log.cc
+++ b/src/3rdparty/v8/src/log.cc
@@ -35,6 +35,7 @@
#include "global-handles.h"
#include "log.h"
#include "macro-assembler.h"
+#include "platform.h"
#include "runtime-profiler.h"
#include "serialize.h"
#include "string-stream.h"
@@ -461,18 +462,20 @@ class Logger::NameBuffer {
utf8_pos_ += utf8_length;
return;
}
- int uc16_length = Min(str->length(), kUc16BufferSize);
- String::WriteToFlat(str, uc16_buffer_, 0, uc16_length);
+ int uc16_length = Min(str->length(), kUtf16BufferSize);
+ String::WriteToFlat(str, utf16_buffer, 0, uc16_length);
+ int previous = unibrow::Utf16::kNoPreviousCharacter;
for (int i = 0; i < uc16_length && utf8_pos_ < kUtf8BufferSize; ++i) {
- uc16 c = uc16_buffer_[i];
+ uc16 c = utf16_buffer[i];
if (c <= String::kMaxAsciiCharCodeU) {
utf8_buffer_[utf8_pos_++] = static_cast<char>(c);
} else {
- int char_length = unibrow::Utf8::Length(c);
+ int char_length = unibrow::Utf8::Length(c, previous);
if (utf8_pos_ + char_length > kUtf8BufferSize) break;
- unibrow::Utf8::Encode(utf8_buffer_ + utf8_pos_, c);
+ unibrow::Utf8::Encode(utf8_buffer_ + utf8_pos_, c, previous);
utf8_pos_ += char_length;
}
+ previous = c;
}
}
@@ -504,11 +507,11 @@ class Logger::NameBuffer {
private:
static const int kUtf8BufferSize = 512;
- static const int kUc16BufferSize = 128;
+ static const int kUtf16BufferSize = 128;
int utf8_pos_;
char utf8_buffer_[kUtf8BufferSize];
- uc16 uc16_buffer_[kUc16BufferSize];
+ uc16 utf16_buffer[kUtf16BufferSize];
};
@@ -1450,6 +1453,8 @@ void Logger::LogCodeInfo() {
const char arch[] = "x64";
#elif V8_TARGET_ARCH_ARM
const char arch[] = "arm";
+#elif V8_TARGET_ARCH_MIPS
+ const char arch[] = "mips";
#else
const char arch[] = "unknown";
#endif
@@ -1519,7 +1524,8 @@ void Logger::LowLevelLogWriteBytes(const char* bytes, int size) {
void Logger::LogCodeObjects() {
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+ "Logger::LogCodeObjects");
HeapIterator iterator;
AssertNoAllocation no_alloc;
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
@@ -1574,7 +1580,8 @@ void Logger::LogExistingFunction(Handle<SharedFunctionInfo> shared,
void Logger::LogCompiledFunctions() {
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+ "Logger::LogCompiledFunctions");
HandleScope scope;
const int compiled_funcs_count = EnumerateCompiledFunctions(NULL, NULL);
ScopedVector< Handle<SharedFunctionInfo> > sfis(compiled_funcs_count);
@@ -1593,7 +1600,8 @@ void Logger::LogCompiledFunctions() {
void Logger::LogAccessorCallbacks() {
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+ "Logger::LogAccessorCallbacks");
HeapIterator iterator;
AssertNoAllocation no_alloc;
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
@@ -1613,7 +1621,7 @@ void Logger::LogAccessorCallbacks() {
}
-bool Logger::Setup() {
+bool Logger::SetUp() {
// Tests and EnsureInitialize() can call this twice in a row. It's harmless.
if (is_initialized_) return true;
is_initialized_ = true;
@@ -1706,9 +1714,9 @@ FILE* Logger::TearDown() {
void Logger::EnableSlidingStateWindow() {
- // If the ticker is NULL, Logger::Setup has not been called yet. In
+ // If the ticker is NULL, Logger::SetUp has not been called yet. In
// that case, we set the sliding_state_window flag so that the
- // sliding window computation will be started when Logger::Setup is
+ // sliding window computation will be started when Logger::SetUp is
// called.
if (ticker_ == NULL) {
FLAG_sliding_state_window = true;
@@ -1721,13 +1729,21 @@ void Logger::EnableSlidingStateWindow() {
}
}
+// Protects the state below.
+static Mutex* active_samplers_mutex = NULL;
-Mutex* SamplerRegistry::mutex_ = OS::CreateMutex();
List<Sampler*>* SamplerRegistry::active_samplers_ = NULL;
+void SamplerRegistry::SetUp() {
+ if (!active_samplers_mutex) {
+ active_samplers_mutex = OS::CreateMutex();
+ }
+}
+
+
bool SamplerRegistry::IterateActiveSamplers(VisitSampler func, void* param) {
- ScopedLock lock(mutex_);
+ ScopedLock lock(active_samplers_mutex);
for (int i = 0;
ActiveSamplersExist() && i < active_samplers_->length();
++i) {
@@ -1754,7 +1770,7 @@ SamplerRegistry::State SamplerRegistry::GetState() {
void SamplerRegistry::AddActiveSampler(Sampler* sampler) {
ASSERT(sampler->IsActive());
- ScopedLock lock(mutex_);
+ ScopedLock lock(active_samplers_mutex);
if (active_samplers_ == NULL) {
active_samplers_ = new List<Sampler*>;
} else {
@@ -1766,7 +1782,7 @@ void SamplerRegistry::AddActiveSampler(Sampler* sampler) {
void SamplerRegistry::RemoveActiveSampler(Sampler* sampler) {
ASSERT(sampler->IsActive());
- ScopedLock lock(mutex_);
+ ScopedLock lock(active_samplers_mutex);
ASSERT(active_samplers_ != NULL);
bool removed = active_samplers_->RemoveElement(sampler);
ASSERT(removed);
diff --git a/src/3rdparty/v8/src/log.h b/src/3rdparty/v8/src/log.h
index 4d76fc8..03c7b3b 100644
--- a/src/3rdparty/v8/src/log.h
+++ b/src/3rdparty/v8/src/log.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -71,7 +71,6 @@ namespace internal {
// tick profiler requires code events, so --prof implies --log-code.
// Forward declarations.
-class HashMap;
class LogMessageBuilder;
class Profiler;
class Semaphore;
@@ -150,14 +149,14 @@ class Logger {
#undef DECLARE_ENUM
// Acquires resources for logging if the right flags are set.
- bool Setup();
+ bool SetUp();
void EnsureTickerStarted();
void EnsureTickerStopped();
Sampler* sampler();
- // Frees resources acquired in Setup.
+ // Frees resources acquired in SetUp.
// When a temporary file is used for the log, returns its stream descriptor,
// leaving the file open.
FILE* TearDown();
@@ -295,7 +294,13 @@ class Logger {
INLINE(static LogEventsAndTags ToNativeByScript(LogEventsAndTags, Script*));
// Profiler's sampling interval (in milliseconds).
+#if defined(ANDROID)
+ // Phones and tablets have processors that are much slower than desktop
+ // and laptop computers for which current heuristics are tuned.
+ static const int kSamplingIntervalMs = 5;
+#else
static const int kSamplingIntervalMs = 1;
+#endif
// Callback from Log, stops profiling in case of insufficient resources.
void LogFailure();
@@ -405,7 +410,7 @@ class Logger {
NameMap* address_to_name_map_;
// Guards against multiple calls to TearDown() that can happen in some tests.
- // 'true' between Setup() and TearDown().
+ // 'true' between SetUp() and TearDown().
bool is_initialized_;
// Support for 'incremental addresses' in compressed logs:
@@ -432,6 +437,8 @@ class SamplerRegistry : public AllStatic {
HAS_CPU_PROFILING_SAMPLERS
};
+ static void SetUp();
+
typedef void (*VisitSampler)(Sampler*, void*);
static State GetState();
@@ -449,7 +456,6 @@ class SamplerRegistry : public AllStatic {
return active_samplers_ != NULL && !active_samplers_->is_empty();
}
- static Mutex* mutex_; // Protects the state below.
static List<Sampler*>* active_samplers_;
DISALLOW_IMPLICIT_CONSTRUCTORS(SamplerRegistry);
diff --git a/src/3rdparty/v8/src/macro-assembler.h b/src/3rdparty/v8/src/macro-assembler.h
index 364fdb6..11e2217 100644
--- a/src/3rdparty/v8/src/macro-assembler.h
+++ b/src/3rdparty/v8/src/macro-assembler.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -36,27 +36,6 @@ enum InvokeFlag {
};
-enum CodeLocation {
- IN_JAVASCRIPT,
- IN_JS_ENTRY,
- IN_C_ENTRY
-};
-
-
-enum HandlerType {
- TRY_CATCH_HANDLER,
- TRY_FINALLY_HANDLER,
- JS_ENTRY_HANDLER
-};
-
-
-// Types of uncatchable exceptions.
-enum UncatchableExceptionType {
- OUT_OF_MEMORY,
- TERMINATION
-};
-
-
// Invalid depth in prototype chain.
const int kInvalidProtoDepth = -1;
diff --git a/src/3rdparty/v8/src/macros.py b/src/3rdparty/v8/src/macros.py
index bf7119f..93287ae 100644
--- a/src/3rdparty/v8/src/macros.py
+++ b/src/3rdparty/v8/src/macros.py
@@ -26,7 +26,7 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Dictionary that is passed as defines for js2c.py.
-# Used for defines that must be defined for all native js files.
+# Used for defines that must be defined for all native JS files.
const NONE = 0;
const READ_ONLY = 1;
@@ -101,6 +101,9 @@ macro IS_OBJECT(arg) = (%_IsObject(arg));
macro IS_ARRAY(arg) = (%_IsArray(arg));
macro IS_FUNCTION(arg) = (%_IsFunction(arg));
macro IS_REGEXP(arg) = (%_IsRegExp(arg));
+macro IS_SET(arg) = (%_ClassOf(arg) === 'Set');
+macro IS_MAP(arg) = (%_ClassOf(arg) === 'Map');
+macro IS_WEAKMAP(arg) = (%_ClassOf(arg) === 'WeakMap');
macro IS_DATE(arg) = (%_ClassOf(arg) === 'Date');
macro IS_NUMBER_WRAPPER(arg) = (%_ClassOf(arg) === 'Number');
macro IS_STRING_WRAPPER(arg) = (%_ClassOf(arg) === 'String');
@@ -161,16 +164,36 @@ const MAX_TIME_BEFORE_UTC = 8640002592000000;
# Gets the value of a Date object. If arg is not a Date object
# a type error is thrown.
-macro DATE_VALUE(arg) = (%_ClassOf(arg) === 'Date' ? %_ValueOf(arg) : ThrowDateTypeError());
-macro DAY(time) = ($floor(time / 86400000));
-macro NAN_OR_DATE_FROM_TIME(time) = (NUMBER_IS_NAN(time) ? time : DateFromTime(time));
-macro HOUR_FROM_TIME(time) = (Modulo($floor(time / 3600000), 24));
-macro MIN_FROM_TIME(time) = (Modulo($floor(time / 60000), 60));
-macro NAN_OR_MIN_FROM_TIME(time) = (NUMBER_IS_NAN(time) ? time : MIN_FROM_TIME(time));
-macro SEC_FROM_TIME(time) = (Modulo($floor(time / 1000), 60));
-macro NAN_OR_SEC_FROM_TIME(time) = (NUMBER_IS_NAN(time) ? time : SEC_FROM_TIME(time));
-macro MS_FROM_TIME(time) = (Modulo(time, 1000));
-macro NAN_OR_MS_FROM_TIME(time) = (NUMBER_IS_NAN(time) ? time : MS_FROM_TIME(time));
+macro CHECK_DATE(arg) = if (%_ClassOf(arg) !== 'Date') ThrowDateTypeError();
+macro LOCAL_DATE_VALUE(arg) = (%_DateField(arg, 0) + %_DateField(arg, 21));
+macro UTC_DATE_VALUE(arg) = (%_DateField(arg, 0));
+
+macro LOCAL_YEAR(arg) = (%_DateField(arg, 1));
+macro LOCAL_MONTH(arg) = (%_DateField(arg, 2));
+macro LOCAL_DAY(arg) = (%_DateField(arg, 3));
+macro LOCAL_WEEKDAY(arg) = (%_DateField(arg, 4));
+macro LOCAL_HOUR(arg) = (%_DateField(arg, 5));
+macro LOCAL_MIN(arg) = (%_DateField(arg, 6));
+macro LOCAL_SEC(arg) = (%_DateField(arg, 7));
+macro LOCAL_MS(arg) = (%_DateField(arg, 8));
+macro LOCAL_DAYS(arg) = (%_DateField(arg, 9));
+macro LOCAL_TIME_IN_DAY(arg) = (%_DateField(arg, 10));
+
+macro UTC_YEAR(arg) = (%_DateField(arg, 11));
+macro UTC_MONTH(arg) = (%_DateField(arg, 12));
+macro UTC_DAY(arg) = (%_DateField(arg, 13));
+macro UTC_WEEKDAY(arg) = (%_DateField(arg, 14));
+macro UTC_HOUR(arg) = (%_DateField(arg, 15));
+macro UTC_MIN(arg) = (%_DateField(arg, 16));
+macro UTC_SEC(arg) = (%_DateField(arg, 17));
+macro UTC_MS(arg) = (%_DateField(arg, 18));
+macro UTC_DAYS(arg) = (%_DateField(arg, 19));
+macro UTC_TIME_IN_DAY(arg) = (%_DateField(arg, 20));
+
+macro TIMEZONE_OFFSET(arg) = (%_DateField(arg, 21));
+
+macro SET_UTC_DATE_VALUE(arg, value) = (%DateSetValue(arg, value, 1));
+macro SET_LOCAL_DATE_VALUE(arg, value) = (%DateSetValue(arg, value, 0));
# Last input and last subject of regexp matches.
macro LAST_SUBJECT(array) = ((array)[1]);
diff --git a/src/3rdparty/v8/src/mark-compact-inl.h b/src/3rdparty/v8/src/mark-compact-inl.h
index 573715e..43f6b89 100644
--- a/src/3rdparty/v8/src/mark-compact-inl.h
+++ b/src/3rdparty/v8/src/mark-compact-inl.h
@@ -45,7 +45,10 @@ MarkBit Marking::MarkBitFrom(Address addr) {
void MarkCompactCollector::SetFlags(int flags) {
- sweep_precisely_ = ((flags & Heap::kMakeHeapIterableMask) != 0);
+ sweep_precisely_ = ((flags & Heap::kSweepPreciselyMask) != 0);
+ reduce_memory_footprint_ = ((flags & Heap::kReduceMemoryFootprintMask) != 0);
+ abort_incremental_marking_ =
+ ((flags & Heap::kAbortIncrementalMarkingMask) != 0);
}
@@ -53,17 +56,33 @@ void MarkCompactCollector::MarkObject(HeapObject* obj, MarkBit mark_bit) {
ASSERT(Marking::MarkBitFrom(obj) == mark_bit);
if (!mark_bit.Get()) {
mark_bit.Set();
- MemoryChunk::IncrementLiveBytes(obj->address(), obj->Size());
+ MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
ProcessNewlyMarkedObject(obj);
}
}
+bool MarkCompactCollector::MarkObjectWithoutPush(HeapObject* object) {
+ MarkBit mark = Marking::MarkBitFrom(object);
+ bool old_mark = mark.Get();
+ if (!old_mark) SetMark(object, mark);
+ return old_mark;
+}
+
+
+void MarkCompactCollector::MarkObjectAndPush(HeapObject* object) {
+ if (!MarkObjectWithoutPush(object)) marking_deque_.PushBlack(object);
+}
+
+
void MarkCompactCollector::SetMark(HeapObject* obj, MarkBit mark_bit) {
ASSERT(!mark_bit.Get());
ASSERT(Marking::MarkBitFrom(obj) == mark_bit);
mark_bit.Set();
- MemoryChunk::IncrementLiveBytes(obj->address(), obj->Size());
+ MemoryChunk::IncrementLiveBytesFromGC(obj->address(), obj->Size());
+ if (obj->IsMap()) {
+ heap_->ClearCacheOnMap(Map::cast(obj));
+ }
}
diff --git a/src/3rdparty/v8/src/mark-compact.cc b/src/3rdparty/v8/src/mark-compact.cc
index bf0aab8..b4f488b 100644
--- a/src/3rdparty/v8/src/mark-compact.cc
+++ b/src/3rdparty/v8/src/mark-compact.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -60,9 +60,12 @@ MarkCompactCollector::MarkCompactCollector() : // NOLINT
state_(IDLE),
#endif
sweep_precisely_(false),
+ reduce_memory_footprint_(false),
+ abort_incremental_marking_(false),
compacting_(false),
was_marked_incrementally_(false),
collect_maps_(FLAG_collect_maps),
+ flush_monomorphic_ics_(false),
tracer_(NULL),
migration_slots_buffer_(NULL),
heap_(NULL),
@@ -106,14 +109,14 @@ static void VerifyMarking(NewSpace* space) {
Address end = space->top();
NewSpacePageIterator it(space->bottom(), end);
// The bottom position is at the start of its page. Allows us to use
- // page->body() as start of range on all pages.
+ // page->area_start() as start of range on all pages.
ASSERT_EQ(space->bottom(),
- NewSpacePage::FromAddress(space->bottom())->body());
+ NewSpacePage::FromAddress(space->bottom())->area_start());
while (it.has_next()) {
NewSpacePage* page = it.next();
- Address limit = it.has_next() ? page->body_limit() : end;
+ Address limit = it.has_next() ? page->area_end() : end;
ASSERT(limit == end || !page->Contains(end));
- VerifyMarking(page->body(), limit);
+ VerifyMarking(page->area_start(), limit);
}
}
@@ -123,7 +126,7 @@ static void VerifyMarking(PagedSpace* space) {
while (it.has_next()) {
Page* p = it.next();
- VerifyMarking(p->ObjectAreaStart(), p->ObjectAreaEnd());
+ VerifyMarking(p->area_start(), p->area_end());
}
}
@@ -186,8 +189,8 @@ static void VerifyEvacuation(NewSpace* space) {
while (it.has_next()) {
NewSpacePage* page = it.next();
- Address current = page->body();
- Address limit = it.has_next() ? page->body_limit() : space->top();
+ Address current = page->area_start();
+ Address limit = it.has_next() ? page->area_end() : space->top();
ASSERT(limit == space->top() || !page->Contains(space->top()));
while (current < limit) {
HeapObject* object = HeapObject::FromAddress(current);
@@ -204,7 +207,7 @@ static void VerifyEvacuation(PagedSpace* space) {
while (it.has_next()) {
Page* p = it.next();
if (p->IsEvacuationCandidate()) continue;
- VerifyEvacuation(p->ObjectAreaStart(), p->ObjectAreaEnd());
+ VerifyEvacuation(p->area_start(), p->area_end());
}
}
@@ -229,15 +232,34 @@ void MarkCompactCollector::AddEvacuationCandidate(Page* p) {
}
-bool MarkCompactCollector::StartCompaction() {
+static void TraceFragmentation(PagedSpace* space) {
+ int number_of_pages = space->CountTotalPages();
+ intptr_t reserved = (number_of_pages * space->AreaSize());
+ intptr_t free = reserved - space->SizeOfObjects();
+ PrintF("[%s]: %d pages, %d (%.1f%%) free\n",
+ AllocationSpaceName(space->identity()),
+ number_of_pages,
+ static_cast<int>(free),
+ static_cast<double>(free) * 100 / reserved);
+}
+
+
+bool MarkCompactCollector::StartCompaction(CompactionMode mode) {
if (!compacting_) {
ASSERT(evacuation_candidates_.length() == 0);
CollectEvacuationCandidates(heap()->old_pointer_space());
CollectEvacuationCandidates(heap()->old_data_space());
- if (FLAG_compact_code_space) {
+ if (FLAG_compact_code_space && mode == NON_INCREMENTAL_COMPACTION) {
CollectEvacuationCandidates(heap()->code_space());
+ } else if (FLAG_trace_fragmentation) {
+ TraceFragmentation(heap()->code_space());
+ }
+
+ if (FLAG_trace_fragmentation) {
+ TraceFragmentation(heap()->map_space());
+ TraceFragmentation(heap()->cell_space());
}
heap()->old_pointer_space()->EvictEvacuationCandidatesFromFreeLists();
@@ -413,32 +435,201 @@ const char* AllocationSpaceName(AllocationSpace space) {
}
+// Returns zero for pages that have so little fragmentation that it is not
+// worth defragmenting them. Otherwise a positive integer that gives an
+// estimate of fragmentation on an arbitrary scale.
+static int FreeListFragmentation(PagedSpace* space, Page* p) {
+ // If page was not swept then there are no free list items on it.
+ if (!p->WasSwept()) {
+ if (FLAG_trace_fragmentation) {
+ PrintF("%p [%s]: %d bytes live (unswept)\n",
+ reinterpret_cast<void*>(p),
+ AllocationSpaceName(space->identity()),
+ p->LiveBytes());
+ }
+ return 0;
+ }
+
+ FreeList::SizeStats sizes;
+ space->CountFreeListItems(p, &sizes);
+
+ intptr_t ratio;
+ intptr_t ratio_threshold;
+ intptr_t area_size = space->AreaSize();
+ if (space->identity() == CODE_SPACE) {
+ ratio = (sizes.medium_size_ * 10 + sizes.large_size_ * 2) * 100 /
+ area_size;
+ ratio_threshold = 10;
+ } else {
+ ratio = (sizes.small_size_ * 5 + sizes.medium_size_) * 100 /
+ area_size;
+ ratio_threshold = 15;
+ }
+
+ if (FLAG_trace_fragmentation) {
+ PrintF("%p [%s]: %d (%.2f%%) %d (%.2f%%) %d (%.2f%%) %d (%.2f%%) %s\n",
+ reinterpret_cast<void*>(p),
+ AllocationSpaceName(space->identity()),
+ static_cast<int>(sizes.small_size_),
+ static_cast<double>(sizes.small_size_ * 100) /
+ area_size,
+ static_cast<int>(sizes.medium_size_),
+ static_cast<double>(sizes.medium_size_ * 100) /
+ area_size,
+ static_cast<int>(sizes.large_size_),
+ static_cast<double>(sizes.large_size_ * 100) /
+ area_size,
+ static_cast<int>(sizes.huge_size_),
+ static_cast<double>(sizes.huge_size_ * 100) /
+ area_size,
+ (ratio > ratio_threshold) ? "[fragmented]" : "");
+ }
+
+ if (FLAG_always_compact && sizes.Total() != area_size) {
+ return 1;
+ }
+
+ if (ratio <= ratio_threshold) return 0; // Not fragmented.
+
+ return static_cast<int>(ratio - ratio_threshold);
+}
+
+
void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
ASSERT(space->identity() == OLD_POINTER_SPACE ||
space->identity() == OLD_DATA_SPACE ||
space->identity() == CODE_SPACE);
- PageIterator it(space);
+ int number_of_pages = space->CountTotalPages();
+
+ const int kMaxMaxEvacuationCandidates = 1000;
+ int max_evacuation_candidates = Min(
+ kMaxMaxEvacuationCandidates,
+ static_cast<int>(sqrt(static_cast<double>(number_of_pages / 2)) + 1));
+
+ if (FLAG_stress_compaction || FLAG_always_compact) {
+ max_evacuation_candidates = kMaxMaxEvacuationCandidates;
+ }
+
+ class Candidate {
+ public:
+ Candidate() : fragmentation_(0), page_(NULL) { }
+ Candidate(int f, Page* p) : fragmentation_(f), page_(p) { }
+
+ int fragmentation() { return fragmentation_; }
+ Page* page() { return page_; }
+
+ private:
+ int fragmentation_;
+ Page* page_;
+ };
+
+ enum CompactionMode {
+ COMPACT_FREE_LISTS,
+ REDUCE_MEMORY_FOOTPRINT
+ };
+
+ CompactionMode mode = COMPACT_FREE_LISTS;
+
+ intptr_t reserved = number_of_pages * space->AreaSize();
+ intptr_t over_reserved = reserved - space->SizeOfObjects();
+ static const intptr_t kFreenessThreshold = 50;
+
+ if (over_reserved >= 2 * space->AreaSize() &&
+ reduce_memory_footprint_) {
+ mode = REDUCE_MEMORY_FOOTPRINT;
+
+ // We expect that empty pages are easier to compact so slightly bump the
+ // limit.
+ max_evacuation_candidates += 2;
+
+ if (FLAG_trace_fragmentation) {
+ PrintF("Estimated over reserved memory: %.1f MB (setting threshold %d)\n",
+ static_cast<double>(over_reserved) / MB,
+ static_cast<int>(kFreenessThreshold));
+ }
+ }
+
+ intptr_t estimated_release = 0;
+
+ Candidate candidates[kMaxMaxEvacuationCandidates];
+
int count = 0;
+ int fragmentation = 0;
+ Candidate* least = NULL;
+
+ PageIterator it(space);
if (it.has_next()) it.next(); // Never compact the first page.
+
while (it.has_next()) {
Page* p = it.next();
- bool evacuate = false;
+ p->ClearEvacuationCandidate();
+
if (FLAG_stress_compaction) {
int counter = space->heap()->ms_count();
uintptr_t page_number = reinterpret_cast<uintptr_t>(p) >> kPageSizeBits;
- if ((counter & 1) == (page_number & 1)) evacuate = true;
+ if ((counter & 1) == (page_number & 1)) fragmentation = 1;
+ } else if (mode == REDUCE_MEMORY_FOOTPRINT) {
+ // Don't try to release too many pages.
+ if (estimated_release >= ((over_reserved * 3) / 4)) {
+ continue;
+ }
+
+ intptr_t free_bytes = 0;
+
+ if (!p->WasSwept()) {
+ free_bytes = (p->area_size() - p->LiveBytes());
+ } else {
+ FreeList::SizeStats sizes;
+ space->CountFreeListItems(p, &sizes);
+ free_bytes = sizes.Total();
+ }
+
+ int free_pct = static_cast<int>(free_bytes * 100) / p->area_size();
+
+ if (free_pct >= kFreenessThreshold) {
+ estimated_release += 2 * p->area_size() - free_bytes;
+ fragmentation = free_pct;
+ } else {
+ fragmentation = 0;
+ }
+
+ if (FLAG_trace_fragmentation) {
+ PrintF("%p [%s]: %d (%.2f%%) free %s\n",
+ reinterpret_cast<void*>(p),
+ AllocationSpaceName(space->identity()),
+ static_cast<int>(free_bytes),
+ static_cast<double>(free_bytes * 100) / p->area_size(),
+ (fragmentation > 0) ? "[fragmented]" : "");
+ }
} else {
- if (space->IsFragmented(p)) evacuate = true;
+ fragmentation = FreeListFragmentation(space, p);
}
- if (evacuate) {
- AddEvacuationCandidate(p);
- count++;
- } else {
- p->ClearEvacuationCandidate();
+
+ if (fragmentation != 0) {
+ if (count < max_evacuation_candidates) {
+ candidates[count++] = Candidate(fragmentation, p);
+ } else {
+ if (least == NULL) {
+ for (int i = 0; i < max_evacuation_candidates; i++) {
+ if (least == NULL ||
+ candidates[i].fragmentation() < least->fragmentation()) {
+ least = candidates + i;
+ }
+ }
+ }
+ if (least->fragmentation() < fragmentation) {
+ *least = Candidate(fragmentation, p);
+ least = NULL;
+ }
+ }
}
}
+ for (int i = 0; i < count; i++) {
+ AddEvacuationCandidate(candidates[i].page());
+ }
+
if (count > 0 && FLAG_trace_fragmentation) {
PrintF("Collected %d evacuation candidates for space %s\n",
count,
@@ -472,6 +663,12 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
// order which is not implemented for incremental marking.
collect_maps_ = FLAG_collect_maps && !was_marked_incrementally_;
+ // Monomorphic ICs are preserved when possible, but need to be flushed
+ // when they might be keeping a Context alive, or when the heap is about
+ // to be serialized.
+ flush_monomorphic_ics_ =
+ heap()->isolate()->context_exit_happened() || Serializer::enabled();
+
// Rather than passing the tracer around we stash it in a static member
// variable.
tracer_ = tracer;
@@ -491,8 +688,8 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
}
#endif
- // Clear marking bits for precise sweeping to collect all garbage.
- if (was_marked_incrementally_ && PreciseSweepingRequired()) {
+ // Clear marking bits if incremental marking is aborted.
+ if (was_marked_incrementally_ && abort_incremental_marking_) {
heap()->incremental_marking()->Abort();
ClearMarkbits();
AbortCompaction();
@@ -502,7 +699,7 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
// Don't start compaction if we are in the middle of incremental
// marking cycle. We did not collect any slots.
if (!FLAG_never_compact && !was_marked_incrementally_) {
- StartCompaction();
+ StartCompaction(NON_INCREMENTAL_COMPACTION);
}
PagedSpaces spaces;
@@ -576,8 +773,7 @@ class CodeFlusher {
}
void AddCandidate(JSFunction* function) {
- ASSERT(function->unchecked_code() ==
- function->unchecked_shared()->unchecked_code());
+ ASSERT(function->code() == function->shared()->code());
SetNextCandidate(function, jsfunction_candidates_head_);
jsfunction_candidates_head_ = function;
@@ -597,15 +793,15 @@ class CodeFlusher {
while (candidate != NULL) {
next_candidate = GetNextCandidate(candidate);
- SharedFunctionInfo* shared = candidate->unchecked_shared();
+ SharedFunctionInfo* shared = candidate->shared();
- Code* code = shared->unchecked_code();
+ Code* code = shared->code();
MarkBit code_mark = Marking::MarkBitFrom(code);
if (!code_mark.Get()) {
shared->set_code(lazy_compile);
candidate->set_code(lazy_compile);
} else {
- candidate->set_code(shared->unchecked_code());
+ candidate->set_code(shared->code());
}
// We are in the middle of a GC cycle so the write barrier in the code
@@ -615,6 +811,8 @@ class CodeFlusher {
isolate_->heap()->mark_compact_collector()->
RecordCodeEntrySlot(slot, target);
+ RecordSharedFunctionInfoCodeSlot(shared);
+
candidate = next_candidate;
}
@@ -631,18 +829,27 @@ class CodeFlusher {
next_candidate = GetNextCandidate(candidate);
SetNextCandidate(candidate, NULL);
- Code* code = candidate->unchecked_code();
+ Code* code = candidate->code();
MarkBit code_mark = Marking::MarkBitFrom(code);
if (!code_mark.Get()) {
candidate->set_code(lazy_compile);
}
+ RecordSharedFunctionInfoCodeSlot(candidate);
+
candidate = next_candidate;
}
shared_function_info_candidates_head_ = NULL;
}
+ void RecordSharedFunctionInfoCodeSlot(SharedFunctionInfo* shared) {
+ Object** slot = HeapObject::RawField(shared,
+ SharedFunctionInfo::kCodeOffset);
+ isolate_->heap()->mark_compact_collector()->
+ RecordSlot(slot, slot, HeapObject::cast(*slot));
+ }
+
static JSFunction** GetNextCandidateField(JSFunction* candidate) {
return reinterpret_cast<JSFunction**>(
candidate->address() + JSFunction::kCodeEntryOffset);
@@ -659,18 +866,19 @@ class CodeFlusher {
static SharedFunctionInfo** GetNextCandidateField(
SharedFunctionInfo* candidate) {
- Code* code = candidate->unchecked_code();
+ Code* code = candidate->code();
return reinterpret_cast<SharedFunctionInfo**>(
- code->address() + Code::kNextCodeFlushingCandidateOffset);
+ code->address() + Code::kGCMetadataOffset);
}
static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) {
- return *GetNextCandidateField(candidate);
+ return reinterpret_cast<SharedFunctionInfo*>(
+ candidate->code()->gc_metadata());
}
static void SetNextCandidate(SharedFunctionInfo* candidate,
SharedFunctionInfo* next_candidate) {
- *GetNextCandidateField(candidate) = next_candidate;
+ candidate->code()->set_gc_metadata(next_candidate);
}
Isolate* isolate_;
@@ -695,7 +903,7 @@ static inline HeapObject* ShortCircuitConsString(Object** p) {
// it in place to its left substring. Return the updated value.
//
// Here we assume that if we change *p, we replace it with a heap object
- // (ie, the left substring of a cons string is always a heap object).
+ // (i.e., the left substring of a cons string is always a heap object).
//
// The check performed is:
// object->IsConsString() && !object->IsSymbol() &&
@@ -839,21 +1047,15 @@ class StaticMarkingVisitor : public StaticVisitorBase {
static inline void VisitCodeTarget(Heap* heap, RelocInfo* rinfo) {
ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
- if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()) {
+ if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
+ && (target->ic_state() == MEGAMORPHIC ||
+ heap->mark_compact_collector()->flush_monomorphic_ics_ ||
+ target->ic_age() != heap->global_ic_age())) {
IC::Clear(rinfo->pc());
- // Please note targets for cleared inline cached do not have to be
- // marked since they are contained in HEAP->non_monomorphic_cache().
target = Code::GetCodeFromTargetAddress(rinfo->target_address());
- } else {
- if (FLAG_cleanup_code_caches_at_gc &&
- target->kind() == Code::STUB &&
- target->major_key() == CodeStub::CallFunction &&
- target->has_function_cache()) {
- CallFunctionStub::Clear(heap, rinfo->pc());
- }
- MarkBit code_mark = Marking::MarkBitFrom(target);
- heap->mark_compact_collector()->MarkObject(target, code_mark);
}
+ MarkBit code_mark = Marking::MarkBitFrom(target);
+ heap->mark_compact_collector()->MarkObject(target, code_mark);
heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
}
@@ -921,6 +1123,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {
}
static inline void VisitExternalReference(Address* p) { }
+ static inline void VisitExternalReference(RelocInfo* rinfo) { }
static inline void VisitRuntimeEntry(RelocInfo* rinfo) { }
private:
@@ -965,17 +1168,29 @@ class StaticMarkingVisitor : public StaticVisitorBase {
object_size);
// Mark the backing hash table without pushing it on the marking stack.
- ASSERT(!MarkCompactCollector::IsMarked(weak_map->unchecked_table()));
- ASSERT(MarkCompactCollector::IsMarked(weak_map->unchecked_table()->map()));
-
- HeapObject* unchecked_table = weak_map->unchecked_table();
- MarkBit mark_bit = Marking::MarkBitFrom(unchecked_table);
- collector->SetMark(unchecked_table, mark_bit);
+ ObjectHashTable* table = ObjectHashTable::cast(weak_map->table());
+ ASSERT(!MarkCompactCollector::IsMarked(table));
+ collector->SetMark(table, Marking::MarkBitFrom(table));
+ collector->MarkObject(table->map(), Marking::MarkBitFrom(table->map()));
+ ASSERT(MarkCompactCollector::IsMarked(table->map()));
}
static void VisitCode(Map* map, HeapObject* object) {
- reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>(
- map->GetHeap());
+ Heap* heap = map->GetHeap();
+ Code* code = reinterpret_cast<Code*>(object);
+ if (FLAG_cleanup_code_caches_at_gc) {
+ Object* raw_info = code->type_feedback_info();
+ if (raw_info->IsTypeFeedbackInfo()) {
+ TypeFeedbackCells* type_feedback_cells =
+ TypeFeedbackInfo::cast(raw_info)->type_feedback_cells();
+ for (int i = 0; i < type_feedback_cells->CellCount(); i++) {
+ ASSERT(type_feedback_cells->AstId(i)->IsSmi());
+ JSGlobalPropertyCell* cell = type_feedback_cells->Cell(i);
+ cell->set_value(TypeFeedbackCells::RawUninitializedSentinel(heap));
+ }
+ }
+ }
+ code->CodeIterateBody<StaticMarkingVisitor>(heap);
}
// Code flushing support.
@@ -994,12 +1209,12 @@ class StaticMarkingVisitor : public StaticVisitorBase {
inline static bool IsCompiled(JSFunction* function) {
- return function->unchecked_code() !=
+ return function->code() !=
function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}
inline static bool IsCompiled(SharedFunctionInfo* function) {
- return function->unchecked_code() !=
+ return function->code() !=
function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}
@@ -1008,15 +1223,16 @@ class StaticMarkingVisitor : public StaticVisitorBase {
// Code is either on stack, in compilation cache or referenced
// by optimized version of function.
- MarkBit code_mark =
- Marking::MarkBitFrom(function->unchecked_code());
+ MarkBit code_mark = Marking::MarkBitFrom(function->code());
if (code_mark.Get()) {
- shared_info->set_code_age(0);
+ if (!Marking::MarkBitFrom(shared_info).Get()) {
+ shared_info->set_code_age(0);
+ }
return false;
}
// We do not flush code for optimized functions.
- if (function->code() != shared_info->unchecked_code()) {
+ if (function->code() != shared_info->code()) {
return false;
}
@@ -1027,9 +1243,8 @@ class StaticMarkingVisitor : public StaticVisitorBase {
// Code is either on stack, in compilation cache or referenced
// by optimized version of function.
MarkBit code_mark =
- Marking::MarkBitFrom(shared_info->unchecked_code());
+ Marking::MarkBitFrom(shared_info->code());
if (code_mark.Get()) {
- shared_info->set_code_age(0);
return false;
}
@@ -1041,16 +1256,24 @@ class StaticMarkingVisitor : public StaticVisitorBase {
// We never flush code for Api functions.
Object* function_data = shared_info->function_data();
- if (function_data->IsFunctionTemplateInfo()) return false;
+ if (function_data->IsFunctionTemplateInfo()) {
+ return false;
+ }
// Only flush code for functions.
- if (shared_info->code()->kind() != Code::FUNCTION) return false;
+ if (shared_info->code()->kind() != Code::FUNCTION) {
+ return false;
+ }
// Function must be lazy compilable.
- if (!shared_info->allows_lazy_compilation()) return false;
+ if (!shared_info->allows_lazy_compilation()) {
+ return false;
+ }
// If this is a full script wrapped in a function we do no flush the code.
- if (shared_info->is_toplevel()) return false;
+ if (shared_info->is_toplevel()) {
+ return false;
+ }
// Age this shared function info.
if (shared_info->code_age() < kCodeAgeThreshold) {
@@ -1109,6 +1332,16 @@ class StaticMarkingVisitor : public StaticVisitorBase {
re->SetDataAtUnchecked(JSRegExp::saved_code_index(is_ascii),
code,
heap);
+
+ // Saving a copy might create a pointer into compaction candidate
+ // that was not observed by marker. This might happen if JSRegExp data
+ // was marked through the compilation cache before marker reached JSRegExp
+ // object.
+ FixedArray* data = FixedArray::cast(re->data());
+ Object** slot = data->data_start() + JSRegExp::saved_code_index(is_ascii);
+ heap->mark_compact_collector()->
+ RecordSlot(slot, slot, code);
+
// Set a number in the 0-255 range to guarantee no smi overflow.
re->SetDataAtUnchecked(JSRegExp::code_index(is_ascii),
Smi::FromInt(heap->sweep_generation() & 0xff),
@@ -1147,7 +1380,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {
return;
}
JSRegExp* re = reinterpret_cast<JSRegExp*>(object);
- // Flush code or set age on both ascii and two byte code.
+ // Flush code or set age on both ASCII and two byte code.
UpdateRegExpCodeAgeAndFlush(heap, re, true);
UpdateRegExpCodeAgeAndFlush(heap, re, false);
// Visit the fields of the RegExp, including the updated FixedArray.
@@ -1173,6 +1406,10 @@ class StaticMarkingVisitor : public StaticVisitorBase {
if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();
+ if (shared->ic_age() != heap->global_ic_age()) {
+ shared->ResetForNewContext(heap->global_ic_age());
+ }
+
if (!known_flush_code_candidate) {
known_flush_code_candidate = IsFlushable(heap, shared);
if (known_flush_code_candidate) {
@@ -1223,30 +1460,12 @@ class StaticMarkingVisitor : public StaticVisitorBase {
}
if (!flush_code_candidate) {
- Code* code = jsfunction->unchecked_shared()->unchecked_code();
+ Code* code = jsfunction->shared()->code();
MarkBit code_mark = Marking::MarkBitFrom(code);
- heap->mark_compact_collector()->MarkObject(code, code_mark);
-
- if (jsfunction->unchecked_code()->kind() == Code::OPTIMIZED_FUNCTION) {
- // For optimized functions we should retain both non-optimized version
- // of it's code and non-optimized version of all inlined functions.
- // This is required to support bailing out from inlined code.
- DeoptimizationInputData* data =
- reinterpret_cast<DeoptimizationInputData*>(
- jsfunction->unchecked_code()->unchecked_deoptimization_data());
-
- FixedArray* literals = data->UncheckedLiteralArray();
-
- for (int i = 0, count = data->InlinedFunctionCount()->value();
- i < count;
- i++) {
- JSFunction* inlined = reinterpret_cast<JSFunction*>(literals->get(i));
- Code* inlined_code = inlined->unchecked_shared()->unchecked_code();
- MarkBit inlined_code_mark =
- Marking::MarkBitFrom(inlined_code);
- heap->mark_compact_collector()->MarkObject(
- inlined_code, inlined_code_mark);
- }
+ collector->MarkObject(code, code_mark);
+
+ if (jsfunction->code()->kind() == Code::OPTIMIZED_FUNCTION) {
+ collector->MarkInlinedFunctionsCode(jsfunction->code());
}
}
@@ -1371,11 +1590,7 @@ class CodeMarkingVisitor : public ThreadVisitor {
: collector_(collector) {}
void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
- for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
- Code* code = it.frame()->unchecked_code();
- MarkBit code_bit = Marking::MarkBitFrom(code);
- collector_->MarkObject(it.frame()->unchecked_code(), code_bit);
- }
+ collector_->PrepareThreadForCodeFlushing(isolate, top);
}
private:
@@ -1397,8 +1612,8 @@ class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
if (obj->IsSharedFunctionInfo()) {
SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj);
MarkBit shared_mark = Marking::MarkBitFrom(shared);
- MarkBit code_mark = Marking::MarkBitFrom(shared->unchecked_code());
- collector_->MarkObject(shared->unchecked_code(), code_mark);
+ MarkBit code_mark = Marking::MarkBitFrom(shared->code());
+ collector_->MarkObject(shared->code(), code_mark);
collector_->MarkObject(shared, shared_mark);
}
}
@@ -1408,6 +1623,44 @@ class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
};
+void MarkCompactCollector::MarkInlinedFunctionsCode(Code* code) {
+ // For optimized functions we should retain both non-optimized version
+ // of it's code and non-optimized version of all inlined functions.
+ // This is required to support bailing out from inlined code.
+ DeoptimizationInputData* data =
+ DeoptimizationInputData::cast(code->deoptimization_data());
+
+ FixedArray* literals = data->LiteralArray();
+
+ for (int i = 0, count = data->InlinedFunctionCount()->value();
+ i < count;
+ i++) {
+ JSFunction* inlined = JSFunction::cast(literals->get(i));
+ Code* inlined_code = inlined->shared()->code();
+ MarkBit inlined_code_mark = Marking::MarkBitFrom(inlined_code);
+ MarkObject(inlined_code, inlined_code_mark);
+ }
+}
+
+
+void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
+ ThreadLocalTop* top) {
+ for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
+ // Note: for the frame that has a pending lazy deoptimization
+ // StackFrame::unchecked_code will return a non-optimized code object for
+ // the outermost function and StackFrame::LookupCode will return
+ // actual optimized code object.
+ StackFrame* frame = it.frame();
+ Code* code = frame->unchecked_code();
+ MarkBit code_mark = Marking::MarkBitFrom(code);
+ MarkObject(code, code_mark);
+ if (frame->is_optimized()) {
+ MarkInlinedFunctionsCode(frame->LookupCode());
+ }
+ }
+}
+
+
void MarkCompactCollector::PrepareForCodeFlushing() {
ASSERT(heap() == Isolate::Current()->heap());
@@ -1435,11 +1688,8 @@ void MarkCompactCollector::PrepareForCodeFlushing() {
// Make sure we are not referencing the code from the stack.
ASSERT(this == heap()->mark_compact_collector());
- for (StackFrameIterator it; !it.done(); it.Advance()) {
- Code* code = it.frame()->unchecked_code();
- MarkBit code_mark = Marking::MarkBitFrom(code);
- MarkObject(code, code_mark);
- }
+ PrepareThreadForCodeFlushing(heap()->isolate(),
+ heap()->isolate()->thread_local_top());
// Iterate the archived stacks in all threads to check if
// the code is referenced.
@@ -1513,12 +1763,11 @@ class SymbolTableCleaner : public ObjectVisitor {
// Since no objects have yet been moved we can safely access the map of
// the object.
- if (o->IsExternalString() ||
- (o->IsHeapObject() && HeapObject::cast(o)->map()->has_external_resource())) {
- heap_->FinalizeExternalString(HeapObject::cast(*p));
+ if (o->IsExternalString()) {
+ heap_->FinalizeExternalString(String::cast(*p));
}
- // Set the entry to null_value (as deleted).
- *p = heap_->null_value();
+ // Set the entry to the_hole_value (as deleted).
+ *p = heap_->the_hole_value();
pointers_removed_++;
}
}
@@ -1553,9 +1802,7 @@ void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) {
ASSERT(HEAP->Contains(object));
if (object->IsMap()) {
Map* map = Map::cast(object);
- if (FLAG_cleanup_code_caches_at_gc) {
- map->ClearCodeCache(heap());
- }
+ heap_->ClearCacheOnMap(map);
// When map collection is enabled we have to mark through map's transitions
// in a special way to make transition links weak.
@@ -1580,8 +1827,8 @@ void MarkCompactCollector::MarkMapContents(Map* map) {
MarkBit mark = Marking::MarkBitFrom(prototype_transitions);
if (!mark.Get()) {
mark.Set();
- MemoryChunk::IncrementLiveBytes(prototype_transitions->address(),
- prototype_transitions->Size());
+ MemoryChunk::IncrementLiveBytesFromGC(prototype_transitions->address(),
+ prototype_transitions->Size());
}
Object** raw_descriptor_array_slot =
@@ -1604,6 +1851,16 @@ void MarkCompactCollector::MarkMapContents(Map* map) {
}
+void MarkCompactCollector::MarkAccessorPairSlot(HeapObject* accessors,
+ int offset) {
+ Object** slot = HeapObject::RawField(accessors, offset);
+ HeapObject* accessor = HeapObject::cast(*slot);
+ if (accessor->IsMap()) return;
+ RecordSlot(slot, slot, accessor);
+ MarkObjectAndPush(accessor);
+}
+
+
void MarkCompactCollector::MarkDescriptorArray(
DescriptorArray* descriptors) {
MarkBit descriptors_mark = Marking::MarkBitFrom(descriptors);
@@ -1631,28 +1888,37 @@ void MarkCompactCollector::MarkDescriptorArray(
PropertyDetails details(Smi::cast(contents->get(i + 1)));
Object** slot = contents->data_start() + i;
- Object* value = *slot;
- if (!value->IsHeapObject()) continue;
+ if (!(*slot)->IsHeapObject()) continue;
+ HeapObject* value = HeapObject::cast(*slot);
RecordSlot(slot, slot, *slot);
- PropertyType type = details.type();
- if (type < FIRST_PHANTOM_PROPERTY_TYPE) {
- HeapObject* object = HeapObject::cast(value);
- MarkBit mark = Marking::MarkBitFrom(HeapObject::cast(object));
- if (!mark.Get()) {
- SetMark(HeapObject::cast(object), mark);
- marking_deque_.PushBlack(object);
- }
- } else if (type == ELEMENTS_TRANSITION && value->IsFixedArray()) {
- // For maps with multiple elements transitions, the transition maps are
- // stored in a FixedArray. Keep the fixed array alive but not the maps
- // that it refers to.
- HeapObject* object = HeapObject::cast(value);
- MarkBit mark = Marking::MarkBitFrom(HeapObject::cast(object));
- if (!mark.Get()) {
- SetMark(HeapObject::cast(object), mark);
- }
+ switch (details.type()) {
+ case NORMAL:
+ case FIELD:
+ case CONSTANT_FUNCTION:
+ case HANDLER:
+ case INTERCEPTOR:
+ MarkObjectAndPush(value);
+ break;
+ case CALLBACKS:
+ if (!value->IsAccessorPair()) {
+ MarkObjectAndPush(value);
+ } else if (!MarkObjectWithoutPush(value)) {
+ MarkAccessorPairSlot(value, AccessorPair::kGetterOffset);
+ MarkAccessorPairSlot(value, AccessorPair::kSetterOffset);
+ }
+ break;
+ case ELEMENTS_TRANSITION:
+ // For maps with multiple elements transitions, the transition maps are
+ // stored in a FixedArray. Keep the fixed array alive but not the maps
+ // that it refers to.
+ if (value->IsFixedArray()) MarkObjectWithoutPush(value);
+ break;
+ case MAP_TRANSITION:
+ case CONSTANT_TRANSITION:
+ case NULL_DESCRIPTOR:
+ break;
}
}
// The DescriptorArray descriptors contains a pointer to its contents array,
@@ -1696,7 +1962,7 @@ static void DiscoverGreyObjectsWithIterator(Heap* heap,
MarkBit markbit = Marking::MarkBitFrom(object);
if ((object->map() != filler_map) && Marking::IsGrey(markbit)) {
Marking::GreyToBlack(markbit);
- MemoryChunk::IncrementLiveBytes(object->address(), object->Size());
+ MemoryChunk::IncrementLiveBytesFromGC(object->address(), object->Size());
marking_deque->PushBlack(object);
if (marking_deque->IsFull()) return;
}
@@ -1718,12 +1984,15 @@ static void DiscoverGreyObjectsOnPage(MarkingDeque* marking_deque, Page* p) {
int last_cell_index =
Bitmap::IndexToCell(
Bitmap::CellAlignIndex(
- p->AddressToMarkbitIndex(p->ObjectAreaEnd())));
+ p->AddressToMarkbitIndex(p->area_end())));
+
+ Address cell_base = p->area_start();
+ int cell_index = Bitmap::IndexToCell(
+ Bitmap::CellAlignIndex(
+ p->AddressToMarkbitIndex(cell_base)));
- int cell_index = Page::kFirstUsedCell;
- Address cell_base = p->ObjectAreaStart();
- for (cell_index = Page::kFirstUsedCell;
+ for (;
cell_index < last_cell_index;
cell_index++, cell_base += 32 * kPointerSize) {
ASSERT((unsigned)cell_index ==
@@ -1748,7 +2017,7 @@ static void DiscoverGreyObjectsOnPage(MarkingDeque* marking_deque, Page* p) {
Marking::GreyToBlack(markbit);
Address addr = cell_base + offset * kPointerSize;
HeapObject* object = HeapObject::FromAddress(addr);
- MemoryChunk::IncrementLiveBytes(object->address(), object->Size());
+ MemoryChunk::IncrementLiveBytesFromGC(object->address(), object->Size());
marking_deque->PushBlack(object);
if (marking_deque->IsFull()) return;
offset += 2;
@@ -2039,6 +2308,24 @@ void MarkCompactCollector::MarkLiveObjects() {
PrepareForCodeFlushing();
+ if (was_marked_incrementally_) {
+ // There is no write barrier on cells so we have to scan them now at the end
+ // of the incremental marking.
+ {
+ HeapObjectIterator cell_iterator(heap()->cell_space());
+ HeapObject* cell;
+ while ((cell = cell_iterator.Next()) != NULL) {
+ ASSERT(cell->IsJSGlobalPropertyCell());
+ if (IsMarked(cell)) {
+ int offset = JSGlobalPropertyCell::kValueOffset;
+ StaticMarkingVisitor::VisitPointer(
+ heap(),
+ reinterpret_cast<Object**>(cell->address() + offset));
+ }
+ }
+ }
+ }
+
RootMarkingVisitor root_visitor(heap());
MarkRoots(&root_visitor);
@@ -2100,8 +2387,10 @@ void MarkCompactCollector::AfterMarking() {
code_flusher_->ProcessCandidates();
}
- // Clean up dead objects from the runtime profiler.
- heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
+ if (!FLAG_watch_ic_patching) {
+ // Clean up dead objects from the runtime profiler.
+ heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
+ }
}
@@ -2125,7 +2414,7 @@ void MarkCompactCollector::ProcessMapCaches() {
i += MapCache::kEntrySize) {
Object* raw_key = map_cache->get(i);
if (raw_key == heap()->undefined_value() ||
- raw_key == heap()->null_value()) continue;
+ raw_key == heap()->the_hole_value()) continue;
STATIC_ASSERT(MapCache::kEntrySize == 2);
Object* raw_map = map_cache->get(i + 1);
if (raw_map->IsHeapObject() && IsMarked(raw_map)) {
@@ -2133,8 +2422,8 @@ void MarkCompactCollector::ProcessMapCaches() {
} else {
// Delete useless entries with unmarked maps.
ASSERT(raw_map->IsMap());
- map_cache->set_null_unchecked(heap(), i);
- map_cache->set_null_unchecked(heap(), i + 1);
+ map_cache->set_the_hole(i);
+ map_cache->set_the_hole(i + 1);
}
}
if (used_elements == 0) {
@@ -2205,89 +2494,92 @@ void MarkCompactCollector::ClearNonLiveTransitions() {
map->unchecked_constructor()->unchecked_shared()->AttachInitialMap(map);
}
- // Clear dead prototype transitions.
- int number_of_transitions = map->NumberOfProtoTransitions();
- FixedArray* prototype_transitions = map->prototype_transitions();
-
- int new_number_of_transitions = 0;
- const int header = Map::kProtoTransitionHeaderSize;
- const int proto_offset =
- header + Map::kProtoTransitionPrototypeOffset;
- const int map_offset = header + Map::kProtoTransitionMapOffset;
- const int step = Map::kProtoTransitionElementsPerEntry;
- for (int i = 0; i < number_of_transitions; i++) {
- Object* prototype = prototype_transitions->get(proto_offset + i * step);
- Object* cached_map = prototype_transitions->get(map_offset + i * step);
- if (IsMarked(prototype) && IsMarked(cached_map)) {
- if (new_number_of_transitions != i) {
- prototype_transitions->set_unchecked(
- heap_,
- proto_offset + new_number_of_transitions * step,
- prototype,
- UPDATE_WRITE_BARRIER);
- prototype_transitions->set_unchecked(
- heap_,
- map_offset + new_number_of_transitions * step,
- cached_map,
- SKIP_WRITE_BARRIER);
- }
- }
+ ClearNonLivePrototypeTransitions(map);
+ ClearNonLiveMapTransitions(map, map_mark);
+ }
+}
- // Fill slots that became free with undefined value.
- Object* undefined = heap()->undefined_value();
- for (int i = new_number_of_transitions * step;
- i < number_of_transitions * step;
- i++) {
- // The undefined object is on a page that is never compacted and never
- // in new space so it is OK to skip the write barrier. Also it's a
- // root.
- prototype_transitions->set_unchecked(heap_,
- header + i,
- undefined,
- SKIP_WRITE_BARRIER);
-
- Object** undefined_slot =
- prototype_transitions->data_start() + i;
- RecordSlot(undefined_slot, undefined_slot, undefined);
- }
- map->SetNumberOfProtoTransitions(new_number_of_transitions);
- }
-
- // Follow the chain of back pointers to find the prototype.
- Map* current = map;
- while (current->IsMap()) {
- current = reinterpret_cast<Map*>(current->prototype());
- ASSERT(current->IsHeapObject());
- }
- Object* real_prototype = current;
-
- // Follow back pointers, setting them to prototype,
- // clearing map transitions when necessary.
- current = map;
- bool on_dead_path = !map_mark.Get();
- Object* next;
- while (current->IsMap()) {
- next = current->prototype();
- // There should never be a dead map above a live map.
- MarkBit current_mark = Marking::MarkBitFrom(current);
- bool is_alive = current_mark.Get();
- ASSERT(on_dead_path || is_alive);
-
- // A live map above a dead map indicates a dead transition.
- // This test will always be false on the first iteration.
- if (on_dead_path && is_alive) {
- on_dead_path = false;
- current->ClearNonLiveTransitions(heap(), real_prototype);
- }
- *HeapObject::RawField(current, Map::kPrototypeOffset) =
- real_prototype;
- if (is_alive) {
- Object** slot = HeapObject::RawField(current, Map::kPrototypeOffset);
- RecordSlot(slot, slot, real_prototype);
+void MarkCompactCollector::ClearNonLivePrototypeTransitions(Map* map) {
+ int number_of_transitions = map->NumberOfProtoTransitions();
+ FixedArray* prototype_transitions = map->prototype_transitions();
+
+ int new_number_of_transitions = 0;
+ const int header = Map::kProtoTransitionHeaderSize;
+ const int proto_offset = header + Map::kProtoTransitionPrototypeOffset;
+ const int map_offset = header + Map::kProtoTransitionMapOffset;
+ const int step = Map::kProtoTransitionElementsPerEntry;
+ for (int i = 0; i < number_of_transitions; i++) {
+ Object* prototype = prototype_transitions->get(proto_offset + i * step);
+ Object* cached_map = prototype_transitions->get(map_offset + i * step);
+ if (IsMarked(prototype) && IsMarked(cached_map)) {
+ int proto_index = proto_offset + new_number_of_transitions * step;
+ int map_index = map_offset + new_number_of_transitions * step;
+ if (new_number_of_transitions != i) {
+ prototype_transitions->set_unchecked(
+ heap_,
+ proto_index,
+ prototype,
+ UPDATE_WRITE_BARRIER);
+ prototype_transitions->set_unchecked(
+ heap_,
+ map_index,
+ cached_map,
+ SKIP_WRITE_BARRIER);
}
- current = reinterpret_cast<Map*>(next);
+ Object** slot =
+ HeapObject::RawField(prototype_transitions,
+ FixedArray::OffsetOfElementAt(proto_index));
+ RecordSlot(slot, slot, prototype);
+ new_number_of_transitions++;
+ }
+ }
+
+ if (new_number_of_transitions != number_of_transitions) {
+ map->SetNumberOfProtoTransitions(new_number_of_transitions);
+ }
+
+ // Fill slots that became free with undefined value.
+ for (int i = new_number_of_transitions * step;
+ i < number_of_transitions * step;
+ i++) {
+ prototype_transitions->set_undefined(heap_, header + i);
+ }
+}
+
+
+void MarkCompactCollector::ClearNonLiveMapTransitions(Map* map,
+ MarkBit map_mark) {
+ // Follow the chain of back pointers to find the prototype.
+ Object* real_prototype = map;
+ while (real_prototype->IsMap()) {
+ real_prototype = Map::cast(real_prototype)->prototype();
+ ASSERT(real_prototype->IsHeapObject());
+ }
+
+ // Follow back pointers, setting them to prototype, clearing map transitions
+ // when necessary.
+ Map* current = map;
+ bool current_is_alive = map_mark.Get();
+ bool on_dead_path = !current_is_alive;
+ while (current->IsMap()) {
+ Object* next = current->prototype();
+ // There should never be a dead map above a live map.
+ ASSERT(on_dead_path || current_is_alive);
+
+ // A live map above a dead map indicates a dead transition. This test will
+ // always be false on the first iteration.
+ if (on_dead_path && current_is_alive) {
+ on_dead_path = false;
+ current->ClearNonLiveTransitions(heap(), real_prototype);
}
+
+ Object** slot = HeapObject::RawField(current, Map::kPrototypeOffset);
+ *slot = real_prototype;
+ if (current_is_alive) RecordSlot(slot, slot, real_prototype);
+
+ current = reinterpret_cast<Map*>(next);
+ current_is_alive = Marking::MarkBitFrom(current).Get();
}
}
@@ -2297,7 +2589,7 @@ void MarkCompactCollector::ProcessWeakMaps() {
while (weak_map_obj != Smi::FromInt(0)) {
ASSERT(MarkCompactCollector::IsMarked(HeapObject::cast(weak_map_obj)));
JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj);
- ObjectHashTable* table = weak_map->unchecked_table();
+ ObjectHashTable* table = ObjectHashTable::cast(weak_map->table());
for (int i = 0; i < table->Capacity(); i++) {
if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) {
Object* value = table->get(table->EntryToValueIndex(i));
@@ -2318,10 +2610,10 @@ void MarkCompactCollector::ClearWeakMaps() {
while (weak_map_obj != Smi::FromInt(0)) {
ASSERT(MarkCompactCollector::IsMarked(HeapObject::cast(weak_map_obj)));
JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj);
- ObjectHashTable* table = weak_map->unchecked_table();
+ ObjectHashTable* table = ObjectHashTable::cast(weak_map->table());
for (int i = 0; i < table->Capacity(); i++) {
if (!MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) {
- table->RemoveEntry(i, heap());
+ table->RemoveEntry(i);
}
}
weak_map_obj = weak_map->next();
@@ -2488,15 +2780,15 @@ static void UpdatePointer(HeapObject** p, HeapObject* object) {
}
-static HeapObject* UpdateReferenceInExternalStringTableEntry(Heap* heap,
- Object** p) {
+static String* UpdateReferenceInExternalStringTableEntry(Heap* heap,
+ Object** p) {
MapWord map_word = HeapObject::cast(*p)->map_word();
if (map_word.IsForwardingAddress()) {
- return HeapObject::cast(map_word.ToForwardingAddress());
+ return String::cast(map_word.ToForwardingAddress());
}
- return HeapObject::cast(*p);
+ return String::cast(*p);
}
@@ -2504,7 +2796,7 @@ bool MarkCompactCollector::TryPromoteObject(HeapObject* object,
int object_size) {
Object* result;
- if (object_size > heap()->MaxObjectSizeInPagedSpace()) {
+ if (object_size > Page::kMaxNonCodeHeapObjectSize) {
MaybeObject* maybe_result =
heap()->lo_space()->AllocateRaw(object_size, NOT_EXECUTABLE);
if (maybe_result->ToObject(&result)) {
@@ -2540,6 +2832,10 @@ bool MarkCompactCollector::TryPromoteObject(HeapObject* object,
void MarkCompactCollector::EvacuateNewSpace() {
+ // There are soft limits in the allocation code, designed to trigger a mark
+ // sweep collection by failing allocations. But since we are already in
+ // a mark-sweep allocation, there is no sense in trying to trigger one.
+ AlwaysAllocateScope scope;
heap()->CheckNewSpaceExpansionCriteria();
NewSpace* new_space = heap()->new_space();
@@ -2618,13 +2914,16 @@ void MarkCompactCollector::EvacuateLiveObjectsFromPage(Page* p) {
int last_cell_index =
Bitmap::IndexToCell(
Bitmap::CellAlignIndex(
- p->AddressToMarkbitIndex(p->ObjectAreaEnd())));
+ p->AddressToMarkbitIndex(p->area_end())));
+
+ Address cell_base = p->area_start();
+ int cell_index = Bitmap::IndexToCell(
+ Bitmap::CellAlignIndex(
+ p->AddressToMarkbitIndex(cell_base)));
- int cell_index = Page::kFirstUsedCell;
- Address cell_base = p->ObjectAreaStart();
int offsets[16];
- for (cell_index = Page::kFirstUsedCell;
+ for (;
cell_index < last_cell_index;
cell_index++, cell_base += 32 * kPointerSize) {
ASSERT((unsigned)cell_index ==
@@ -2779,12 +3078,16 @@ static void SweepPrecisely(PagedSpace* space,
int last_cell_index =
Bitmap::IndexToCell(
Bitmap::CellAlignIndex(
- p->AddressToMarkbitIndex(p->ObjectAreaEnd())));
+ p->AddressToMarkbitIndex(p->area_end())));
+
+ Address free_start = p->area_start();
+ int cell_index =
+ Bitmap::IndexToCell(
+ Bitmap::CellAlignIndex(
+ p->AddressToMarkbitIndex(free_start)));
- int cell_index = Page::kFirstUsedCell;
- Address free_start = p->ObjectAreaStart();
ASSERT(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0);
- Address object_address = p->ObjectAreaStart();
+ Address object_address = free_start;
int offsets[16];
SkipList* skip_list = p->skip_list();
@@ -2793,7 +3096,7 @@ static void SweepPrecisely(PagedSpace* space,
skip_list->Clear();
}
- for (cell_index = Page::kFirstUsedCell;
+ for (;
cell_index < last_cell_index;
cell_index++, object_address += 32 * kPointerSize) {
ASSERT((unsigned)cell_index ==
@@ -2830,8 +3133,8 @@ static void SweepPrecisely(PagedSpace* space,
// Clear marking bits for current cell.
cells[cell_index] = 0;
}
- if (free_start != p->ObjectAreaEnd()) {
- space->Free(free_start, static_cast<int>(p->ObjectAreaEnd() - free_start));
+ if (free_start != p->area_end()) {
+ space->Free(free_start, static_cast<int>(p->area_end() - free_start));
}
p->ResetLiveBytes();
}
@@ -2954,109 +3257,131 @@ void MarkCompactCollector::ProcessInvalidatedCode(ObjectVisitor* visitor) {
void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
- bool code_slots_filtering_required = MarkInvalidatedCode();
+ bool code_slots_filtering_required;
+ { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
+ code_slots_filtering_required = MarkInvalidatedCode();
+
+ EvacuateNewSpace();
+ }
- EvacuateNewSpace();
- EvacuatePages();
+
+ { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_EVACUATE_PAGES);
+ EvacuatePages();
+ }
// Second pass: find pointers to new space and update them.
PointersUpdatingVisitor updating_visitor(heap());
- // Update pointers in to space.
- SemiSpaceIterator to_it(heap()->new_space()->bottom(),
- heap()->new_space()->top());
- for (HeapObject* object = to_it.Next();
- object != NULL;
- object = to_it.Next()) {
- Map* map = object->map();
- object->IterateBody(map->instance_type(),
- object->SizeFromMap(map),
- &updating_visitor);
+ { GCTracer::Scope gc_scope(tracer_,
+ GCTracer::Scope::MC_UPDATE_NEW_TO_NEW_POINTERS);
+ // Update pointers in to space.
+ SemiSpaceIterator to_it(heap()->new_space()->bottom(),
+ heap()->new_space()->top());
+ for (HeapObject* object = to_it.Next();
+ object != NULL;
+ object = to_it.Next()) {
+ Map* map = object->map();
+ object->IterateBody(map->instance_type(),
+ object->SizeFromMap(map),
+ &updating_visitor);
+ }
}
- // Update roots.
- heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE);
- LiveObjectList::IterateElements(&updating_visitor);
+ { GCTracer::Scope gc_scope(tracer_,
+ GCTracer::Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS);
+ // Update roots.
+ heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE);
+ LiveObjectList::IterateElements(&updating_visitor);
+ }
- {
+ { GCTracer::Scope gc_scope(tracer_,
+ GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS);
StoreBufferRebuildScope scope(heap_,
heap_->store_buffer(),
&Heap::ScavengeStoreBufferCallback);
heap_->store_buffer()->IteratePointersToNewSpace(&UpdatePointer);
}
- SlotsBuffer::UpdateSlotsRecordedIn(heap_,
- migration_slots_buffer_,
- code_slots_filtering_required);
- if (FLAG_trace_fragmentation) {
- PrintF(" migration slots buffer: %d\n",
- SlotsBuffer::SizeOfChain(migration_slots_buffer_));
- }
-
- if (compacting_ && was_marked_incrementally_) {
- // It's difficult to filter out slots recorded for large objects.
- LargeObjectIterator it(heap_->lo_space());
- for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
- // LargeObjectSpace is not swept yet thus we have to skip
- // dead objects explicitly.
- if (!IsMarked(obj)) continue;
-
- Page* p = Page::FromAddress(obj->address());
- if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) {
- obj->Iterate(&updating_visitor);
- p->ClearFlag(Page::RESCAN_ON_EVACUATION);
+ { GCTracer::Scope gc_scope(tracer_,
+ GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED);
+ SlotsBuffer::UpdateSlotsRecordedIn(heap_,
+ migration_slots_buffer_,
+ code_slots_filtering_required);
+ if (FLAG_trace_fragmentation) {
+ PrintF(" migration slots buffer: %d\n",
+ SlotsBuffer::SizeOfChain(migration_slots_buffer_));
+ }
+
+ if (compacting_ && was_marked_incrementally_) {
+ // It's difficult to filter out slots recorded for large objects.
+ LargeObjectIterator it(heap_->lo_space());
+ for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
+ // LargeObjectSpace is not swept yet thus we have to skip
+ // dead objects explicitly.
+ if (!IsMarked(obj)) continue;
+
+ Page* p = Page::FromAddress(obj->address());
+ if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) {
+ obj->Iterate(&updating_visitor);
+ p->ClearFlag(Page::RESCAN_ON_EVACUATION);
+ }
}
}
}
int npages = evacuation_candidates_.length();
- for (int i = 0; i < npages; i++) {
- Page* p = evacuation_candidates_[i];
- ASSERT(p->IsEvacuationCandidate() ||
- p->IsFlagSet(Page::RESCAN_ON_EVACUATION));
-
- if (p->IsEvacuationCandidate()) {
- SlotsBuffer::UpdateSlotsRecordedIn(heap_,
- p->slots_buffer(),
- code_slots_filtering_required);
- if (FLAG_trace_fragmentation) {
- PrintF(" page %p slots buffer: %d\n",
- reinterpret_cast<void*>(p),
- SlotsBuffer::SizeOfChain(p->slots_buffer()));
- }
-
- // Important: skip list should be cleared only after roots were updated
- // because root iteration traverses the stack and might have to find code
- // objects from non-updated pc pointing into evacuation candidate.
- SkipList* list = p->skip_list();
- if (list != NULL) list->Clear();
- } else {
- if (FLAG_gc_verbose) {
- PrintF("Sweeping 0x%" V8PRIxPTR " during evacuation.\n",
- reinterpret_cast<intptr_t>(p));
- }
- PagedSpace* space = static_cast<PagedSpace*>(p->owner());
- p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION);
+ { GCTracer::Scope gc_scope(
+ tracer_, GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED);
+ for (int i = 0; i < npages; i++) {
+ Page* p = evacuation_candidates_[i];
+ ASSERT(p->IsEvacuationCandidate() ||
+ p->IsFlagSet(Page::RESCAN_ON_EVACUATION));
+
+ if (p->IsEvacuationCandidate()) {
+ SlotsBuffer::UpdateSlotsRecordedIn(heap_,
+ p->slots_buffer(),
+ code_slots_filtering_required);
+ if (FLAG_trace_fragmentation) {
+ PrintF(" page %p slots buffer: %d\n",
+ reinterpret_cast<void*>(p),
+ SlotsBuffer::SizeOfChain(p->slots_buffer()));
+ }
- switch (space->identity()) {
- case OLD_DATA_SPACE:
- SweepConservatively(space, p);
- break;
- case OLD_POINTER_SPACE:
- SweepPrecisely<SWEEP_AND_VISIT_LIVE_OBJECTS, IGNORE_SKIP_LIST>(
- space, p, &updating_visitor);
- break;
- case CODE_SPACE:
- SweepPrecisely<SWEEP_AND_VISIT_LIVE_OBJECTS, REBUILD_SKIP_LIST>(
- space, p, &updating_visitor);
- break;
- default:
- UNREACHABLE();
- break;
+ // Important: skip list should be cleared only after roots were updated
+ // because root iteration traverses the stack and might have to find
+ // code objects from non-updated pc pointing into evacuation candidate.
+ SkipList* list = p->skip_list();
+ if (list != NULL) list->Clear();
+ } else {
+ if (FLAG_gc_verbose) {
+ PrintF("Sweeping 0x%" V8PRIxPTR " during evacuation.\n",
+ reinterpret_cast<intptr_t>(p));
+ }
+ PagedSpace* space = static_cast<PagedSpace*>(p->owner());
+ p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION);
+
+ switch (space->identity()) {
+ case OLD_DATA_SPACE:
+ SweepConservatively(space, p);
+ break;
+ case OLD_POINTER_SPACE:
+ SweepPrecisely<SWEEP_AND_VISIT_LIVE_OBJECTS, IGNORE_SKIP_LIST>(
+ space, p, &updating_visitor);
+ break;
+ case CODE_SPACE:
+ SweepPrecisely<SWEEP_AND_VISIT_LIVE_OBJECTS, REBUILD_SKIP_LIST>(
+ space, p, &updating_visitor);
+ break;
+ default:
+ UNREACHABLE();
+ break;
+ }
}
}
}
+ GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_UPDATE_MISC_POINTERS);
+
// Update pointers from cells.
HeapObjectIterator cell_iterator(heap_->cell_space());
for (HeapObject* cell = cell_iterator.Next();
@@ -3079,9 +3404,11 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
heap_->UpdateReferencesInExternalStringTable(
&UpdateReferenceInExternalStringTableEntry);
- // Update JSFunction pointers from the runtime profiler.
- heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
- &updating_visitor);
+ if (!FLAG_watch_ic_patching) {
+ // Update JSFunction pointers from the runtime profiler.
+ heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
+ &updating_visitor);
+ }
EvacuationWeakObjectRetainer evacuation_object_retainer;
heap()->ProcessWeakReferences(&evacuation_object_retainer);
@@ -3102,10 +3429,11 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
Page* p = evacuation_candidates_[i];
if (!p->IsEvacuationCandidate()) continue;
PagedSpace* space = static_cast<PagedSpace*>(p->owner());
- space->Free(p->ObjectAreaStart(), Page::kObjectAreaSize);
+ space->Free(p->area_start(), p->area_size());
p->set_scan_on_scavenge(false);
slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address());
- p->ClearEvacuationCandidate();
+ p->ResetLiveBytes();
+ space->ReleasePage(p);
}
evacuation_candidates_.Rewind(0);
compacting_ = false;
@@ -3403,23 +3731,27 @@ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) {
int last_cell_index =
Bitmap::IndexToCell(
Bitmap::CellAlignIndex(
- p->AddressToMarkbitIndex(p->ObjectAreaEnd())));
+ p->AddressToMarkbitIndex(p->area_end())));
+
+ int cell_index =
+ Bitmap::IndexToCell(
+ Bitmap::CellAlignIndex(
+ p->AddressToMarkbitIndex(p->area_start())));
- int cell_index = Page::kFirstUsedCell;
intptr_t freed_bytes = 0;
// This is the start of the 32 word block that we are currently looking at.
- Address block_address = p->ObjectAreaStart();
+ Address block_address = p->area_start();
// Skip over all the dead objects at the start of the page and mark them free.
- for (cell_index = Page::kFirstUsedCell;
+ for (;
cell_index < last_cell_index;
cell_index++, block_address += 32 * kPointerSize) {
if (cells[cell_index] != 0) break;
}
- size_t size = block_address - p->ObjectAreaStart();
+ size_t size = block_address - p->area_start();
if (cell_index == last_cell_index) {
- freed_bytes += static_cast<int>(space->Free(p->ObjectAreaStart(),
+ freed_bytes += static_cast<int>(space->Free(p->area_start(),
static_cast<int>(size)));
ASSERT_EQ(0, p->LiveBytes());
return freed_bytes;
@@ -3428,8 +3760,8 @@ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) {
// first live object.
Address free_end = StartOfLiveObject(block_address, cells[cell_index]);
// Free the first free space.
- size = free_end - p->ObjectAreaStart();
- freed_bytes += space->Free(p->ObjectAreaStart(),
+ size = free_end - p->area_start();
+ freed_bytes += space->Free(p->area_start(),
static_cast<int>(size));
// The start of the current free area is represented in undigested form by
// the address of the last 32-word section that contained a live object and
@@ -3483,8 +3815,7 @@ intptr_t MarkCompactCollector::SweepConservatively(PagedSpace* space, Page* p) {
}
-void MarkCompactCollector::SweepSpace(PagedSpace* space,
- SweeperType sweeper) {
+void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
space->set_was_swept_conservatively(sweeper == CONSERVATIVE ||
sweeper == LAZY_CONSERVATIVE);
@@ -3493,10 +3824,16 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space,
PageIterator it(space);
intptr_t freed_bytes = 0;
+ int pages_swept = 0;
intptr_t newspace_size = space->heap()->new_space()->Size();
bool lazy_sweeping_active = false;
bool unused_page_present = false;
+ intptr_t old_space_size = heap()->PromotedSpaceSize();
+ intptr_t space_left =
+ Min(heap()->OldGenPromotionLimit(old_space_size),
+ heap()->OldGenAllocationLimit(old_space_size)) - old_space_size;
+
while (it.has_next()) {
Page* p = it.next();
@@ -3514,14 +3851,6 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space,
continue;
}
- if (lazy_sweeping_active) {
- if (FLAG_gc_verbose) {
- PrintF("Sweeping 0x%" V8PRIxPTR " lazily postponed.\n",
- reinterpret_cast<intptr_t>(p));
- }
- continue;
- }
-
// One unused page is kept, all further are released before sweeping them.
if (p->LiveBytes() == 0) {
if (unused_page_present) {
@@ -3529,37 +3858,63 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space,
PrintF("Sweeping 0x%" V8PRIxPTR " released page.\n",
reinterpret_cast<intptr_t>(p));
}
+ // Adjust unswept free bytes because releasing a page expects said
+ // counter to be accurate for unswept pages.
+ space->IncreaseUnsweptFreeBytes(p);
space->ReleasePage(p);
continue;
}
unused_page_present = true;
}
- if (FLAG_gc_verbose) {
- PrintF("Sweeping 0x%" V8PRIxPTR " with sweeper %d.\n",
- reinterpret_cast<intptr_t>(p),
- sweeper);
+ if (lazy_sweeping_active) {
+ if (FLAG_gc_verbose) {
+ PrintF("Sweeping 0x%" V8PRIxPTR " lazily postponed.\n",
+ reinterpret_cast<intptr_t>(p));
+ }
+ space->IncreaseUnsweptFreeBytes(p);
+ continue;
}
switch (sweeper) {
case CONSERVATIVE: {
+ if (FLAG_gc_verbose) {
+ PrintF("Sweeping 0x%" V8PRIxPTR " conservatively.\n",
+ reinterpret_cast<intptr_t>(p));
+ }
SweepConservatively(space, p);
+ pages_swept++;
break;
}
case LAZY_CONSERVATIVE: {
+ if (FLAG_gc_verbose) {
+ PrintF("Sweeping 0x%" V8PRIxPTR " conservatively as needed.\n",
+ reinterpret_cast<intptr_t>(p));
+ }
freed_bytes += SweepConservatively(space, p);
- if (freed_bytes >= newspace_size && p != space->LastPage()) {
- space->SetPagesToSweep(p->next_page(), space->anchor());
+ pages_swept++;
+ if (space_left + freed_bytes > newspace_size) {
+ space->SetPagesToSweep(p->next_page());
lazy_sweeping_active = true;
+ } else {
+ if (FLAG_gc_verbose) {
+ PrintF("Only %" V8PRIdPTR " bytes freed. Still sweeping.\n",
+ freed_bytes);
+ }
}
break;
}
case PRECISE: {
+ if (FLAG_gc_verbose) {
+ PrintF("Sweeping 0x%" V8PRIxPTR " precisely.\n",
+ reinterpret_cast<intptr_t>(p));
+ }
if (space->identity() == CODE_SPACE) {
SweepPrecisely<SWEEP_ONLY, REBUILD_SKIP_LIST>(space, p, NULL);
} else {
SweepPrecisely<SWEEP_ONLY, IGNORE_SKIP_LIST>(space, p, NULL);
}
+ pages_swept++;
break;
}
default: {
@@ -3568,6 +3923,12 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space,
}
}
+ if (FLAG_gc_verbose) {
+ PrintF("SweepSpace: %s (%d pages swept)\n",
+ AllocationSpaceName(space->identity()),
+ pages_swept);
+ }
+
// Give pages that are queued to be freed back to the OS.
heap()->FreeQueuedChunks();
}
@@ -3580,6 +3941,7 @@ void MarkCompactCollector::SweepSpaces() {
#endif
SweeperType how_to_sweep =
FLAG_lazy_sweeping ? LAZY_CONSERVATIVE : CONSERVATIVE;
+ if (FLAG_expose_gc) how_to_sweep = CONSERVATIVE;
if (sweep_precisely_) how_to_sweep = PRECISE;
// Noncompacting collections simply sweep the spaces to clear the mark
// bits and free the nonlive blocks (for old and map spaces). We sweep
@@ -3594,9 +3956,7 @@ void MarkCompactCollector::SweepSpaces() {
SweepSpace(heap()->cell_space(), PRECISE);
- { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
- EvacuateNewSpaceAndCandidates();
- }
+ EvacuateNewSpaceAndCandidates();
// ClearNonLiveTransitions depends on precise sweeping of map space to
// detect whether unmarked map became dead in this collection or in one
diff --git a/src/3rdparty/v8/src/mark-compact.h b/src/3rdparty/v8/src/mark-compact.h
index 254f175..66ffd19 100644
--- a/src/3rdparty/v8/src/mark-compact.h
+++ b/src/3rdparty/v8/src/mark-compact.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -198,7 +198,7 @@ class MarkingDeque {
ASSERT(object->IsHeapObject());
if (IsFull()) {
Marking::BlackToGrey(object);
- MemoryChunk::IncrementLiveBytes(object->address(), -object->Size());
+ MemoryChunk::IncrementLiveBytesFromGC(object->address(), -object->Size());
SetOverflowed();
} else {
array_[top_] = object;
@@ -374,7 +374,7 @@ class SlotsBuffer {
static const int kNumberOfElements = 1021;
private:
- static const int kChainLengthThreshold = 6;
+ static const int kChainLengthThreshold = 15;
intptr_t idx_;
intptr_t chain_length_;
@@ -383,6 +383,10 @@ class SlotsBuffer {
};
+// Defined in isolate.h.
+class ThreadLocalTop;
+
+
// -------------------------------------------------------------------------
// Mark-Compact collector
class MarkCompactCollector {
@@ -403,7 +407,7 @@ class MarkCompactCollector {
// object from the forwarding address of the previous live object in the
// page as input, and is updated to contain the offset to be used for the
// next live object in the same page. For spaces using a different
- // encoding (ie, contiguous spaces), the offset parameter is ignored.
+ // encoding (i.e., contiguous spaces), the offset parameter is ignored.
typedef void (*EncodingFunction)(Heap* heap,
HeapObject* old_object,
int object_size,
@@ -416,14 +420,9 @@ class MarkCompactCollector {
// Pointer to member function, used in IterateLiveObjects.
typedef int (MarkCompactCollector::*LiveObjectCallback)(HeapObject* obj);
- // Set the global force_compaction flag, it must be called before Prepare
- // to take effect.
+ // Set the global flags, it must be called before Prepare to take effect.
inline void SetFlags(int flags);
- inline bool PreciseSweepingRequired() {
- return sweep_precisely_;
- }
-
static void Initialize();
void CollectEvacuationCandidates(PagedSpace* space);
@@ -437,7 +436,12 @@ class MarkCompactCollector {
// Performs a global garbage collection.
void CollectGarbage();
- bool StartCompaction();
+ enum CompactionMode {
+ INCREMENTAL_COMPACTION,
+ NON_INCREMENTAL_COMPACTION
+ };
+
+ bool StartCompaction(CompactionMode mode);
void AbortCompaction();
@@ -568,6 +572,10 @@ class MarkCompactCollector {
// heap.
bool sweep_precisely_;
+ bool reduce_memory_footprint_;
+
+ bool abort_incremental_marking_;
+
// True if we are collecting slots to perform evacuation from evacuation
// candidates.
bool compacting_;
@@ -576,6 +584,8 @@ class MarkCompactCollector {
bool collect_maps_;
+ bool flush_monomorphic_ics_;
+
// A pointer to the current stack-allocated GC tracer object during a full
// collection (NULL before and after).
GCTracer* tracer_;
@@ -603,6 +613,14 @@ class MarkCompactCollector {
friend class CodeMarkingVisitor;
friend class SharedFunctionInfoMarkingVisitor;
+ // Mark non-optimize code for functions inlined into the given optimized
+ // code. This will prevent it from being flushed.
+ void MarkInlinedFunctionsCode(Code* code);
+
+ // Mark code objects that are active on the stack to prevent them
+ // from being flushed.
+ void PrepareThreadForCodeFlushing(Isolate* isolate, ThreadLocalTop* top);
+
void PrepareForCodeFlushing();
// Marking operations for objects reachable from roots.
@@ -610,8 +628,14 @@ class MarkCompactCollector {
void AfterMarking();
+ // Marks the object black and pushes it on the marking stack.
+ // This is for non-incremental marking.
INLINE(void MarkObject(HeapObject* obj, MarkBit mark_bit));
+ INLINE(bool MarkObjectWithoutPush(HeapObject* object));
+ INLINE(void MarkObjectAndPush(HeapObject* value));
+
+ // Marks the object black. This is for non-incremental marking.
INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit));
void ProcessNewlyMarkedObject(HeapObject* obj);
@@ -625,6 +649,7 @@ class MarkCompactCollector {
// Mark a Map and its DescriptorArray together, skipping transitions.
void MarkMapContents(Map* map);
+ void MarkAccessorPairSlot(HeapObject* accessors, int offset);
void MarkDescriptorArray(DescriptorArray* descriptors);
// Mark the heap roots and all objects reachable from them.
@@ -672,6 +697,8 @@ class MarkCompactCollector {
// Map transitions from a live map to a dead map must be killed.
// We replace them with a null descriptor, with the same key.
void ClearNonLiveTransitions();
+ void ClearNonLivePrototypeTransitions(Map* map);
+ void ClearNonLiveMapTransitions(Map* map, MarkBit map_mark);
// Marking detaches initial maps from SharedFunctionInfo objects
// to make this reference weak. We need to reattach initial maps
diff --git a/src/3rdparty/v8/src/math.js b/src/3rdparty/v8/src/math.js
index b5a6d18..8e735c4 100644
--- a/src/3rdparty/v8/src/math.js
+++ b/src/3rdparty/v8/src/math.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -29,15 +29,15 @@
// Keep reference to original values of some global properties. This
// has the added benefit that the code in this file is isolated from
// changes to these properties.
-const $floor = MathFloor;
-const $random = MathRandom;
-const $abs = MathAbs;
+var $floor = MathFloor;
+var $random = MathRandom;
+var $abs = MathAbs;
// Instance class name can only be set on functions. That is the only
// purpose for MathConstructor.
function MathConstructor() {}
%FunctionSetInstanceClassName(MathConstructor, 'Math');
-const $Math = new MathConstructor();
+var $Math = new MathConstructor();
$Math.__proto__ = $Object.prototype;
%SetProperty(global, "Math", $Math, DONT_ENUM);
@@ -119,6 +119,19 @@ function MathLog(x) {
// ECMA 262 - 15.8.2.11
function MathMax(arg1, arg2) { // length == 2
var length = %_ArgumentsLength();
+ if (length == 2) {
+ if (!IS_NUMBER(arg1)) arg1 = NonNumberToNumber(arg1);
+ if (!IS_NUMBER(arg2)) arg2 = NonNumberToNumber(arg2);
+ if (arg2 > arg1) return arg2;
+ if (arg1 > arg2) return arg1;
+ if (arg1 == arg2) {
+ // Make sure -0 is considered less than +0. -0 is never a Smi, +0 can be
+ // a Smi or a heap number.
+ return (arg1 == 0 && !%_IsSmi(arg1) && 1 / arg1 < 0) ? arg2 : arg1;
+ }
+ // All comparisons failed, one of the arguments must be NaN.
+ return 0/0; // Compiler constant-folds this to NaN.
+ }
if (length == 0) {
return -1/0; // Compiler constant-folds this to -Infinity.
}
@@ -131,7 +144,7 @@ function MathMax(arg1, arg2) { // length == 2
if (NUMBER_IS_NAN(n)) return n;
// Make sure +0 is considered greater than -0. -0 is never a Smi, +0 can be
// a Smi or heap number.
- if (n > r || (r === 0 && n === 0 && !%_IsSmi(r) && 1 / r < 0)) r = n;
+ if (n > r || (r == 0 && n == 0 && !%_IsSmi(r) && 1 / r < 0)) r = n;
}
return r;
}
@@ -139,6 +152,19 @@ function MathMax(arg1, arg2) { // length == 2
// ECMA 262 - 15.8.2.12
function MathMin(arg1, arg2) { // length == 2
var length = %_ArgumentsLength();
+ if (length == 2) {
+ if (!IS_NUMBER(arg1)) arg1 = NonNumberToNumber(arg1);
+ if (!IS_NUMBER(arg2)) arg2 = NonNumberToNumber(arg2);
+ if (arg2 > arg1) return arg1;
+ if (arg1 > arg2) return arg2;
+ if (arg1 == arg2) {
+ // Make sure -0 is considered less than +0. -0 is never a Smi, +0 can be
+ // a Smi or a heap number.
+ return (arg1 == 0 && !%_IsSmi(arg1) && 1 / arg1 < 0) ? arg1 : arg2;
+ }
+ // All comparisons failed, one of the arguments must be NaN.
+ return 0/0; // Compiler constant-folds this to NaN.
+ }
if (length == 0) {
return 1/0; // Compiler constant-folds this to Infinity.
}
@@ -149,9 +175,9 @@ function MathMin(arg1, arg2) { // length == 2
var n = %_Arguments(i);
if (!IS_NUMBER(n)) n = NonNumberToNumber(n);
if (NUMBER_IS_NAN(n)) return n;
- // Make sure -0 is considered less than +0. -0 is never a Smi, +0 can b a
+ // Make sure -0 is considered less than +0. -0 is never a Smi, +0 can be a
// Smi or a heap number.
- if (n < r || (r === 0 && n === 0 && !%_IsSmi(n) && 1 / n < 0)) r = n;
+ if (n < r || (r == 0 && n == 0 && !%_IsSmi(n) && 1 / n < 0)) r = n;
}
return r;
}
@@ -189,7 +215,7 @@ function MathSqrt(x) {
// ECMA 262 - 15.8.2.18
function MathTan(x) {
if (!IS_NUMBER(x)) x = NonNumberToNumber(x);
- return %Math_tan(x);
+ return %_MathTan(x);
}
@@ -239,7 +265,7 @@ function SetUpMath() {
// Set up non-enumerable functions of the Math object and
// set their names.
- InstallFunctionsOnHiddenPrototype($Math, DONT_ENUM, $Array(
+ InstallFunctions($Math, DONT_ENUM, $Array(
"random", MathRandom,
"abs", MathAbs,
"acos", MathAcos,
diff --git a/src/3rdparty/v8/src/messages.js b/src/3rdparty/v8/src/messages.js
index b370422..a3adcf8 100644
--- a/src/3rdparty/v8/src/messages.js
+++ b/src/3rdparty/v8/src/messages.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -25,17 +25,16 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
// -------------------------------------------------------------------
//
// If this object gets passed to an error constructor the error will
// get an accessor for .message that constructs a descriptive error
// message on access.
-const kAddMessageAccessorsMarker = { };
+var kAddMessageAccessorsMarker = { };
// This will be lazily initialized when first needed (and forcibly
// overwritten even though it's const).
-const kMessages = 0;
+var kMessages = 0;
function FormatString(format, message) {
var args = %MessageGetArguments(message);
@@ -206,11 +205,12 @@ function FormatMessage(message) {
"illegal_break", ["Illegal break statement"],
"illegal_continue", ["Illegal continue statement"],
"illegal_return", ["Illegal return statement"],
+ "illegal_let", ["Illegal let declaration outside extended mode"],
"error_loading_debugger", ["Error loading debugger"],
"no_input_to_regexp", ["No input to ", "%0"],
"invalid_json", ["String '", "%0", "' is not valid JSON"],
"circular_structure", ["Converting circular structure to JSON"],
- "obj_ctor_property_non_object", ["Object.", "%0", " called on non-object"],
+ "called_on_non_object", ["%0", " called on non-object"],
"called_on_null_or_undefined", ["%0", " called on null or undefined"],
"array_indexof_not_defined", ["Array.getIndexOf: Argument undefined"],
"object_not_extensible", ["Can't add property ", "%0", ", object is not extensible"],
@@ -245,18 +245,26 @@ function FormatMessage(message) {
"unprotected_const", ["Illegal const declaration in unprotected statement context."],
"cant_prevent_ext_external_array_elements", ["Cannot prevent extension of an object with external array elements"],
"redef_external_array_element", ["Cannot redefine a property of an object with external array elements"],
+ "harmony_const_assign", ["Assignment to constant variable."],
+ "invalid_module_path", ["Module does not export '", "%0", "', or export is not itself a module"],
+ "module_type_error", ["Module '", "%0", "' used improperly"],
];
var messages = { __proto__ : null };
- var desc = new PropertyDescriptor();
- desc.setConfigurable(false);
- desc.setEnumerable(false);
- desc.setWritable(false);
for (var i = 0; i < messagesDictionary.length; i += 2) {
var key = messagesDictionary[i];
var format = messagesDictionary[i + 1];
- ObjectFreeze(format);
- desc.setValue(format);
- DefineOwnProperty(messages, key, desc);
+
+ for (var j = 0; j < format.length; j++) {
+ %IgnoreAttributesAndSetProperty(format, %_NumberToString(j), format[j],
+ DONT_DELETE | READ_ONLY | DONT_ENUM);
+ }
+ %IgnoreAttributesAndSetProperty(format, 'length', format.length,
+ DONT_DELETE | READ_ONLY | DONT_ENUM);
+ %PreventExtensions(format);
+ %IgnoreAttributesAndSetProperty(messages,
+ key,
+ format,
+ DONT_DELETE | DONT_ENUM | READ_ONLY);
}
%PreventExtensions(messages);
%IgnoreAttributesAndSetProperty(builtins, "kMessages",
@@ -389,7 +397,7 @@ function ScriptLocationFromPosition(position,
}
return new SourceLocation(this, position, line, column, start, end);
-};
+}
/**
@@ -419,7 +427,7 @@ function ScriptLocationFromLine(opt_line, opt_column, opt_offset_position) {
// resource.
var column = opt_column || 0;
if (line == 0) {
- column -= this.column_offset
+ column -= this.column_offset;
}
var offset_position = opt_offset_position || 0;
@@ -434,7 +442,8 @@ function ScriptLocationFromLine(opt_line, opt_column, opt_offset_position) {
return null;
}
- return this.locationFromPosition(this.line_ends[offset_line + line - 1] + 1 + column); // line > 0 here.
+ return this.locationFromPosition(
+ this.line_ends[offset_line + line - 1] + 1 + column); // line > 0 here.
}
}
@@ -450,8 +459,10 @@ function ScriptLocationFromLine(opt_line, opt_column, opt_offset_position) {
* invalid
*/
function ScriptSourceSlice(opt_from_line, opt_to_line) {
- var from_line = IS_UNDEFINED(opt_from_line) ? this.line_offset : opt_from_line;
- var to_line = IS_UNDEFINED(opt_to_line) ? this.line_offset + this.lineCount() : opt_to_line
+ var from_line = IS_UNDEFINED(opt_from_line) ? this.line_offset
+ : opt_from_line;
+ var to_line = IS_UNDEFINED(opt_to_line) ? this.line_offset + this.lineCount()
+ : opt_to_line;
// Adjust according to the offset within the resource.
from_line -= this.line_offset;
@@ -471,8 +482,10 @@ function ScriptSourceSlice(opt_from_line, opt_to_line) {
var to_position = to_line == 0 ? 0 : line_ends[to_line - 1] + 1;
// Return a source slice with line numbers re-adjusted to the resource.
- return new SourceSlice(this, from_line + this.line_offset, to_line + this.line_offset,
- from_position, to_position);
+ return new SourceSlice(this,
+ from_line + this.line_offset,
+ to_line + this.line_offset,
+ from_position, to_position);
}
@@ -505,7 +518,7 @@ function ScriptSourceLine(opt_line) {
function ScriptLineCount() {
// Return number of source lines.
return this.line_ends.length;
-};
+}
/**
@@ -522,6 +535,13 @@ function ScriptNameOrSourceURL() {
if (this.name) {
return this.name;
}
+
+ // The result is cached as on long scripts it takes noticable time to search
+ // for the sourceURL.
+ if (this.hasCachedNameOrSourceURL)
+ return this.cachedNameOrSourceURL;
+ this.hasCachedNameOrSourceURL = true;
+
// TODO(608): the spaces in a regexp below had to be escaped as \040
// because this file is being processed by js2c whose handling of spaces
// in regexps is broken. Also, ['"] are excluded from allowed URLs to
@@ -530,6 +550,7 @@ function ScriptNameOrSourceURL() {
// the scanner/parser.
var source = ToString(this.source);
var sourceUrlPos = %StringIndexOf(source, "sourceURL=", 0);
+ this.cachedNameOrSourceURL = this.name;
if (sourceUrlPos > 4) {
var sourceUrlPattern =
/\/\/@[\040\t]sourceURL=[\040\t]*([^\s\'\"]*)[\040\t]*$/gm;
@@ -540,15 +561,17 @@ function ScriptNameOrSourceURL() {
var match =
%_RegExpExec(sourceUrlPattern, source, sourceUrlPos - 4, matchInfo);
if (match) {
- return SubString(source, matchInfo[CAPTURE(2)], matchInfo[CAPTURE(3)]);
+ this.cachedNameOrSourceURL =
+ SubString(source, matchInfo[CAPTURE(2)], matchInfo[CAPTURE(3)]);
}
}
- return this.name;
+ return this.cachedNameOrSourceURL;
}
SetUpLockedPrototype(Script,
- $Array("source", "name", "line_ends", "line_offset", "column_offset"),
+ $Array("source", "name", "line_ends", "line_offset", "column_offset",
+ "cachedNameOrSourceURL", "hasCachedNameOrSourceURL" ),
$Array(
"lineFromPosition", ScriptLineFromPosition,
"locationFromPosition", ScriptLocationFromPosition,
@@ -570,10 +593,10 @@ SetUpLockedPrototype(Script,
* position : position within the source
* start : position of start of source context (inclusive)
* end : position of end of source context (not inclusive)
- * Source text for the source context is the character interval [start, end[. In
- * most cases end will point to a newline character. It might point just past
- * the final position of the source if the last source line does not end with a
- * newline character.
+ * Source text for the source context is the character interval
+ * [start, end[. In most cases end will point to a newline character.
+ * It might point just past the final position of the source if the last
+ * source line does not end with a newline character.
* @param {Script} script The Script object for which this is a location
* @param {number} position Source position for the location
* @param {number} line The line number for the location
@@ -591,7 +614,7 @@ function SourceLocation(script, position, line, column, start, end) {
this.end = end;
}
-const kLineLengthLimit = 78;
+var kLineLengthLimit = 78;
/**
* Restrict source location start and end positions to make the source slice
@@ -640,7 +663,7 @@ function SourceLocationRestrict(opt_limit, opt_before) {
this.end = this.start + limit;
}
}
-};
+}
/**
@@ -649,8 +672,11 @@ function SourceLocationRestrict(opt_limit, opt_before) {
* Source text for this location.
*/
function SourceLocationSourceText() {
- return %_CallFunction(this.script.source, this.start, this.end, StringSubstring);
-};
+ return %_CallFunction(this.script.source,
+ this.start,
+ this.end,
+ StringSubstring);
+}
SetUpLockedPrototype(SourceLocation,
@@ -658,7 +684,7 @@ SetUpLockedPrototype(SourceLocation,
$Array(
"restrict", SourceLocationRestrict,
"sourceText", SourceLocationSourceText
- )
+ )
);
@@ -698,7 +724,7 @@ function SourceSliceSourceText() {
this.from_position,
this.to_position,
StringSubstring);
-};
+}
SetUpLockedPrototype(SourceSlice,
$Array("script", "from_line", "to_line", "from_position", "to_position"),
@@ -733,24 +759,19 @@ function DefineOneShotAccessor(obj, name, fun) {
// can't rely on 'this' being the same as 'obj'.
var hasBeenSet = false;
var value;
- function getter() {
+ var getter = function() {
if (hasBeenSet) {
return value;
}
hasBeenSet = true;
value = fun(obj);
return value;
- }
- function setter(v) {
+ };
+ var setter = function(v) {
hasBeenSet = true;
value = v;
- }
- var desc = { get: getter,
- set: setter,
- enumerable: false,
- configurable: true };
- desc = ToPropertyDescriptor(desc);
- DefineOwnProperty(obj, name, desc, true);
+ };
+ %DefineOrRedefineAccessorProperty(obj, name, getter, setter, DONT_ENUM);
}
function CallSite(receiver, fun, pos) {
@@ -761,7 +782,7 @@ function CallSite(receiver, fun, pos) {
function CallSiteGetThis() {
return this.receiver;
-};
+}
function CallSiteGetTypeName() {
var constructor = this.receiver.constructor;
@@ -773,33 +794,33 @@ function CallSiteGetTypeName() {
return %_CallFunction(this.receiver, ObjectToString);
}
return constructorName;
-};
+}
function CallSiteIsToplevel() {
if (this.receiver == null) {
return true;
}
return IS_GLOBAL(this.receiver);
-};
+}
function CallSiteIsEval() {
var script = %FunctionGetScript(this.fun);
return script && script.compilation_type == COMPILATION_TYPE_EVAL;
-};
+}
function CallSiteGetEvalOrigin() {
var script = %FunctionGetScript(this.fun);
return FormatEvalOrigin(script);
-};
+}
function CallSiteGetScriptNameOrSourceURL() {
var script = %FunctionGetScript(this.fun);
return script ? script.nameOrSourceURL() : null;
-};
+}
function CallSiteGetFunction() {
return this.fun;
-};
+}
function CallSiteGetFunctionName() {
// See if the function knows its own name
@@ -815,15 +836,19 @@ function CallSiteGetFunctionName() {
return "eval";
}
return null;
-};
+}
function CallSiteGetMethodName() {
// See if we can find a unique property on the receiver that holds
// this function.
var ownName = this.fun.name;
if (ownName && this.receiver &&
- (%_CallFunction(this.receiver, ownName, ObjectLookupGetter) === this.fun ||
- %_CallFunction(this.receiver, ownName, ObjectLookupSetter) === this.fun ||
+ (%_CallFunction(this.receiver,
+ ownName,
+ ObjectLookupGetter) === this.fun ||
+ %_CallFunction(this.receiver,
+ ownName,
+ ObjectLookupSetter) === this.fun ||
this.receiver[ownName] === this.fun)) {
// To handle DontEnum properties we guess that the method has
// the same name as the function.
@@ -833,7 +858,8 @@ function CallSiteGetMethodName() {
for (var prop in this.receiver) {
if (this.receiver.__lookupGetter__(prop) === this.fun ||
this.receiver.__lookupSetter__(prop) === this.fun ||
- (!this.receiver.__lookupGetter__(prop) && this.receiver[prop] === this.fun)) {
+ (!this.receiver.__lookupGetter__(prop) &&
+ this.receiver[prop] === this.fun)) {
// If we find more than one match bail out to avoid confusion.
if (name) {
return null;
@@ -845,12 +871,12 @@ function CallSiteGetMethodName() {
return name;
}
return null;
-};
+}
function CallSiteGetFileName() {
var script = %FunctionGetScript(this.fun);
return script ? script.name : null;
-};
+}
function CallSiteGetLineNumber() {
if (this.pos == -1) {
@@ -862,7 +888,7 @@ function CallSiteGetLineNumber() {
location = script.locationFromPosition(this.pos, true);
}
return location ? location.line + 1 : null;
-};
+}
function CallSiteGetColumnNumber() {
if (this.pos == -1) {
@@ -874,16 +900,16 @@ function CallSiteGetColumnNumber() {
location = script.locationFromPosition(this.pos, true);
}
return location ? location.column + 1: null;
-};
+}
function CallSiteIsNative() {
var script = %FunctionGetScript(this.fun);
return script ? (script.type == TYPE_NATIVE) : false;
-};
+}
function CallSiteGetPosition() {
return this.pos;
-};
+}
function CallSiteIsConstructor() {
var constructor = this.receiver ? this.receiver.constructor : null;
@@ -891,7 +917,7 @@ function CallSiteIsConstructor() {
return false;
}
return this.fun === constructor;
-};
+}
SetUpLockedPrototype(CallSite, $Array("receiver", "fun", "pos"), $Array(
"getThis", CallSiteGetThis,
@@ -934,12 +960,13 @@ function FormatEvalOrigin(script) {
// eval script originated from "real" source.
if (eval_from_script.name) {
eval_origin += " (" + eval_from_script.name;
- var location = eval_from_script.locationFromPosition(script.eval_from_script_position, true);
+ var location = eval_from_script.locationFromPosition(
+ script.eval_from_script_position, true);
if (location) {
eval_origin += ":" + (location.line + 1);
eval_origin += ":" + (location.column + 1);
}
- eval_origin += ")"
+ eval_origin += ")";
} else {
eval_origin += " (unknown source)";
}
@@ -947,7 +974,7 @@ function FormatEvalOrigin(script) {
}
return eval_origin;
-};
+}
function FormatSourcePosition(frame) {
var fileName;
@@ -956,8 +983,9 @@ function FormatSourcePosition(frame) {
fileLocation = "native";
} else if (frame.isEval()) {
fileName = frame.getScriptNameOrSourceURL();
- if (!fileName)
+ if (!fileName) {
fileLocation = frame.getEvalOrigin();
+ }
} else {
fileName = frame.getFileName();
}
@@ -1060,19 +1088,19 @@ function captureStackTrace(obj, cons_opt) {
if (stackTraceLimit < 0 || stackTraceLimit > 10000) {
stackTraceLimit = 10000;
}
- var raw_stack = %CollectStackTrace(cons_opt
- ? cons_opt
- : captureStackTrace, stackTraceLimit);
+ var raw_stack = %CollectStackTrace(obj,
+ cons_opt ? cons_opt : captureStackTrace,
+ stackTraceLimit);
DefineOneShotAccessor(obj, 'stack', function (obj) {
return FormatRawStackTrace(obj, raw_stack);
});
-};
+}
function SetUpError() {
// Define special error type constructors.
- function DefineError(f) {
+ var DefineError = function(f) {
// Store the error function in both the global object
// and the runtime object. The function is fetched
// from the runtime object when throwing errors from
@@ -1088,7 +1116,7 @@ function SetUpError() {
// However, it can't be an instance of the Error object because
// it hasn't been properly configured yet. Instead we create a
// special not-a-true-error-but-close-enough object.
- function ErrorPrototype() {}
+ var ErrorPrototype = function() {};
%FunctionSetPrototype(ErrorPrototype, $Object.prototype);
%FunctionSetInstanceClassName(ErrorPrototype, 'Error');
%FunctionSetPrototype(f, new ErrorPrototype());
@@ -1130,7 +1158,7 @@ function SetUpError() {
}
});
%SetNativeFlag(f);
- }
+ };
DefineError(function Error() { });
DefineError(function TypeError() { });
@@ -1149,14 +1177,14 @@ $Error.captureStackTrace = captureStackTrace;
// Global list of error objects visited during ErrorToString. This is
// used to detect cycles in error toString formatting.
-const visited_errors = new InternalArray();
-const cyclic_error_marker = new $Object();
+var visited_errors = new InternalArray();
+var cyclic_error_marker = new $Object();
function ErrorToStringDetectCycle(error) {
if (!%PushIfAbsent(visited_errors, error)) throw cyclic_error_marker;
try {
var type = error.type;
- var name = error.name
+ var name = error.name;
name = IS_UNDEFINED(name) ? "Error" : TO_STRING_INLINE(name);
var message = error.message;
var hasMessage = %_CallFunction(error, "message", ObjectHasOwnProperty);
@@ -1173,9 +1201,8 @@ function ErrorToStringDetectCycle(error) {
}
function ErrorToString() {
- if (IS_NULL_OR_UNDEFINED(this) && !IS_UNDETECTABLE(this)) {
- throw MakeTypeError("called_on_null_or_undefined",
- ["Error.prototype.toString"]);
+ if (!IS_SPEC_OBJECT(this)) {
+ throw MakeTypeError("called_on_non_object", ["Error.prototype.toString"]);
}
try {
@@ -1195,4 +1222,4 @@ InstallFunctions($Error.prototype, DONT_ENUM, ['toString', ErrorToString]);
// Boilerplate for exceptions for stack overflows. Used from
// Isolate::StackOverflow().
-const kStackOverflowBoilerplate = MakeRangeError('stack_overflow', []);
+var kStackOverflowBoilerplate = MakeRangeError('stack_overflow', []);
diff --git a/src/3rdparty/v8/src/mips/assembler-mips-inl.h b/src/3rdparty/v8/src/mips/assembler-mips-inl.h
index 2ba9760..2ff4710 100644
--- a/src/3rdparty/v8/src/mips/assembler-mips-inl.h
+++ b/src/3rdparty/v8/src/mips/assembler-mips-inl.h
@@ -30,13 +30,14 @@
// The original source code covered by the above license above has been
// modified significantly by Google Inc.
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
#ifndef V8_MIPS_ASSEMBLER_MIPS_INL_H_
#define V8_MIPS_ASSEMBLER_MIPS_INL_H_
#include "mips/assembler-mips.h"
+
#include "cpu.h"
#include "debug.h"
@@ -78,6 +79,16 @@ bool Operand::is_reg() const {
}
+int FPURegister::ToAllocationIndex(FPURegister reg) {
+ ASSERT(reg.code() % 2 == 0);
+ ASSERT(reg.code() / 2 < kNumAllocatableRegisters);
+ ASSERT(reg.is_valid());
+ ASSERT(!reg.is(kDoubleRegZero));
+ ASSERT(!reg.is(kLithiumScratchDouble));
+ return (reg.code() / 2);
+}
+
+
// -----------------------------------------------------------------------------
// RelocInfo.
@@ -106,13 +117,31 @@ Address RelocInfo::target_address() {
Address RelocInfo::target_address_address() {
- ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
- return reinterpret_cast<Address>(pc_);
+ ASSERT(IsCodeTarget(rmode_) ||
+ rmode_ == RUNTIME_ENTRY ||
+ rmode_ == EMBEDDED_OBJECT ||
+ rmode_ == EXTERNAL_REFERENCE);
+ // Read the address of the word containing the target_address in an
+ // instruction stream.
+ // The only architecture-independent user of this function is the serializer.
+ // The serializer uses it to find out how many raw bytes of instruction to
+ // output before the next target.
+ // For an instruction like LUI/ORI where the target bits are mixed into the
+ // instruction bits, the size of the target will be zero, indicating that the
+ // serializer should not step forward in memory after a target is resolved
+ // and written. In this case the target_address_address function should
+ // return the end of the instructions to be patched, allowing the
+ // deserializer to deserialize the instructions as raw bytes and put them in
+ // place, ready to be patched with the target. After jump optimization,
+ // that is the address of the instruction that follows J/JAL/JR/JALR
+ // instruction.
+ return reinterpret_cast<Address>(
+ pc_ + Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize);
}
int RelocInfo::target_address_size() {
- return Assembler::kExternalTargetSize;
+ return Assembler::kSpecialTargetSize;
}
@@ -133,7 +162,7 @@ Object* RelocInfo::target_object() {
}
-Handle<Object> RelocInfo::target_object_handle(Assembler *origin) {
+Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
return Handle<Object>(reinterpret_cast<Object**>(
Assembler::target_address_at(pc_)));
@@ -270,7 +299,7 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
} else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
visitor->VisitGlobalPropertyCell(this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
- visitor->VisitExternalReference(target_reference_address());
+ visitor->VisitExternalReference(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
// TODO(isolates): Get a cached isolate below.
} else if (((RelocInfo::IsJSReturn(mode) &&
@@ -296,7 +325,7 @@ void RelocInfo::Visit(Heap* heap) {
} else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
StaticVisitor::VisitGlobalPropertyCell(heap, this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
- StaticVisitor::VisitExternalReference(target_reference_address());
+ StaticVisitor::VisitExternalReference(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
} else if (heap->isolate()->debug()->has_break_points() &&
((RelocInfo::IsJSReturn(mode) &&
diff --git a/src/3rdparty/v8/src/mips/assembler-mips.cc b/src/3rdparty/v8/src/mips/assembler-mips.cc
index e933181..fa64e1e 100644
--- a/src/3rdparty/v8/src/mips/assembler-mips.cc
+++ b/src/3rdparty/v8/src/mips/assembler-mips.cc
@@ -30,7 +30,7 @@
// The original source code covered by the above license above has been
// modified significantly by Google Inc.
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
#include "v8.h"
@@ -143,7 +143,7 @@ int ToNumber(Register reg) {
27, // k1
28, // gp
29, // sp
- 30, // s8_fp
+ 30, // fp
31, // ra
};
return kNumbers[reg.code()];
@@ -163,7 +163,7 @@ Register ToRegister(int num) {
k0, k1,
gp,
sp,
- s8_fp,
+ fp,
ra
};
return kRegisters[num];
@@ -237,28 +237,28 @@ MemOperand::MemOperand(Register rm, int32_t offset) : Operand(rm) {
static const int kNegOffset = 0x00008000;
// addiu(sp, sp, 4) aka Pop() operation or part of Pop(r)
// operations as post-increment of sp.
-const Instr kPopInstruction = ADDIU | (sp.code() << kRsShift)
- | (sp.code() << kRtShift) | (kPointerSize & kImm16Mask);
+const Instr kPopInstruction = ADDIU | (kRegister_sp_Code << kRsShift)
+ | (kRegister_sp_Code << kRtShift) | (kPointerSize & kImm16Mask);
// addiu(sp, sp, -4) part of Push(r) operation as pre-decrement of sp.
-const Instr kPushInstruction = ADDIU | (sp.code() << kRsShift)
- | (sp.code() << kRtShift) | (-kPointerSize & kImm16Mask);
+const Instr kPushInstruction = ADDIU | (kRegister_sp_Code << kRsShift)
+ | (kRegister_sp_Code << kRtShift) | (-kPointerSize & kImm16Mask);
// sw(r, MemOperand(sp, 0))
-const Instr kPushRegPattern = SW | (sp.code() << kRsShift)
+const Instr kPushRegPattern = SW | (kRegister_sp_Code << kRsShift)
| (0 & kImm16Mask);
// lw(r, MemOperand(sp, 0))
-const Instr kPopRegPattern = LW | (sp.code() << kRsShift)
+const Instr kPopRegPattern = LW | (kRegister_sp_Code << kRsShift)
| (0 & kImm16Mask);
-const Instr kLwRegFpOffsetPattern = LW | (s8_fp.code() << kRsShift)
+const Instr kLwRegFpOffsetPattern = LW | (kRegister_fp_Code << kRsShift)
| (0 & kImm16Mask);
-const Instr kSwRegFpOffsetPattern = SW | (s8_fp.code() << kRsShift)
+const Instr kSwRegFpOffsetPattern = SW | (kRegister_fp_Code << kRsShift)
| (0 & kImm16Mask);
-const Instr kLwRegFpNegOffsetPattern = LW | (s8_fp.code() << kRsShift)
+const Instr kLwRegFpNegOffsetPattern = LW | (kRegister_fp_Code << kRsShift)
| (kNegOffset & kImm16Mask);
-const Instr kSwRegFpNegOffsetPattern = SW | (s8_fp.code() << kRsShift)
+const Instr kSwRegFpNegOffsetPattern = SW | (kRegister_fp_Code << kRsShift)
| (kNegOffset & kImm16Mask);
// A mask for the Rt register for push, pop, lw, sw instructions.
const Instr kRtMask = kRtFieldMask;
@@ -301,7 +301,7 @@ Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
own_buffer_ = false;
}
- // Setup buffer pointers.
+ // Set up buffer pointers.
ASSERT(buffer_ != NULL);
pc_ = buffer_;
reloc_info_writer.Reposition(buffer_ + buffer_size, pc_);
@@ -337,7 +337,7 @@ Assembler::~Assembler() {
void Assembler::GetCode(CodeDesc* desc) {
ASSERT(pc_ <= reloc_info_writer.pos()); // No overlap.
- // Setup code descriptor.
+ // Set up code descriptor.
desc->buffer = buffer_;
desc->buffer_size = buffer_size_;
desc->instr_size = pc_offset();
@@ -850,7 +850,6 @@ bool Assembler::MustUseReg(RelocInfo::Mode rmode) {
return rmode != RelocInfo::NONE;
}
-
void Assembler::GenInstrRegister(Opcode opcode,
Register rs,
Register rt,
@@ -1245,6 +1244,7 @@ void Assembler::and_(Register rd, Register rs, Register rt) {
void Assembler::andi(Register rt, Register rs, int32_t j) {
+ ASSERT(is_uint16(j));
GenInstrImmediate(ANDI, rs, rt, j);
}
@@ -1255,6 +1255,7 @@ void Assembler::or_(Register rd, Register rs, Register rt) {
void Assembler::ori(Register rt, Register rs, int32_t j) {
+ ASSERT(is_uint16(j));
GenInstrImmediate(ORI, rs, rt, j);
}
@@ -1265,6 +1266,7 @@ void Assembler::xor_(Register rd, Register rs, Register rt) {
void Assembler::xori(Register rt, Register rs, int32_t j) {
+ ASSERT(is_uint16(j));
GenInstrImmediate(XORI, rs, rt, j);
}
@@ -1316,7 +1318,7 @@ void Assembler::srav(Register rd, Register rt, Register rs) {
void Assembler::rotr(Register rd, Register rt, uint16_t sa) {
// Should be called via MacroAssembler::Ror.
ASSERT(rd.is_valid() && rt.is_valid() && is_uint5(sa));
- ASSERT(mips32r2);
+ ASSERT(kArchVariant == kMips32r2);
Instr instr = SPECIAL | (1 << kRsShift) | (rt.code() << kRtShift)
| (rd.code() << kRdShift) | (sa << kSaShift) | SRL;
emit(instr);
@@ -1326,7 +1328,7 @@ void Assembler::rotr(Register rd, Register rt, uint16_t sa) {
void Assembler::rotrv(Register rd, Register rt, Register rs) {
// Should be called via MacroAssembler::Ror.
ASSERT(rd.is_valid() && rt.is_valid() && rs.is_valid() );
- ASSERT(mips32r2);
+ ASSERT(kArchVariant == kMips32r2);
Instr instr = SPECIAL | (rs.code() << kRsShift) | (rt.code() << kRtShift)
| (rd.code() << kRdShift) | (1 << kSaShift) | SRLV;
emit(instr);
@@ -1445,6 +1447,7 @@ void Assembler::swr(Register rd, const MemOperand& rs) {
void Assembler::lui(Register rd, int32_t j) {
+ ASSERT(is_uint16(j));
GenInstrImmediate(LUI, zero_reg, rd, j);
}
@@ -1600,7 +1603,7 @@ void Assembler::clz(Register rd, Register rs) {
void Assembler::ins_(Register rt, Register rs, uint16_t pos, uint16_t size) {
// Should be called via MacroAssembler::Ins.
// Ins instr has 'rt' field as dest, and two uint5: msb, lsb.
- ASSERT(mips32r2);
+ ASSERT(kArchVariant == kMips32r2);
GenInstrRegister(SPECIAL3, rs, rt, pos + size - 1, pos, INS);
}
@@ -1608,7 +1611,7 @@ void Assembler::ins_(Register rt, Register rs, uint16_t pos, uint16_t size) {
void Assembler::ext_(Register rt, Register rs, uint16_t pos, uint16_t size) {
// Should be called via MacroAssembler::Ext.
// Ext instr has 'rt' field as dest, and two uint5: msb, lsb.
- ASSERT(mips32r2);
+ ASSERT(kArchVariant == kMips32r2);
GenInstrRegister(SPECIAL3, rs, rt, size - 1, pos, EXT);
}
@@ -1768,25 +1771,25 @@ void Assembler::ceil_w_d(FPURegister fd, FPURegister fs) {
void Assembler::cvt_l_s(FPURegister fd, FPURegister fs) {
- ASSERT(mips32r2);
+ ASSERT(kArchVariant == kMips32r2);
GenInstrRegister(COP1, S, f0, fs, fd, CVT_L_S);
}
void Assembler::cvt_l_d(FPURegister fd, FPURegister fs) {
- ASSERT(mips32r2);
+ ASSERT(kArchVariant == kMips32r2);
GenInstrRegister(COP1, D, f0, fs, fd, CVT_L_D);
}
void Assembler::trunc_l_s(FPURegister fd, FPURegister fs) {
- ASSERT(mips32r2);
+ ASSERT(kArchVariant == kMips32r2);
GenInstrRegister(COP1, S, f0, fs, fd, TRUNC_L_S);
}
void Assembler::trunc_l_d(FPURegister fd, FPURegister fs) {
- ASSERT(mips32r2);
+ ASSERT(kArchVariant == kMips32r2);
GenInstrRegister(COP1, D, f0, fs, fd, TRUNC_L_D);
}
@@ -1827,7 +1830,7 @@ void Assembler::cvt_s_w(FPURegister fd, FPURegister fs) {
void Assembler::cvt_s_l(FPURegister fd, FPURegister fs) {
- ASSERT(mips32r2);
+ ASSERT(kArchVariant == kMips32r2);
GenInstrRegister(COP1, L, f0, fs, fd, CVT_S_L);
}
@@ -1843,7 +1846,7 @@ void Assembler::cvt_d_w(FPURegister fd, FPURegister fs) {
void Assembler::cvt_d_l(FPURegister fd, FPURegister fs) {
- ASSERT(mips32r2);
+ ASSERT(kArchVariant == kMips32r2);
GenInstrRegister(COP1, L, f0, fs, fd, CVT_D_L);
}
@@ -1970,7 +1973,7 @@ void Assembler::GrowBuffer() {
}
CHECK_GT(desc.buffer_size, 0); // No overflow.
- // Setup new buffer.
+ // Set up new buffer.
desc.buffer = NewArray<byte>(desc.buffer_size);
desc.instr_size = pc_offset();
diff --git a/src/3rdparty/v8/src/mips/assembler-mips.h b/src/3rdparty/v8/src/mips/assembler-mips.h
index b66ea0d..5e67d0c 100644
--- a/src/3rdparty/v8/src/mips/assembler-mips.h
+++ b/src/3rdparty/v8/src/mips/assembler-mips.h
@@ -30,7 +30,7 @@
// The original source code covered by the above license above has been
// modified significantly by Google Inc.
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
#ifndef V8_MIPS_ASSEMBLER_MIPS_H_
@@ -125,40 +125,59 @@ struct Register {
int code_;
};
-const Register no_reg = { -1 };
-
-const Register zero_reg = { 0 }; // Always zero.
-const Register at = { 1 }; // at: Reserved for synthetic instructions.
-const Register v0 = { 2 }; // v0, v1: Used when returning multiple values
-const Register v1 = { 3 }; // from subroutines.
-const Register a0 = { 4 }; // a0 - a4: Used to pass non-FP parameters.
-const Register a1 = { 5 };
-const Register a2 = { 6 };
-const Register a3 = { 7 };
-const Register t0 = { 8 }; // t0 - t9: Can be used without reservation, act
-const Register t1 = { 9 }; // as temporary registers and are allowed to
-const Register t2 = { 10 }; // be destroyed by subroutines.
-const Register t3 = { 11 };
-const Register t4 = { 12 };
-const Register t5 = { 13 };
-const Register t6 = { 14 };
-const Register t7 = { 15 };
-const Register s0 = { 16 }; // s0 - s7: Subroutine register variables.
-const Register s1 = { 17 }; // Subroutines that write to these registers
-const Register s2 = { 18 }; // must restore their values before exiting so
-const Register s3 = { 19 }; // that the caller can expect the values to be
-const Register s4 = { 20 }; // preserved.
-const Register s5 = { 21 };
-const Register s6 = { 22 };
-const Register s7 = { 23 };
-const Register t8 = { 24 };
-const Register t9 = { 25 };
-const Register k0 = { 26 }; // k0, k1: Reserved for system calls and
-const Register k1 = { 27 }; // interrupt handlers.
-const Register gp = { 28 }; // gp: Reserved.
-const Register sp = { 29 }; // sp: Stack pointer.
-const Register s8_fp = { 30 }; // fp: Frame pointer.
-const Register ra = { 31 }; // ra: Return address pointer.
+#define REGISTER(N, C) \
+ const int kRegister_ ## N ## _Code = C; \
+ const Register N = { C }
+
+REGISTER(no_reg, -1);
+// Always zero.
+REGISTER(zero_reg, 0);
+// at: Reserved for synthetic instructions.
+REGISTER(at, 1);
+// v0, v1: Used when returning multiple values from subroutines.
+REGISTER(v0, 2);
+REGISTER(v1, 3);
+// a0 - a4: Used to pass non-FP parameters.
+REGISTER(a0, 4);
+REGISTER(a1, 5);
+REGISTER(a2, 6);
+REGISTER(a3, 7);
+// t0 - t9: Can be used without reservation, act as temporary registers and are
+// allowed to be destroyed by subroutines.
+REGISTER(t0, 8);
+REGISTER(t1, 9);
+REGISTER(t2, 10);
+REGISTER(t3, 11);
+REGISTER(t4, 12);
+REGISTER(t5, 13);
+REGISTER(t6, 14);
+REGISTER(t7, 15);
+// s0 - s7: Subroutine register variables. Subroutines that write to these
+// registers must restore their values before exiting so that the caller can
+// expect the values to be preserved.
+REGISTER(s0, 16);
+REGISTER(s1, 17);
+REGISTER(s2, 18);
+REGISTER(s3, 19);
+REGISTER(s4, 20);
+REGISTER(s5, 21);
+REGISTER(s6, 22);
+REGISTER(s7, 23);
+REGISTER(t8, 24);
+REGISTER(t9, 25);
+// k0, k1: Reserved for system calls and interrupt handlers.
+REGISTER(k0, 26);
+REGISTER(k1, 27);
+// gp: Reserved.
+REGISTER(gp, 28);
+// sp: Stack pointer.
+REGISTER(sp, 29);
+// fp: Frame pointer.
+REGISTER(fp, 30);
+// ra: Return address pointer.
+REGISTER(ra, 31);
+
+#undef REGISTER
int ToNumber(Register reg);
@@ -182,12 +201,7 @@ struct FPURegister {
kNumReservedRegisters;
- static int ToAllocationIndex(FPURegister reg) {
- ASSERT(reg.code() % 2 == 0);
- ASSERT(reg.code() / 2 < kNumAllocatableRegisters);
- ASSERT(reg.is_valid());
- return (reg.code() / 2);
- }
+ inline static int ToAllocationIndex(FPURegister reg);
static FPURegister FromAllocationIndex(int index) {
ASSERT(index >= 0 && index < kNumAllocatableRegisters);
@@ -302,6 +316,13 @@ const FPURegister f29 = { 29 };
const FPURegister f30 = { 30 };
const FPURegister f31 = { 31 };
+// Register aliases.
+// cp is assumed to be a callee saved register.
+static const Register& kLithiumScratchReg = s3; // Scratch register.
+static const Register& kLithiumScratchReg2 = s4; // Scratch register.
+static const Register& kRootRegister = s6; // Roots array pointer.
+static const Register& cp = s7; // JavaScript context pointer.
+static const DoubleRegister& kLithiumScratchDouble = f30;
static const FPURegister& kDoubleRegZero = f28;
// FPU (coprocessor 1) control registers.
@@ -550,10 +571,13 @@ class Assembler : public AssemblerBase {
static void JumpLabelToJumpRegister(Address pc);
// This sets the branch destination (which gets loaded at the call address).
- // This is for calls and branches within generated code.
- inline static void set_target_at(Address instruction_payload,
- Address target) {
- set_target_address_at(instruction_payload, target);
+ // This is for calls and branches within generated code. The serializer
+ // has already deserialized the lui/ori instructions etc.
+ inline static void deserialization_set_special_target_at(
+ Address instruction_payload, Address target) {
+ set_target_address_at(
+ instruction_payload - kInstructionsFor32BitConstant * kInstrSize,
+ target);
}
// This sets the branch destination.
@@ -575,8 +599,7 @@ class Assembler : public AssemblerBase {
// are split across two consecutive instructions and don't exist separately
// in the code, so the serializer should not step forwards in memory after
// a target is resolved and written.
- static const int kCallTargetSize = 0 * kInstrSize;
- static const int kExternalTargetSize = 0 * kInstrSize;
+ static const int kSpecialTargetSize = 0;
// Number of consecutive instructions used to store 32bit constant.
// Before jump-optimizations, this constant was used in
@@ -667,7 +690,7 @@ class Assembler : public AssemblerBase {
// Never use the int16_t b(l)cond version with a branch offset
// instead of using the Label* version.
- // Jump targets must be in the current 256 MB-aligned region. ie 28 bits.
+ // Jump targets must be in the current 256 MB-aligned region. i.e. 28 bits.
void j(int32_t target);
void jal(int32_t target);
void jalr(Register rs, Register rd = ra);
diff --git a/src/3rdparty/v8/src/mips/builtins-mips.cc b/src/3rdparty/v8/src/mips/builtins-mips.cc
index a46a786..eeb84c3 100644
--- a/src/3rdparty/v8/src/mips/builtins-mips.cc
+++ b/src/3rdparty/v8/src/mips/builtins-mips.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -67,24 +67,42 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
}
- // JumpToExternalReference expects a0 to contain the number of arguments
+ // JumpToExternalReference expects s0 to contain the number of arguments
// including the receiver and the extra arguments.
- __ Addu(a0, a0, Operand(num_extra_args + 1));
+ __ Addu(s0, a0, num_extra_args + 1);
+ __ sll(s1, s0, kPointerSizeLog2);
+ __ Subu(s1, s1, kPointerSize);
__ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
+// Load the built-in InternalArray function from the current context.
+static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
+ Register result) {
+ // Load the global context.
+
+ __ lw(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ lw(result,
+ FieldMemOperand(result, GlobalObject::kGlobalContextOffset));
+ // Load the InternalArray function from the global context.
+ __ lw(result,
+ MemOperand(result,
+ Context::SlotOffset(
+ Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
+}
+
+
// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
// Load the global context.
__ lw(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ lw(result,
- FieldMemOperand(result, GlobalObject::kGlobalContextOffset));
+ FieldMemOperand(result, GlobalObject::kGlobalContextOffset));
// Load the Array function from the global context.
__ lw(result,
- MemOperand(result,
- Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
+ MemOperand(result,
+ Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}
@@ -100,9 +118,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
Label* gc_required) {
const int initial_capacity = JSArray::kPreallocatedArrayElements;
STATIC_ASSERT(initial_capacity >= 0);
- // Load the initial map from the array function.
- __ lw(scratch1, FieldMemOperand(array_function,
- JSFunction::kPrototypeOrInitialMapOffset));
+ __ LoadInitialArrayMap(array_function, scratch2, scratch1);
// Allocate the JSArray object together with space for a fixed array with the
// requested elements.
@@ -198,9 +214,7 @@ static void AllocateJSArray(MacroAssembler* masm,
bool fill_with_hole,
Label* gc_required) {
// Load the initial map from the array function.
- __ lw(elements_array_storage,
- FieldMemOperand(array_function,
- JSFunction::kPrototypeOrInitialMapOffset));
+ __ LoadInitialArrayMap(array_function, scratch2, elements_array_storage);
if (FLAG_debug_code) { // Assert that array size is not zero.
__ Assert(
@@ -308,7 +322,8 @@ static void AllocateJSArray(MacroAssembler* masm,
static void ArrayNativeCode(MacroAssembler* masm,
Label* call_generic_code) {
Counters* counters = masm->isolate()->counters();
- Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array;
+ Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array,
+ has_non_smi_element, finish, cant_transition_map, not_double;
// Check for array construction with zero arguments or one.
__ Branch(&argc_one_or_more, ne, a0, Operand(zero_reg));
@@ -322,7 +337,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
t1,
call_generic_code);
__ IncrementCounter(counters->array_function_native(), 1, a3, t0);
- // Setup return value, remove receiver from stack and return.
+ // Set up return value, remove receiver from stack and return.
__ mov(v0, a2);
__ Addu(sp, sp, Operand(kPointerSize));
__ Ret();
@@ -365,7 +380,7 @@ static void ArrayNativeCode(MacroAssembler* masm,
call_generic_code);
__ IncrementCounter(counters->array_function_native(), 1, a2, t0);
- // Setup return value, remove receiver and argument from stack and return.
+ // Set up return value, remove receiver and argument from stack and return.
__ mov(v0, a3);
__ Addu(sp, sp, Operand(2 * kPointerSize));
__ Ret();
@@ -400,14 +415,22 @@ static void ArrayNativeCode(MacroAssembler* masm,
// sp[0]: last argument
Label loop, entry;
- __ Branch(&entry);
+ __ Branch(USE_DELAY_SLOT, &entry);
+ __ mov(t3, sp);
__ bind(&loop);
- __ pop(a2);
+ __ lw(a2, MemOperand(t3));
+ if (FLAG_smi_only_arrays) {
+ __ JumpIfNotSmi(a2, &has_non_smi_element);
+ }
+ __ Addu(t3, t3, kPointerSize);
__ Addu(t1, t1, -kPointerSize);
__ sw(a2, MemOperand(t1));
__ bind(&entry);
__ Branch(&loop, lt, t0, Operand(t1));
+ __ bind(&finish);
+ __ mov(sp, t3);
+
// Remove caller arguments and receiver from the stack, setup return value and
// return.
// a0: argc
@@ -416,6 +439,77 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ Addu(sp, sp, Operand(kPointerSize));
__ mov(v0, a3);
__ Ret();
+
+ __ bind(&has_non_smi_element);
+ // Double values are handled by the runtime.
+ __ CheckMap(
+ a2, t5, Heap::kHeapNumberMapRootIndex, &not_double, DONT_DO_SMI_CHECK);
+ __ bind(&cant_transition_map);
+ __ UndoAllocationInNewSpace(a3, t0);
+ __ Branch(call_generic_code);
+
+ __ bind(&not_double);
+ // Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
+ // a3: JSArray
+ __ lw(a2, FieldMemOperand(a3, HeapObject::kMapOffset));
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ a2,
+ t5,
+ &cant_transition_map);
+ __ sw(a2, FieldMemOperand(a3, HeapObject::kMapOffset));
+ __ RecordWriteField(a3,
+ HeapObject::kMapOffset,
+ a2,
+ t5,
+ kRAHasNotBeenSaved,
+ kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ Label loop2;
+ __ bind(&loop2);
+ __ lw(a2, MemOperand(t3));
+ __ Addu(t3, t3, kPointerSize);
+ __ Subu(t1, t1, kPointerSize);
+ __ sw(a2, MemOperand(t1));
+ __ Branch(&loop2, lt, t0, Operand(t1));
+ __ Branch(&finish);
+}
+
+
+void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- a0 : number of arguments
+ // -- ra : return address
+ // -- sp[...]: constructor arguments
+ // -----------------------------------
+ Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
+
+ // Get the InternalArray function.
+ GenerateLoadInternalArrayFunction(masm, a1);
+
+ if (FLAG_debug_code) {
+ // Initial map for the builtin InternalArray functions should be maps.
+ __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
+ __ And(t0, a2, Operand(kSmiTagMask));
+ __ Assert(ne, "Unexpected initial map for InternalArray function",
+ t0, Operand(zero_reg));
+ __ GetObjectType(a2, a3, t0);
+ __ Assert(eq, "Unexpected initial map for InternalArray function",
+ t0, Operand(MAP_TYPE));
+ }
+
+ // Run the native code for the InternalArray function called as a normal
+ // function.
+ ArrayNativeCode(masm, &generic_array_code);
+
+ // Jump to the generic array code if the specialized code cannot handle the
+ // construction.
+ __ bind(&generic_array_code);
+
+ Handle<Code> array_code =
+ masm->isolate()->builtins()->InternalArrayCodeGeneric();
+ __ Jump(array_code, RelocInfo::CODE_TARGET);
}
@@ -618,7 +712,9 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
}
-void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
+static void Generate_JSConstructStubHelper(MacroAssembler* masm,
+ bool is_api_function,
+ bool count_constructions) {
// ----------- S t a t e -------------
// -- a0 : number of arguments
// -- a1 : constructor function
@@ -626,46 +722,6 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
// -- sp[...]: constructor arguments
// -----------------------------------
- Label slow, non_function_call;
- // Check that the function is not a smi.
- __ And(t0, a1, Operand(kSmiTagMask));
- __ Branch(&non_function_call, eq, t0, Operand(zero_reg));
- // Check that the function is a JSFunction.
- __ GetObjectType(a1, a2, a2);
- __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));
-
- // Jump to the function-specific construct stub.
- __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
- __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kConstructStubOffset));
- __ Addu(t9, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
- __ Jump(t9);
-
- // a0: number of arguments
- // a1: called object
- // a2: object type
- Label do_call;
- __ bind(&slow);
- __ Branch(&non_function_call, ne, a2, Operand(JS_FUNCTION_PROXY_TYPE));
- __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
- __ jmp(&do_call);
-
- __ bind(&non_function_call);
- __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
- __ bind(&do_call);
- // CALL_NON_FUNCTION expects the non-function constructor as receiver
- // (instead of the original receiver from the call site). The receiver is
- // stack element argc.
- // Set expected number of arguments to zero (not changing a0).
- __ mov(a2, zero_reg);
- __ SetCallKind(t1, CALL_AS_METHOD);
- __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
- RelocInfo::CODE_TARGET);
-}
-
-
-static void Generate_JSConstructStubHelper(MacroAssembler* masm,
- bool is_api_function,
- bool count_constructions) {
// Should never count constructions for api objects.
ASSERT(!is_api_function || !count_constructions);
@@ -705,8 +761,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Load the initial map and verify that it is in fact a map.
// a1: constructor function
__ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
- __ And(t0, a2, Operand(kSmiTagMask));
- __ Branch(&rt_call, eq, t0, Operand(zero_reg));
+ __ JumpIfSmi(a2, &rt_call);
__ GetObjectType(a2, a3, t4);
__ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));
@@ -834,7 +889,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Initialize the FixedArray.
// a1: constructor
- // a3: number of elements in properties array (un-tagged)
+ // a3: number of elements in properties array (untagged)
// t4: JSObject
// t5: start of next object
__ LoadRoot(t6, Heap::kFixedArrayMapRootIndex);
@@ -903,27 +958,20 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// t4: JSObject
__ bind(&allocated);
__ push(t4);
-
- // Push the function and the allocated receiver from the stack.
- // sp[0]: receiver (newly allocated object)
- // sp[1]: constructor function
- // sp[2]: number of arguments (smi-tagged)
- __ lw(a1, MemOperand(sp, kPointerSize));
- __ MultiPushReversed(a1.bit() | t4.bit());
+ __ push(t4);
// Reload the number of arguments from the stack.
- // a1: constructor function
// sp[0]: receiver
- // sp[1]: constructor function
- // sp[2]: receiver
- // sp[3]: constructor function
- // sp[4]: number of arguments (smi-tagged)
- __ lw(a3, MemOperand(sp, 4 * kPointerSize));
+ // sp[1]: receiver
+ // sp[2]: constructor function
+ // sp[3]: number of arguments (smi-tagged)
+ __ lw(a1, MemOperand(sp, 2 * kPointerSize));
+ __ lw(a3, MemOperand(sp, 3 * kPointerSize));
- // Setup pointer to last argument.
+ // Set up pointer to last argument.
__ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
- // Setup number of arguments for function call below.
+ // Set up number of arguments for function call below.
__ srl(a0, a3, kSmiTagSize);
// Copy arguments and receiver to the expression stack.
@@ -932,10 +980,9 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// a2: address of last argument (caller sp)
// a3: number of arguments (smi-tagged)
// sp[0]: receiver
- // sp[1]: constructor function
- // sp[2]: receiver
- // sp[3]: constructor function
- // sp[4]: number of arguments (smi-tagged)
+ // sp[1]: receiver
+ // sp[2]: constructor function
+ // sp[3]: number of arguments (smi-tagged)
Label loop, entry;
__ jmp(&entry);
__ bind(&loop);
@@ -963,13 +1010,10 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
NullCallWrapper(), CALL_AS_METHOD);
}
- // Pop the function from the stack.
- // v0: result
- // sp[0]: constructor function
- // sp[2]: receiver
- // sp[3]: constructor function
- // sp[4]: number of arguments (smi-tagged)
- __ Pop();
+ // Store offset of return address for deoptimizer.
+ if (!is_api_function && !count_constructions) {
+ masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
+ }
// Restore context from the frame.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -984,8 +1028,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// sp[0]: receiver (newly allocated object)
// sp[1]: constructor function
// sp[2]: number of arguments (smi-tagged)
- __ And(t0, v0, Operand(kSmiTagMask));
- __ Branch(&use_receiver, eq, t0, Operand(zero_reg));
+ __ JumpIfSmi(v0, &use_receiver);
// If the type of the result (stored in its map) is less than
// FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
@@ -1039,7 +1082,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// ----------- S t a t e -------------
// -- a0: code entry
// -- a1: function
- // -- a2: reveiver_pointer
+ // -- a2: receiver_pointer
// -- a3: argc
// -- s0: argv
// -----------------------------------
@@ -1054,17 +1097,12 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Set up the context from the function argument.
__ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
- // Set up the roots register.
- ExternalReference roots_array_start =
- ExternalReference::roots_array_start(masm->isolate());
- __ li(s6, Operand(roots_array_start));
-
// Push the function and the receiver onto the stack.
__ Push(a1, a2);
// Copy arguments to the stack in a loop.
// a3: argc
- // s0: argv, ie points to first arg
+ // s0: argv, i.e. points to first arg
Label loop, entry;
__ sll(t0, a3, kPointerSizeLog2);
__ addu(t2, s0, t0);
@@ -1093,7 +1131,8 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Invoke the code and pass argc as a0.
__ mov(a0, a3);
if (is_construct) {
- __ Call(masm->isolate()->builtins()->JSConstructCall());
+ CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+ __ CallStub(&stub);
} else {
ParameterCount actual(a0);
__ InvokeFunction(a1, actual, CALL_FUNCTION,
@@ -1284,8 +1323,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
__ sll(at, a0, kPointerSizeLog2);
__ addu(at, sp, at);
__ lw(a1, MemOperand(at));
- __ And(at, a1, Operand(kSmiTagMask));
- __ Branch(&non_function, eq, at, Operand(zero_reg));
+ __ JumpIfSmi(a1, &non_function);
__ GetObjectType(a1, a2, a2);
__ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));
@@ -1524,8 +1562,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
__ Branch(&push_receiver, ne, t3, Operand(zero_reg));
// Compute the receiver in non-strict mode.
- __ And(t3, a0, Operand(kSmiTagMask));
- __ Branch(&call_to_object, eq, t3, Operand(zero_reg));
+ __ JumpIfSmi(a0, &call_to_object);
__ LoadRoot(a1, Heap::kNullValueRootIndex);
__ Branch(&use_global_receiver, eq, a0, Operand(a1));
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
@@ -1698,8 +1735,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ bind(&too_few);
EnterArgumentsAdaptorFrame(masm);
- // TODO(MIPS): Optimize these loops.
-
// Calculate copy start address into a0 and copy end address is fp.
// a0: actual number of arguments as a smi
// a1: function
@@ -1721,9 +1756,10 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
Label copy;
__ bind(&copy);
__ lw(t0, MemOperand(a0)); // Adjusted above for return addr and receiver.
- __ push(t0);
+ __ Subu(sp, sp, kPointerSize);
__ Subu(a0, a0, kPointerSize);
- __ Branch(&copy, ne, a0, Operand(t3));
+ __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
+ __ sw(t0, MemOperand(sp)); // In the delay slot.
// Fill the remaining expected arguments with undefined.
// a1: function
@@ -1736,8 +1772,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
Label fill;
__ bind(&fill);
- __ push(t0);
- __ Branch(&fill, ne, sp, Operand(a2));
+ __ Subu(sp, sp, kPointerSize);
+ __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
+ __ sw(t0, MemOperand(sp));
}
// Call the entry point.
@@ -1745,6 +1782,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ Call(a3);
+ // Store offset of return address for deoptimizer.
+ masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
+
// Exit frame and return.
LeaveArgumentsAdaptorFrame(masm);
__ Ret();
diff --git a/src/3rdparty/v8/src/mips/code-stubs-mips.cc b/src/3rdparty/v8/src/mips/code-stubs-mips.cc
index 83a917d..3e7b5bf 100644
--- a/src/3rdparty/v8/src/mips/code-stubs-mips.cc
+++ b/src/3rdparty/v8/src/mips/code-stubs-mips.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -70,13 +70,13 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
// The ToNumber stub takes one argument in a0.
Label check_heap_number, call_builtin;
__ JumpIfNotSmi(a0, &check_heap_number);
+ __ Ret(USE_DELAY_SLOT);
__ mov(v0, a0);
- __ Ret();
__ bind(&check_heap_number);
EmitCheckForHeapNumber(masm, a0, a1, t0, &call_builtin);
+ __ Ret(USE_DELAY_SLOT);
__ mov(v0, a0);
- __ Ret();
__ bind(&call_builtin);
__ push(a0);
@@ -100,9 +100,9 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
&gc,
TAG_OBJECT);
- int map_index = strict_mode_ == kStrictMode
- ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
- : Context::FUNCTION_MAP_INDEX;
+ int map_index = (language_mode_ == CLASSIC_MODE)
+ ? Context::FUNCTION_MAP_INDEX
+ : Context::STRICT_MODE_FUNCTION_MAP_INDEX;
// Compute the function map in the current global context and set that
// as the map of the allocated object.
@@ -128,9 +128,9 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
// found in the shared function info object.
__ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kCodeOffset));
__ Addu(a3, a3, Operand(Code::kHeaderSize - kHeapObjectTag));
- __ sw(a3, FieldMemOperand(v0, JSFunction::kCodeEntryOffset));
// Return result. The argument function info has been popped already.
+ __ sw(a3, FieldMemOperand(v0, JSFunction::kCodeEntryOffset));
__ Ret();
// Create a new closure through the slower runtime call.
@@ -157,26 +157,19 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
// Load the function from the stack.
__ lw(a3, MemOperand(sp, 0));
- // Setup the object header.
- __ LoadRoot(a2, Heap::kFunctionContextMapRootIndex);
- __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
+ // Set up the object header.
+ __ LoadRoot(a1, Heap::kFunctionContextMapRootIndex);
__ li(a2, Operand(Smi::FromInt(length)));
__ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
+ __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
- // Setup the fixed slots.
+ // Set up the fixed slots, copy the global object from the previous context.
+ __ lw(a2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ li(a1, Operand(Smi::FromInt(0)));
__ sw(a3, MemOperand(v0, Context::SlotOffset(Context::CLOSURE_INDEX)));
__ sw(cp, MemOperand(v0, Context::SlotOffset(Context::PREVIOUS_INDEX)));
__ sw(a1, MemOperand(v0, Context::SlotOffset(Context::EXTENSION_INDEX)));
-
- // Copy the global object from the previous context.
- __ lw(a1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ sw(a1, MemOperand(v0, Context::SlotOffset(Context::GLOBAL_INDEX)));
-
- // Copy the qml global object from the surrounding context.
- __ lw(a1, MemOperand(cp, Context::SlotOffset(Context::QML_GLOBAL_INDEX)));
- __ sw(a1, MemOperand(v0, Context::SlotOffset(Context::QML_GLOBAL_INDEX)));
-
+ __ sw(a2, MemOperand(v0, Context::SlotOffset(Context::GLOBAL_INDEX)));
// Initialize the rest of the slots to undefined.
__ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
@@ -186,8 +179,7 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
// Remove the on-stack argument and return.
__ mov(cp, v0);
- __ Pop();
- __ Ret();
+ __ DropAndRet(1);
// Need to collect. Call into runtime system.
__ bind(&gc);
@@ -213,7 +205,7 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
// Load the serialized scope info from the stack.
__ lw(a1, MemOperand(sp, 1 * kPointerSize));
- // Setup the object header.
+ // Set up the object header.
__ LoadRoot(a2, Heap::kBlockContextMapRootIndex);
__ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
__ li(a2, Operand(Smi::FromInt(length)));
@@ -234,18 +226,12 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ lw(a3, ContextOperand(a3, Context::CLOSURE_INDEX));
__ bind(&after_sentinel);
- // Setup the fixed slots.
+ // Set up the fixed slots, copy the global object from the previous context.
+ __ lw(a2, ContextOperand(cp, Context::GLOBAL_INDEX));
__ sw(a3, ContextOperand(v0, Context::CLOSURE_INDEX));
__ sw(cp, ContextOperand(v0, Context::PREVIOUS_INDEX));
__ sw(a1, ContextOperand(v0, Context::EXTENSION_INDEX));
-
- // Copy the global object from the previous context.
- __ lw(a1, ContextOperand(cp, Context::GLOBAL_INDEX));
- __ sw(a1, ContextOperand(v0, Context::GLOBAL_INDEX));
-
- // Copy the qml global object from the surrounding context.
- __ lw(a1, ContextOperand(cp, Context::QML_GLOBAL_INDEX));
- __ sw(a1, ContextOperand(v0, Context::QML_GLOBAL_INDEX));
+ __ sw(a2, ContextOperand(v0, Context::GLOBAL_INDEX));
// Initialize the rest of the slots to the hole value.
__ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
@@ -255,8 +241,7 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
// Remove the on-stack argument and return.
__ mov(cp, v0);
- __ Addu(sp, sp, Operand(2 * kPointerSize));
- __ Ret();
+ __ DropAndRet(2);
// Need to collect. Call into runtime system.
__ bind(&gc);
@@ -264,21 +249,61 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
}
-void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
- // Stack layout on entry:
- // [sp]: constant elements.
- // [sp + kPointerSize]: literal index.
- // [sp + (2 * kPointerSize)]: literals array.
+static void GenerateFastCloneShallowArrayCommon(
+ MacroAssembler* masm,
+ int length,
+ FastCloneShallowArrayStub::Mode mode,
+ Label* fail) {
+ // Registers on entry:
+ // a3: boilerplate literal array.
+ ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
// All sizes here are multiples of kPointerSize.
int elements_size = 0;
- if (length_ > 0) {
- elements_size = mode_ == CLONE_DOUBLE_ELEMENTS
- ? FixedDoubleArray::SizeFor(length_)
- : FixedArray::SizeFor(length_);
+ if (length > 0) {
+ elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+ ? FixedDoubleArray::SizeFor(length)
+ : FixedArray::SizeFor(length);
}
int size = JSArray::kSize + elements_size;
+ // Allocate both the JS array and the elements array in one big
+ // allocation. This avoids multiple limit checks.
+ __ AllocateInNewSpace(size,
+ v0,
+ a1,
+ a2,
+ fail,
+ TAG_OBJECT);
+
+ // Copy the JS array part.
+ for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
+ if ((i != JSArray::kElementsOffset) || (length == 0)) {
+ __ lw(a1, FieldMemOperand(a3, i));
+ __ sw(a1, FieldMemOperand(v0, i));
+ }
+ }
+
+ if (length > 0) {
+ // Get hold of the elements array of the boilerplate and setup the
+ // elements pointer in the resulting object.
+ __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset));
+ __ Addu(a2, v0, Operand(JSArray::kSize));
+ __ sw(a2, FieldMemOperand(v0, JSArray::kElementsOffset));
+
+ // Copy the elements array.
+ ASSERT((elements_size % kPointerSize) == 0);
+ __ CopyFields(a2, a3, a1.bit(), elements_size / kPointerSize);
+ }
+}
+
+void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
+ // Stack layout on entry:
+ //
+ // [sp]: constant elements.
+ // [sp + kPointerSize]: literal index.
+ // [sp + (2 * kPointerSize)]: literals array.
+
// Load boilerplate object into r3 and check if we need to create a
// boilerplate.
Label slow_case;
@@ -291,17 +316,42 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
__ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
__ Branch(&slow_case, eq, a3, Operand(t1));
+ FastCloneShallowArrayStub::Mode mode = mode_;
+ if (mode == CLONE_ANY_ELEMENTS) {
+ Label double_elements, check_fast_elements;
+ __ lw(v0, FieldMemOperand(a3, JSArray::kElementsOffset));
+ __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
+ __ LoadRoot(t1, Heap::kFixedCOWArrayMapRootIndex);
+ __ Branch(&check_fast_elements, ne, v0, Operand(t1));
+ GenerateFastCloneShallowArrayCommon(masm, 0,
+ COPY_ON_WRITE_ELEMENTS, &slow_case);
+ // Return and remove the on-stack parameters.
+ __ DropAndRet(3);
+
+ __ bind(&check_fast_elements);
+ __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex);
+ __ Branch(&double_elements, ne, v0, Operand(t1));
+ GenerateFastCloneShallowArrayCommon(masm, length_,
+ CLONE_ELEMENTS, &slow_case);
+ // Return and remove the on-stack parameters.
+ __ DropAndRet(3);
+
+ __ bind(&double_elements);
+ mode = CLONE_DOUBLE_ELEMENTS;
+ // Fall through to generate the code to handle double elements.
+ }
+
if (FLAG_debug_code) {
const char* message;
Heap::RootListIndex expected_map_index;
- if (mode_ == CLONE_ELEMENTS) {
+ if (mode == CLONE_ELEMENTS) {
message = "Expected (writable) fixed array";
expected_map_index = Heap::kFixedArrayMapRootIndex;
- } else if (mode_ == CLONE_DOUBLE_ELEMENTS) {
+ } else if (mode == CLONE_DOUBLE_ELEMENTS) {
message = "Expected (writable) fixed double array";
expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
} else {
- ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
+ ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
message = "Expected copy-on-write fixed array";
expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
}
@@ -313,42 +363,56 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
__ pop(a3);
}
- // Allocate both the JS array and the elements array in one big
- // allocation. This avoids multiple limit checks.
- // Return new object in v0.
- __ AllocateInNewSpace(size,
- v0,
- a1,
- a2,
- &slow_case,
- TAG_OBJECT);
+ GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
- // Copy the JS array part.
- for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
- if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
- __ lw(a1, FieldMemOperand(a3, i));
- __ sw(a1, FieldMemOperand(v0, i));
- }
- }
+ // Return and remove the on-stack parameters.
+ __ DropAndRet(3);
- if (length_ > 0) {
- // Get hold of the elements array of the boilerplate and setup the
- // elements pointer in the resulting object.
- __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset));
- __ Addu(a2, v0, Operand(JSArray::kSize));
- __ sw(a2, FieldMemOperand(v0, JSArray::kElementsOffset));
+ __ bind(&slow_case);
+ __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
+}
- // Copy the elements array.
- ASSERT((elements_size % kPointerSize) == 0);
- __ CopyFields(a2, a3, a1.bit(), elements_size / kPointerSize);
+
+void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) {
+ // Stack layout on entry:
+ //
+ // [sp]: object literal flags.
+ // [sp + kPointerSize]: constant properties.
+ // [sp + (2 * kPointerSize)]: literal index.
+ // [sp + (3 * kPointerSize)]: literals array.
+
+ // Load boilerplate object into a3 and check if we need to create a
+ // boilerplate.
+ Label slow_case;
+ __ lw(a3, MemOperand(sp, 3 * kPointerSize));
+ __ lw(a0, MemOperand(sp, 2 * kPointerSize));
+ __ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
+ __ Addu(a3, t0, a3);
+ __ lw(a3, MemOperand(a3));
+ __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
+ __ Branch(&slow_case, eq, a3, Operand(t0));
+
+ // Check that the boilerplate contains only fast properties and we can
+ // statically determine the instance size.
+ int size = JSObject::kHeaderSize + length_ * kPointerSize;
+ __ lw(a0, FieldMemOperand(a3, HeapObject::kMapOffset));
+ __ lbu(a0, FieldMemOperand(a0, Map::kInstanceSizeOffset));
+ __ Branch(&slow_case, ne, a0, Operand(size >> kPointerSizeLog2));
+
+ // Allocate the JS object and copy header together with all in-object
+ // properties from the boilerplate.
+ __ AllocateInNewSpace(size, v0, a1, a2, &slow_case, TAG_OBJECT);
+ for (int i = 0; i < size; i += kPointerSize) {
+ __ lw(a1, FieldMemOperand(a3, i));
+ __ sw(a1, FieldMemOperand(v0, i));
}
// Return and remove the on-stack parameters.
- __ Addu(sp, sp, Operand(3 * kPointerSize));
- __ Ret();
+ __ DropAndRet(4);
__ bind(&slow_case);
- __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1);
+ __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1);
}
@@ -409,7 +473,7 @@ void ConvertToDoubleStub::Generate(MacroAssembler* masm) {
__ And(exponent, source_, Operand(HeapNumber::kSignMask));
// Subtract from 0 if source was negative.
__ subu(at, zero_reg, source_);
- __ movn(source_, at, exponent);
+ __ Movn(source_, at, exponent);
// We have -1, 0 or 1, which we treat specially. Register source_ contains
// absolute value: it is either equal to 1 (special case of -1 and 1),
@@ -417,19 +481,19 @@ void ConvertToDoubleStub::Generate(MacroAssembler* masm) {
__ Branch(&not_special, gt, source_, Operand(1));
// For 1 or -1 we need to or in the 0 exponent (biased to 1023).
- static const uint32_t exponent_word_for_1 =
+ const uint32_t exponent_word_for_1 =
HeapNumber::kExponentBias << HeapNumber::kExponentShift;
// Safe to use 'at' as dest reg here.
__ Or(at, exponent, Operand(exponent_word_for_1));
- __ movn(exponent, at, source_); // Write exp when source not 0.
+ __ Movn(exponent, at, source_); // Write exp when source not 0.
// 1, 0 and -1 all have 0 for the second word.
+ __ Ret(USE_DELAY_SLOT);
__ mov(mantissa, zero_reg);
- __ Ret();
__ bind(&not_special);
// Count leading zeros.
// Gets the wrong answer for 0, but we already checked for that case above.
- __ clz(zeros_, source_);
+ __ Clz(zeros_, source_);
// Compute exponent and or it into the exponent register.
// We use mantissa as a scratch register here.
__ li(mantissa, Operand(31 + HeapNumber::kExponentBias));
@@ -445,9 +509,9 @@ void ConvertToDoubleStub::Generate(MacroAssembler* masm) {
__ sll(mantissa, source_, HeapNumber::kMantissaBitsInTopWord);
// And the top (top 20 bits).
__ srl(source_, source_, 32 - HeapNumber::kMantissaBitsInTopWord);
- __ or_(exponent, exponent, source_);
- __ Ret();
+ __ Ret(USE_DELAY_SLOT);
+ __ or_(exponent, exponent, source_);
}
@@ -519,7 +583,9 @@ void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
Label is_smi, done;
- __ JumpIfSmi(object, &is_smi);
+ // Smi-check
+ __ UntagAndJumpIfSmi(scratch1, object, &is_smi);
+ // Heap number check
__ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number);
// Handle loading a double from a heap number.
@@ -546,7 +612,6 @@ void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
if (CpuFeatures::IsSupported(FPU)) {
CpuFeatures::Scope scope(FPU);
// Convert smi to double using FPU instructions.
- __ SmiUntag(scratch1, object);
__ mtc1(scratch1, dst);
__ cvt_d_w(dst, dst);
if (destination == kCoreRegisters) {
@@ -581,11 +646,10 @@ void FloatingPointHelper::ConvertNumberToInt32(MacroAssembler* masm,
Heap::kHeapNumberMapRootIndex,
"HeapNumberMap register clobbered.");
}
- Label is_smi;
Label done;
Label not_in_int32_range;
- __ JumpIfSmi(object, &is_smi);
+ __ UntagAndJumpIfSmi(dst, object, &done);
__ lw(scratch1, FieldMemOperand(object, HeapNumber::kMapOffset));
__ Branch(not_number, ne, scratch1, Operand(heap_number_map));
__ ConvertToInt32(object,
@@ -605,10 +669,6 @@ void FloatingPointHelper::ConvertNumberToInt32(MacroAssembler* masm,
scratch2,
scratch3);
- __ jmp(&done);
-
- __ bind(&is_smi);
- __ SmiUntag(dst, object);
__ bind(&done);
}
@@ -653,10 +713,10 @@ void FloatingPointHelper::ConvertIntToDouble(MacroAssembler* masm,
__ Subu(int_scratch, zero_reg, int_scratch);
__ bind(&skip_sub);
- // Get mantisssa[51:20].
+ // Get mantissa[51:20].
// Get the position of the first set bit.
- __ clz(dst1, int_scratch);
+ __ Clz(dst1, int_scratch);
__ li(scratch2, 31);
__ Subu(dst1, scratch2, dst1);
@@ -790,10 +850,7 @@ void FloatingPointHelper::LoadNumberAsInt32(MacroAssembler* masm,
Label done;
- // Untag the object into the destination register.
- __ SmiUntag(dst, object);
- // Just return if the object is a smi.
- __ JumpIfSmi(object, &done);
+ __ UntagAndJumpIfSmi(dst, object, &done);
if (FLAG_debug_code) {
__ AbortIfNotRootValue(heap_number_map,
@@ -898,7 +955,7 @@ void FloatingPointHelper::DoubleIs32BitInteger(MacroAssembler* masm,
// non zero bits left. So we need the (30 - exponent) last bits of the
// 31 higher bits of the mantissa to be null.
// Because bits [21:0] are null, we can check instead that the
- // (32 - exponent) last bits of the 32 higher bits of the mantisssa are null.
+ // (32 - exponent) last bits of the 32 higher bits of the mantissa are null.
// Get the 32 higher bits of the mantissa in dst.
__ Ext(dst,
@@ -963,9 +1020,9 @@ void FloatingPointHelper::CallCCodeForDoubleOperation(
__ sw(v0, FieldMemOperand(heap_number_result, HeapNumber::kMantissaOffset));
}
// Place heap_number_result in v0 and return to the pushed return address.
- __ mov(v0, heap_number_result);
__ pop(ra);
- __ Ret();
+ __ Ret(USE_DELAY_SLOT);
+ __ mov(v0, heap_number_result);
}
@@ -1017,7 +1074,7 @@ void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
__ or_(scratch_, scratch_, sign_);
// Subtract from 0 if the value was negative.
__ subu(at, zero_reg, the_int_);
- __ movn(the_int_, at, sign_);
+ __ Movn(the_int_, at, sign_);
// We should be masking the implict first digit of the mantissa away here,
// but it just ends up combining harmlessly with the last digit of the
// exponent that happens to be 1. The sign bit is 0 so we shift 10 to get
@@ -1101,6 +1158,7 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm,
}
__ bind(&return_equal);
+
if (cc == less) {
__ li(v0, Operand(GREATER)); // Things aren't less than themselves.
} else if (cc == greater) {
@@ -1165,16 +1223,15 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
(lhs.is(a1) && rhs.is(a0)));
Label lhs_is_smi;
- __ And(t0, lhs, Operand(kSmiTagMask));
- __ Branch(&lhs_is_smi, eq, t0, Operand(zero_reg));
+ __ JumpIfSmi(lhs, &lhs_is_smi);
// Rhs is a Smi.
// Check whether the non-smi is a heap number.
__ GetObjectType(lhs, t4, t4);
if (strict) {
// If lhs was not a number and rhs was a Smi then strict equality cannot
// succeed. Return non-equal (lhs is already not zero).
+ __ Ret(USE_DELAY_SLOT, ne, t4, Operand(HEAP_NUMBER_TYPE));
__ mov(v0, lhs);
- __ Ret(ne, t4, Operand(HEAP_NUMBER_TYPE));
} else {
// Smi compared non-strictly with a non-Smi non-heap-number. Call
// the runtime.
@@ -1212,8 +1269,8 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
if (strict) {
// If lhs was not a number and rhs was a Smi then strict equality cannot
// succeed. Return non-equal.
+ __ Ret(USE_DELAY_SLOT, ne, t4, Operand(HEAP_NUMBER_TYPE));
__ li(v0, Operand(1));
- __ Ret(ne, t4, Operand(HEAP_NUMBER_TYPE));
} else {
// Smi compared non-strictly with a non-Smi non-heap-number. Call
// the runtime.
@@ -1293,12 +1350,13 @@ void EmitNanCheck(MacroAssembler* masm, Condition cc) {
__ bind(&one_is_nan);
// NaN comparisons always fail.
// Load whatever we need in v0 to make the comparison fail.
+
if (cc == lt || cc == le) {
__ li(v0, Operand(GREATER));
} else {
__ li(v0, Operand(LESS));
}
- __ Ret(); // Return.
+ __ Ret();
__ bind(&neither_is_nan);
}
@@ -1347,6 +1405,7 @@ static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc) {
__ Branch(&return_result_not_equal, ne, t4, Operand(zero_reg));
__ bind(&return_result_equal);
+
__ li(v0, Operand(EQUAL));
__ Ret();
}
@@ -1378,6 +1437,7 @@ static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc) {
__ BranchF(&less_than, NULL, lt, f12, f14);
// Not equal, not less, not NaN, must be greater.
+
__ li(v0, Operand(GREATER));
__ Ret();
@@ -1408,8 +1468,8 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
// Return non-zero.
Label return_not_equal;
__ bind(&return_not_equal);
+ __ Ret(USE_DELAY_SLOT);
__ li(v0, Operand(1));
- __ Ret();
__ bind(&first_non_object);
// Check for oddballs: true, false, null, undefined.
@@ -1488,8 +1548,8 @@ static void EmitCheckForSymbolsOrObjects(MacroAssembler* masm,
// Both are symbols. We already checked they weren't the same pointer
// so they are not equal.
+ __ Ret(USE_DELAY_SLOT);
__ li(v0, Operand(1)); // Non-zero indicates not equal.
- __ Ret();
__ bind(&object_test);
__ Branch(not_both_strings, lt, a2, Operand(FIRST_SPEC_OBJECT_TYPE));
@@ -1504,8 +1564,8 @@ static void EmitCheckForSymbolsOrObjects(MacroAssembler* masm,
__ lbu(a3, FieldMemOperand(a3, Map::kBitFieldOffset));
__ and_(a0, a2, a3);
__ And(a0, a0, Operand(1 << Map::kIsUndetectable));
- __ Xor(v0, a0, Operand(1 << Map::kIsUndetectable));
- __ Ret();
+ __ Ret(USE_DELAY_SLOT);
+ __ xori(v0, a0, 1 << Map::kIsUndetectable);
}
@@ -1612,8 +1672,7 @@ void NumberToStringStub::Generate(MacroAssembler* masm) {
// Generate code to lookup number in the number string cache.
GenerateLookupNumberStringCache(masm, a1, v0, a2, a3, t0, false, &runtime);
- __ Addu(sp, sp, Operand(1 * kPointerSize));
- __ Ret();
+ __ DropAndRet(1);
__ bind(&runtime);
// Handle number to string in the runtime system if not found in the cache.
@@ -1635,8 +1694,8 @@ void CompareStub::Generate(MacroAssembler* masm) {
__ JumpIfNotSmi(a2, &not_two_smis);
__ sra(a1, a1, 1);
__ sra(a0, a0, 1);
- __ Subu(v0, a1, a0);
- __ Ret();
+ __ Ret(USE_DELAY_SLOT);
+ __ subu(v0, a1, a0);
__ bind(&not_two_smis);
} else if (FLAG_debug_code) {
__ Or(a2, a1, a0);
@@ -1648,46 +1707,6 @@ void CompareStub::Generate(MacroAssembler* masm) {
// NOTICE! This code is only reached after a smi-fast-case check, so
// it is certain that at least one operand isn't a smi.
- {
- // This is optimized for reading the code and not benchmarked for
- // speed or amount of instructions. The code is not ordered for speed
- // or anything like this
- Label miss, user_compare;
-
- // No global compare if both operands are SMIs
- __ And(a2, a1, Operand(a0));
- __ JumpIfSmi(a2, &miss);
-
-
- // We need to check if lhs and rhs are both objects, if not we are
- // jumping out of the function. We will keep the 'map' in t0 (lhs) and
- // t1 (rhs) for later usage.
- __ GetObjectType(a0, t0, a3);
- __ Branch(&miss, ne, a3, Operand(JS_OBJECT_TYPE));
-
- __ GetObjectType(a1, t1, a3);
- __ Branch(&miss, ne, a3, Operand(JS_OBJECT_TYPE));
-
- // Check if the UseUserComparison flag is set by using the map of t0 for lhs
- __ lbu(t0, FieldMemOperand(t0, Map::kBitField2Offset));
- __ And(t0, t0, Operand(1 << Map::kUseUserObjectComparison));
- __ Branch(&user_compare, eq, t0, Operand(1 << Map::kUseUserObjectComparison));
-
-
- // Check if the UseUserComparison flag is _not_ set by using the map of t1 for
- // rhs and then jump to the miss label.
- __ lbu(t1, FieldMemOperand(t1, Map::kBitField2Offset));
- __ And(t1, t1, Operand(1 << Map::kUseUserObjectComparison));
- __ Branch(&miss, ne, t1, Operand(1 << Map::kUseUserObjectComparison));
-
- // Invoke the runtime function here
- __ bind(&user_compare);
- __ Push(a0, a1);
- __ TailCallRuntime(Runtime::kUserObjectEquals, 2, 1);
-
- // We exit here without doing anything
- __ bind(&miss);
- }
// Handle the case where the objects are identical. Either returns the answer
// or goes to slow. Only falls through if the objects were not identical.
@@ -1729,15 +1748,15 @@ void CompareStub::Generate(MacroAssembler* masm) {
// Check if LESS condition is satisfied. If true, move conditionally
// result to v0.
__ c(OLT, D, f12, f14);
- __ movt(v0, t0);
+ __ Movt(v0, t0);
// Use previous check to store conditionally to v0 oposite condition
// (GREATER). If rhs is equal to lhs, this will be corrected in next
// check.
- __ movf(v0, t1);
+ __ Movf(v0, t1);
// Check if EQUAL condition is satisfied. If true, move conditionally
// result to v0.
__ c(EQ, D, f12, f14);
- __ movt(v0, t2);
+ __ Movt(v0, t2);
__ Ret();
@@ -1878,7 +1897,7 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
__ lbu(at, FieldMemOperand(map, Map::kBitFieldOffset));
__ And(at, at, Operand(1 << Map::kIsUndetectable));
// Undetectable -> false.
- __ movn(tos_, zero_reg, at);
+ __ Movn(tos_, zero_reg, at);
__ Ret(ne, at, Operand(zero_reg));
}
}
@@ -1895,8 +1914,8 @@ void ToBooleanStub::Generate(MacroAssembler* masm) {
__ lbu(at, FieldMemOperand(map, Map::kInstanceTypeOffset));
Label skip;
__ Branch(&skip, ge, at, Operand(FIRST_NONSTRING_TYPE));
+ __ Ret(USE_DELAY_SLOT); // the string length is OK as the return value
__ lw(tos_, FieldMemOperand(tos_, String::kLengthOffset));
- __ Ret(); // the string length is OK as the return value
__ bind(&skip);
}
@@ -1934,7 +1953,7 @@ void ToBooleanStub::CheckOddball(MacroAssembler* masm,
// The value of a root is never NULL, so we can avoid loading a non-null
// value into tos_ when we want to return 'true'.
if (!result) {
- __ movz(tos_, zero_reg, at);
+ __ Movz(tos_, zero_reg, at);
}
__ Ret(eq, at, Operand(zero_reg));
}
@@ -2071,8 +2090,8 @@ void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
__ Branch(slow, eq, t0, Operand(zero_reg));
// Return '0 - value'.
- __ Subu(v0, zero_reg, a0);
- __ Ret();
+ __ Ret(USE_DELAY_SLOT);
+ __ subu(v0, zero_reg, a0);
}
@@ -2402,8 +2421,8 @@ void BinaryOpStub::GenerateSmiSmiOperation(MacroAssembler* masm) {
// Negating it results in 'lt'.
__ Branch(&skip, lt, scratch2, Operand(zero_reg));
ASSERT(Smi::FromInt(0) == 0);
- __ mov(v0, zero_reg);
- __ Ret(); // Return smi 0 if the non-zero one was positive.
+ __ Ret(USE_DELAY_SLOT);
+ __ mov(v0, zero_reg); // Return smi 0 if the non-zero one was positive.
__ bind(&skip);
// We fall through here if we multiplied a negative number with 0, because
// that would mean we should produce -0.
@@ -2458,23 +2477,23 @@ void BinaryOpStub::GenerateSmiSmiOperation(MacroAssembler* masm) {
}
break;
case Token::BIT_OR:
- __ Or(v0, left, Operand(right));
- __ Ret();
+ __ Ret(USE_DELAY_SLOT);
+ __ or_(v0, left, right);
break;
case Token::BIT_AND:
- __ And(v0, left, Operand(right));
- __ Ret();
+ __ Ret(USE_DELAY_SLOT);
+ __ and_(v0, left, right);
break;
case Token::BIT_XOR:
- __ Xor(v0, left, Operand(right));
- __ Ret();
+ __ Ret(USE_DELAY_SLOT);
+ __ xor_(v0, left, right);
break;
case Token::SAR:
// Remove tags from right operand.
__ GetLeastBitsFromSmi(scratch1, right, 5);
__ srav(scratch1, left, scratch1);
// Smi tag result.
- __ And(v0, scratch1, Operand(~kSmiTagMask));
+ __ And(v0, scratch1, ~kSmiTagMask);
__ Ret();
break;
case Token::SHR:
@@ -2586,8 +2605,8 @@ void BinaryOpStub::GenerateFPOperation(MacroAssembler* masm,
// kValueOffset. On MIPS this workaround is built into sdc1 so
// there's no point in generating even more instructions.
__ sdc1(f10, FieldMemOperand(result, HeapNumber::kValueOffset));
+ __ Ret(USE_DELAY_SLOT);
__ mov(v0, result);
- __ Ret();
} else {
// Call the C function to handle the double operation.
FloatingPointHelper::CallCCodeForDoubleOperation(masm,
@@ -3410,6 +3429,9 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
__ Branch(&calculate, ne, a2, Operand(t0));
__ Branch(&calculate, ne, a3, Operand(t1));
// Cache hit. Load result, cleanup and return.
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(
+ counters->transcendental_cache_hit(), 1, scratch0, scratch1);
if (tagged) {
// Pop input value from stack and load result into v0.
__ Drop(1);
@@ -3422,6 +3444,9 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
} // if (CpuFeatures::IsSupported(FPU))
__ bind(&calculate);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(
+ counters->transcendental_cache_miss(), 1, scratch0, scratch1);
if (tagged) {
__ bind(&invalid_cache);
__ TailCallExternalReference(ExternalReference(RuntimeFunction(),
@@ -3455,8 +3480,8 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
__ sw(a3, MemOperand(cache_entry, 1 * kPointerSize));
__ sw(t2, MemOperand(cache_entry, 2 * kPointerSize));
+ __ Ret(USE_DELAY_SLOT);
__ mov(v0, cache_entry);
- __ Ret();
__ bind(&invalid_cache);
// The cache is invalid. Call runtime which will recreate the
@@ -3505,20 +3530,25 @@ void TranscendentalCacheStub::GenerateCallCFunction(MacroAssembler* masm,
__ mov_d(f12, f4);
}
AllowExternalCallThatCantCauseGC scope(masm);
+ Isolate* isolate = masm->isolate();
switch (type_) {
case TranscendentalCache::SIN:
__ CallCFunction(
- ExternalReference::math_sin_double_function(masm->isolate()),
+ ExternalReference::math_sin_double_function(isolate),
0, 1);
break;
case TranscendentalCache::COS:
__ CallCFunction(
- ExternalReference::math_cos_double_function(masm->isolate()),
+ ExternalReference::math_cos_double_function(isolate),
+ 0, 1);
+ break;
+ case TranscendentalCache::TAN:
+ __ CallCFunction(ExternalReference::math_tan_double_function(isolate),
0, 1);
break;
case TranscendentalCache::LOG:
__ CallCFunction(
- ExternalReference::math_log_double_function(masm->isolate()),
+ ExternalReference::math_log_double_function(isolate),
0, 1);
break;
default:
@@ -3534,6 +3564,7 @@ Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
// Add more cases when necessary.
case TranscendentalCache::SIN: return Runtime::kMath_sin;
case TranscendentalCache::COS: return Runtime::kMath_cos;
+ case TranscendentalCache::TAN: return Runtime::kMath_tan;
case TranscendentalCache::LOG: return Runtime::kMath_log;
default:
UNIMPLEMENTED();
@@ -3547,114 +3578,224 @@ void StackCheckStub::Generate(MacroAssembler* masm) {
}
-void MathPowStub::Generate(MacroAssembler* masm) {
- Label call_runtime;
-
- if (CpuFeatures::IsSupported(FPU)) {
- CpuFeatures::Scope scope(FPU);
-
- Label base_not_smi;
- Label exponent_not_smi;
- Label convert_exponent;
-
- const Register base = a0;
- const Register exponent = a2;
- const Register heapnumbermap = t1;
- const Register heapnumber = s0; // Callee-saved register.
- const Register scratch = t2;
- const Register scratch2 = t3;
+void InterruptStub::Generate(MacroAssembler* masm) {
+ __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
+}
- // Alocate FP values in the ABI-parameter-passing regs.
- const DoubleRegister double_base = f12;
- const DoubleRegister double_exponent = f14;
- const DoubleRegister double_result = f0;
- const DoubleRegister double_scratch = f2;
- __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);
+void MathPowStub::Generate(MacroAssembler* masm) {
+ CpuFeatures::Scope fpu_scope(FPU);
+ const Register base = a1;
+ const Register exponent = a2;
+ const Register heapnumbermap = t1;
+ const Register heapnumber = v0;
+ const DoubleRegister double_base = f2;
+ const DoubleRegister double_exponent = f4;
+ const DoubleRegister double_result = f0;
+ const DoubleRegister double_scratch = f6;
+ const FPURegister single_scratch = f8;
+ const Register scratch = t5;
+ const Register scratch2 = t3;
+
+ Label call_runtime, done, int_exponent;
+ if (exponent_type_ == ON_STACK) {
+ Label base_is_smi, unpack_exponent;
+ // The exponent and base are supplied as arguments on the stack.
+ // This can only happen if the stub is called from non-optimized code.
+ // Load input parameters from stack to double registers.
__ lw(base, MemOperand(sp, 1 * kPointerSize));
__ lw(exponent, MemOperand(sp, 0 * kPointerSize));
- // Convert base to double value and store it in f0.
- __ JumpIfNotSmi(base, &base_not_smi);
- // Base is a Smi. Untag and convert it.
- __ SmiUntag(base);
- __ mtc1(base, double_scratch);
- __ cvt_d_w(double_base, double_scratch);
- __ Branch(&convert_exponent);
+ __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);
- __ bind(&base_not_smi);
+ __ UntagAndJumpIfSmi(scratch, base, &base_is_smi);
__ lw(scratch, FieldMemOperand(base, JSObject::kMapOffset));
__ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap));
- // Base is a heapnumber. Load it into double register.
+
__ ldc1(double_base, FieldMemOperand(base, HeapNumber::kValueOffset));
+ __ jmp(&unpack_exponent);
- __ bind(&convert_exponent);
- __ JumpIfNotSmi(exponent, &exponent_not_smi);
- __ SmiUntag(exponent);
-
- // The base is in a double register and the exponent is
- // an untagged smi. Allocate a heap number and call a
- // C function for integer exponents. The register containing
- // the heap number is callee-saved.
- __ AllocateHeapNumber(heapnumber,
- scratch,
- scratch2,
- heapnumbermap,
- &call_runtime);
- __ push(ra);
- __ PrepareCallCFunction(1, 1, scratch);
- __ SetCallCDoubleArguments(double_base, exponent);
- {
- AllowExternalCallThatCantCauseGC scope(masm);
- __ CallCFunction(
- ExternalReference::power_double_int_function(masm->isolate()), 1, 1);
- __ pop(ra);
- __ GetCFunctionDoubleResult(double_result);
- }
- __ sdc1(double_result,
- FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
- __ mov(v0, heapnumber);
- __ DropAndRet(2 * kPointerSize);
+ __ bind(&base_is_smi);
+ __ mtc1(scratch, single_scratch);
+ __ cvt_d_w(double_base, single_scratch);
+ __ bind(&unpack_exponent);
+
+ __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
- __ bind(&exponent_not_smi);
__ lw(scratch, FieldMemOperand(exponent, JSObject::kMapOffset));
__ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap));
- // Exponent is a heapnumber. Load it into double register.
__ ldc1(double_exponent,
FieldMemOperand(exponent, HeapNumber::kValueOffset));
+ } else if (exponent_type_ == TAGGED) {
+ // Base is already in double_base.
+ __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
+
+ __ ldc1(double_exponent,
+ FieldMemOperand(exponent, HeapNumber::kValueOffset));
+ }
+
+ if (exponent_type_ != INTEGER) {
+ Label int_exponent_convert;
+ // Detect integer exponents stored as double.
+ __ EmitFPUTruncate(kRoundToMinusInf,
+ single_scratch,
+ double_exponent,
+ scratch,
+ scratch2,
+ kCheckForInexactConversion);
+ // scratch2 == 0 means there was no conversion error.
+ __ Branch(&int_exponent_convert, eq, scratch2, Operand(zero_reg));
+
+ if (exponent_type_ == ON_STACK) {
+ // Detect square root case. Crankshaft detects constant +/-0.5 at
+ // compile time and uses DoMathPowHalf instead. We then skip this check
+ // for non-constant cases of +/-0.5 as these hardly occur.
+ Label not_plus_half;
+
+ // Test for 0.5.
+ __ Move(double_scratch, 0.5);
+ __ BranchF(USE_DELAY_SLOT,
+ &not_plus_half,
+ NULL,
+ ne,
+ double_exponent,
+ double_scratch);
+ // double_scratch can be overwritten in the delay slot.
+ // Calculates square root of base. Check for the special case of
+ // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
+ __ Move(double_scratch, -V8_INFINITY);
+ __ BranchF(USE_DELAY_SLOT, &done, NULL, eq, double_base, double_scratch);
+ __ neg_d(double_result, double_scratch);
+
+ // Add +0 to convert -0 to +0.
+ __ add_d(double_scratch, double_base, kDoubleRegZero);
+ __ sqrt_d(double_result, double_scratch);
+ __ jmp(&done);
+
+ __ bind(&not_plus_half);
+ __ Move(double_scratch, -0.5);
+ __ BranchF(USE_DELAY_SLOT,
+ &call_runtime,
+ NULL,
+ ne,
+ double_exponent,
+ double_scratch);
+ // double_scratch can be overwritten in the delay slot.
+ // Calculates square root of base. Check for the special case of
+ // Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
+ __ Move(double_scratch, -V8_INFINITY);
+ __ BranchF(USE_DELAY_SLOT, &done, NULL, eq, double_base, double_scratch);
+ __ Move(double_result, kDoubleRegZero);
+
+ // Add +0 to convert -0 to +0.
+ __ add_d(double_scratch, double_base, kDoubleRegZero);
+ __ Move(double_result, 1);
+ __ sqrt_d(double_scratch, double_scratch);
+ __ div_d(double_result, double_result, double_scratch);
+ __ jmp(&done);
+ }
- // The base and the exponent are in double registers.
- // Allocate a heap number and call a C function for
- // double exponents. The register containing
- // the heap number is callee-saved.
- __ AllocateHeapNumber(heapnumber,
- scratch,
- scratch2,
- heapnumbermap,
- &call_runtime);
__ push(ra);
- __ PrepareCallCFunction(0, 2, scratch);
- // ABI (o32) for func(double a, double b): a in f12, b in f14.
- ASSERT(double_base.is(f12));
- ASSERT(double_exponent.is(f14));
- __ SetCallCDoubleArguments(double_base, double_exponent);
{
AllowExternalCallThatCantCauseGC scope(masm);
+ __ PrepareCallCFunction(0, 2, scratch);
+ __ SetCallCDoubleArguments(double_base, double_exponent);
__ CallCFunction(
ExternalReference::power_double_double_function(masm->isolate()),
- 0,
- 2);
- __ pop(ra);
- __ GetCFunctionDoubleResult(double_result);
+ 0, 2);
}
+ __ pop(ra);
+ __ GetCFunctionDoubleResult(double_result);
+ __ jmp(&done);
+
+ __ bind(&int_exponent_convert);
+ __ mfc1(scratch, single_scratch);
+ }
+
+ // Calculate power with integer exponent.
+ __ bind(&int_exponent);
+
+ // Get two copies of exponent in the registers scratch and exponent.
+ if (exponent_type_ == INTEGER) {
+ __ mov(scratch, exponent);
+ } else {
+ // Exponent has previously been stored into scratch as untagged integer.
+ __ mov(exponent, scratch);
+ }
+
+ __ mov_d(double_scratch, double_base); // Back up base.
+ __ Move(double_result, 1.0);
+
+ // Get absolute value of exponent.
+ Label positive_exponent;
+ __ Branch(&positive_exponent, ge, scratch, Operand(zero_reg));
+ __ Subu(scratch, zero_reg, scratch);
+ __ bind(&positive_exponent);
+
+ Label while_true, no_carry, loop_end;
+ __ bind(&while_true);
+
+ __ And(scratch2, scratch, 1);
+
+ __ Branch(&no_carry, eq, scratch2, Operand(zero_reg));
+ __ mul_d(double_result, double_result, double_scratch);
+ __ bind(&no_carry);
+
+ __ sra(scratch, scratch, 1);
+
+ __ Branch(&loop_end, eq, scratch, Operand(zero_reg));
+ __ mul_d(double_scratch, double_scratch, double_scratch);
+
+ __ Branch(&while_true);
+
+ __ bind(&loop_end);
+
+ __ Branch(&done, ge, exponent, Operand(zero_reg));
+ __ Move(double_scratch, 1.0);
+ __ div_d(double_result, double_scratch, double_result);
+ // Test whether result is zero. Bail out to check for subnormal result.
+ // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
+ __ BranchF(&done, NULL, ne, double_result, kDoubleRegZero);
+
+ // double_exponent may not contain the exponent value if the input was a
+ // smi. We set it with exponent value before bailing out.
+ __ mtc1(exponent, single_scratch);
+ __ cvt_d_w(double_exponent, single_scratch);
+
+ // Returning or bailing out.
+ Counters* counters = masm->isolate()->counters();
+ if (exponent_type_ == ON_STACK) {
+ // The arguments are still on the stack.
+ __ bind(&call_runtime);
+ __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
+
+ // The stub is called from non-optimized code, which expects the result
+ // as heap number in exponent.
+ __ bind(&done);
+ __ AllocateHeapNumber(
+ heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
__ sdc1(double_result,
FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
- __ mov(v0, heapnumber);
- __ DropAndRet(2 * kPointerSize);
- }
+ ASSERT(heapnumber.is(v0));
+ __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
+ __ DropAndRet(2);
+ } else {
+ __ push(ra);
+ {
+ AllowExternalCallThatCantCauseGC scope(masm);
+ __ PrepareCallCFunction(0, 2, scratch);
+ __ SetCallCDoubleArguments(double_base, double_exponent);
+ __ CallCFunction(
+ ExternalReference::power_double_double_function(masm->isolate()),
+ 0, 2);
+ }
+ __ pop(ra);
+ __ GetCFunctionDoubleResult(double_result);
- __ bind(&call_runtime);
- __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
+ __ bind(&done);
+ __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
+ __ Ret();
+ }
}
@@ -3694,17 +3835,6 @@ void CEntryStub::GenerateAheadOfTime() {
}
-void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
- __ Throw(v0);
-}
-
-
-void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
- UncatchableExceptionType type) {
- __ ThrowUncatchable(type, v0);
-}
-
-
void CEntryStub::GenerateCore(MacroAssembler* masm,
Label* throw_normal_exception,
Label* throw_termination_exception,
@@ -3734,9 +3864,10 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
__ sw(a1, MemOperand(a0));
}
- // Prepare arguments for C routine: a0 = argc, a1 = argv
+ // Prepare arguments for C routine.
+ // a0 = argc
__ mov(a0, s0);
- __ mov(a1, s1);
+ // a1 = argv (set in the delay slot after find_ra below).
// We are calling compiled C/C++ code. a0 and a1 hold our two arguments. We
// also need to reserve the 4 argument slots on the stack.
@@ -3756,30 +3887,28 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
// coverage code can interfere with the proper calculation of ra.
Label find_ra;
masm->bal(&find_ra); // bal exposes branch delay slot.
- masm->nop(); // Branch delay slot nop.
+ masm->mov(a1, s1);
masm->bind(&find_ra);
// Adjust the value in ra to point to the correct return location, 2nd
// instruction past the real call into C code (the jalr(t9)), and push it.
// This is the return address of the exit frame.
- const int kNumInstructionsToJump = 6;
+ const int kNumInstructionsToJump = 5;
masm->Addu(ra, ra, kNumInstructionsToJump * kPointerSize);
masm->sw(ra, MemOperand(sp)); // This spot was reserved in EnterExitFrame.
- masm->Subu(sp, sp, kCArgsSlotsSize);
+ // Stack space reservation moved to the branch delay slot below.
// Stack is still aligned.
// Call the C routine.
masm->mov(t9, s2); // Function pointer to t9 to conform to ABI for PIC.
masm->jalr(t9);
- masm->nop(); // Branch delay slot nop.
+ // Set up sp in the delay slot.
+ masm->addiu(sp, sp, -kCArgsSlotsSize);
// Make sure the stored 'ra' points to this position.
ASSERT_EQ(kNumInstructionsToJump,
masm->InstructionsGeneratedSince(&find_ra));
}
- // Restore stack (remove arg slots).
- __ Addu(sp, sp, kCArgsSlotsSize);
-
if (always_allocate) {
// It's okay to clobber a2 and a3 here. v0 & v1 contain result.
__ li(a2, Operand(scope_depth));
@@ -3793,14 +3922,16 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
__ addiu(a2, v0, 1);
__ andi(t0, a2, kFailureTagMask);
- __ Branch(&failure_returned, eq, t0, Operand(zero_reg));
+ __ Branch(USE_DELAY_SLOT, &failure_returned, eq, t0, Operand(zero_reg));
+ // Restore stack (remove arg slots) in branch delay slot.
+ __ addiu(sp, sp, kCArgsSlotsSize);
+
// Exit C frame and return.
// v0:v1: result
// sp: stack pointer
// fp: frame pointer
- __ LeaveExitFrame(save_doubles_, s0);
- __ Ret();
+ __ LeaveExitFrame(save_doubles_, s0, true);
// Check if we should retry or throw exception.
Label retry;
@@ -3811,11 +3942,16 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
// Special handling of out of memory exceptions.
Failure* out_of_memory = Failure::OutOfMemoryException();
- __ Branch(throw_out_of_memory_exception, eq,
- v0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
+ __ Branch(USE_DELAY_SLOT,
+ throw_out_of_memory_exception,
+ eq,
+ v0,
+ Operand(reinterpret_cast<int32_t>(out_of_memory)));
+ // If we throw the OOM exception, the value of a3 doesn't matter.
+ // Any instruction can be in the delay slot that's not a jump.
// Retrieve the pending exception and clear the variable.
- __ li(a3, Operand(isolate->factory()->the_hole_value()));
+ __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
__ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
isolate)));
__ lw(v0, MemOperand(t0));
@@ -3823,8 +3959,8 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
// Special handling of termination exceptions which are uncatchable
// by javascript code.
- __ Branch(throw_termination_exception, eq,
- v0, Operand(isolate->factory()->termination_exception()));
+ __ LoadRoot(t0, Heap::kTerminationExceptionRootIndex);
+ __ Branch(throw_termination_exception, eq, v0, Operand(t0));
// Handle normal exception.
__ jmp(throw_normal_exception);
@@ -3836,8 +3972,9 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
void CEntryStub::Generate(MacroAssembler* masm) {
// Called from JavaScript; parameters are on stack as if calling JS function
- // a0: number of arguments including receiver
- // a1: pointer to builtin function
+ // s0: number of arguments including receiver
+ // s1: size of arguments excluding receiver
+ // s2: pointer to builtin function
// fp: frame pointer (restored after C call)
// sp: stack pointer (restored as callee's sp after C call)
// cp: current context (C callee-saved)
@@ -3847,19 +3984,18 @@ void CEntryStub::Generate(MacroAssembler* masm) {
// this by performing a garbage collection and retrying the
// builtin once.
+ // NOTE: s0-s2 hold the arguments of this function instead of a0-a2.
+ // The reason for this is that these arguments would need to be saved anyway
+ // so it's faster to set them up directly.
+ // See MacroAssembler::PrepareCEntryArgs and PrepareCEntryFunction.
+
// Compute the argv pointer in a callee-saved register.
- __ sll(s1, a0, kPointerSizeLog2);
__ Addu(s1, sp, s1);
- __ Subu(s1, s1, Operand(kPointerSize));
// Enter the exit frame that transitions from JavaScript to C++.
FrameScope scope(masm, StackFrame::MANUAL);
__ EnterExitFrame(save_doubles_);
- // Setup argc and the builtin function in callee-saved registers.
- __ mov(s0, a0);
- __ mov(s2, a1);
-
// s0: number of arguments (C callee-saved)
// s1: pointer to first argument (C callee-saved)
// s2: pointer to builtin function (C callee-saved)
@@ -3895,24 +4031,38 @@ void CEntryStub::Generate(MacroAssembler* masm) {
true);
__ bind(&throw_out_of_memory_exception);
- GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
+ // Set external caught exception to false.
+ Isolate* isolate = masm->isolate();
+ ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
+ isolate);
+ __ li(a0, Operand(false, RelocInfo::NONE));
+ __ li(a2, Operand(external_caught));
+ __ sw(a0, MemOperand(a2));
+
+ // Set pending exception and v0 to out of memory exception.
+ Failure* out_of_memory = Failure::OutOfMemoryException();
+ __ li(v0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
+ __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
+ isolate)));
+ __ sw(v0, MemOperand(a2));
+ // Fall through to the next label.
__ bind(&throw_termination_exception);
- GenerateThrowUncatchable(masm, TERMINATION);
+ __ ThrowUncatchable(v0);
__ bind(&throw_normal_exception);
- GenerateThrowTOS(masm);
+ __ Throw(v0);
}
void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
- Label invoke, exit;
+ Label invoke, handler_entry, exit;
Isolate* isolate = masm->isolate();
// Registers:
// a0: entry address
// a1: function
- // a2: reveiver
+ // a2: receiver
// a3: argc
//
// Stack:
@@ -3937,6 +4087,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
offset_to_argv += kNumCalleeSavedFPU * kDoubleSize;
}
+ __ InitializeRootRegister();
__ lw(s0, MemOperand(sp, offset_to_argv + kCArgsSlotsSize));
// We build an EntryFrame.
@@ -3948,13 +4099,13 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
isolate)));
__ lw(t0, MemOperand(t0));
__ Push(t3, t2, t1, t0);
- // Setup frame pointer for the frame to be pushed.
+ // Set up frame pointer for the frame to be pushed.
__ addiu(fp, sp, -EntryFrameConstants::kCallerFPOffset);
// Registers:
// a0: entry_address
// a1: function
- // a2: reveiver_pointer
+ // a2: receiver_pointer
// a3: argc
// s0: argv
//
@@ -3983,14 +4134,15 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ bind(&cont);
__ push(t0);
- // Call a faked try-block that does the invoke.
- __ bal(&invoke); // bal exposes branch delay slot.
- __ nop(); // Branch delay slot nop.
-
- // Caught exception: Store result (exception) in the pending
- // exception field in the JSEnv and return a failure sentinel.
- // Coming in here the fp will be invalid because the PushTryHandler below
- // sets it to 0 to signal the existence of the JSEntry frame.
+ // Jump to a faked try block that does the invoke, with a faked catch
+ // block that sets the pending exception.
+ __ jmp(&invoke);
+ __ bind(&handler_entry);
+ handler_offset_ = handler_entry.pos();
+ // Caught exception: Store result (exception) in the pending exception
+ // field in the JSEnv and return a failure sentinel. Coming in here the
+ // fp will be invalid because the PushTryHandler below sets it to 0 to
+ // signal the existence of the JSEntry frame.
__ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
isolate)));
__ sw(v0, MemOperand(t0)); // We come back from 'invoke'. result is in v0.
@@ -3998,16 +4150,17 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ b(&exit); // b exposes branch delay slot.
__ nop(); // Branch delay slot nop.
- // Invoke: Link this frame into the handler chain.
+ // Invoke: Link this frame into the handler chain. There's only one
+ // handler block in this code object, so its index is 0.
__ bind(&invoke);
- __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
+ __ PushTryHandler(StackHandler::JS_ENTRY, 0);
// If an exception not caught by another handler occurs, this handler
// returns control to the code after the bal(&invoke) above, which
// restores all kCalleeSaved registers (including cp and fp) to their
// saved values before returning a failure to C.
// Clear any pending exceptions.
- __ li(t1, Operand(isolate->factory()->the_hole_value()));
+ __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
__ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
isolate)));
__ sw(t1, MemOperand(t0));
@@ -4019,7 +4172,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// Registers:
// a0: entry_address
// a1: function
- // a2: reveiver_pointer
+ // a2: receiver_pointer
// a3: argc
// s0: argv
//
@@ -4051,7 +4204,9 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// Check if the current stack frame is marked as the outermost JS frame.
Label non_outermost_js_2;
__ pop(t1);
- __ Branch(&non_outermost_js_2, ne, t1,
+ __ Branch(&non_outermost_js_2,
+ ne,
+ t1,
Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
__ li(t1, Operand(ExternalReference(js_entry_sp)));
__ sw(zero_reg, MemOperand(t1));
@@ -4101,7 +4256,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
const Register inline_site = t5;
const Register scratch = a2;
- const int32_t kDeltaToLoadBoolResult = 4 * kPointerSize;
+ const int32_t kDeltaToLoadBoolResult = 5 * kPointerSize;
Label slow, loop, is_instance, is_not_instance, not_js_object;
@@ -4145,11 +4300,12 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
// Patch the (relocated) inlined map check.
// The offset was stored in t0 safepoint slot.
- // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal)
+ // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal).
__ LoadFromSafepointRegisterSlot(scratch, t0);
__ Subu(inline_site, ra, scratch);
- // Patch the relocated value to map.
- __ PatchRelocatedValue(inline_site, scratch, map);
+ // Get the map location in scratch and patch it.
+ __ GetRelocatedValue(inline_site, scratch, v1); // v1 used as scratch.
+ __ sw(map, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
}
// Register mapping: a3 is object map and t0 is function prototype.
@@ -4215,8 +4371,10 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ Branch(&slow, ne, scratch, Operand(JS_FUNCTION_TYPE));
// Null is not instance of anything.
- __ Branch(&object_not_null, ne, scratch,
- Operand(masm->isolate()->factory()->null_value()));
+ __ Branch(&object_not_null,
+ ne,
+ scratch,
+ Operand(masm->isolate()->factory()->null_value()));
__ li(v0, Operand(Smi::FromInt(1)));
__ DropAndRet(HasArgsInRegisters() ? 0 : 2);
@@ -4263,7 +4421,7 @@ Register InstanceofStub::right() { return a1; }
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
// The displacement is the offset of the last parameter (if any)
// relative to the frame pointer.
- static const int kDisplacement =
+ const int kDisplacement =
StandardFrameConstants::kCallerSPOffset - kPointerSize;
// Check that the key is a smiGenerateReadElement.
@@ -4321,8 +4479,10 @@ void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
Label runtime;
__ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset));
- __ Branch(&runtime, ne,
- a2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+ __ Branch(&runtime,
+ ne,
+ a2,
+ Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
// Patch the arguments.length and the parameters pointer in the current frame.
__ lw(a2, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
@@ -4354,7 +4514,9 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
Label adaptor_frame, try_allocate;
__ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset));
- __ Branch(&adaptor_frame, eq, a2,
+ __ Branch(&adaptor_frame,
+ eq,
+ a2,
Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
// No adaptor, parameter count = argument count.
@@ -4433,7 +4595,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
__ sw(a3, FieldMemOperand(v0, i));
}
- // Setup the callee in-object property.
+ // Set up the callee in-object property.
STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
__ lw(a3, MemOperand(sp, 2 * kPointerSize));
const int kCalleeOffset = JSObject::kHeaderSize +
@@ -4446,7 +4608,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
Heap::kArgumentsLengthIndex * kPointerSize;
__ sw(a2, FieldMemOperand(v0, kLengthOffset));
- // Setup the elements pointer in the allocated arguments object.
+ // Set up the elements pointer in the allocated arguments object.
// If we allocated a parameter map, t0 will point there, otherwise
// it will point to the backing store.
__ Addu(t0, v0, Operand(Heap::kArgumentsObjectSize));
@@ -4544,11 +4706,10 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
__ Branch(&arguments_loop, lt, t5, Operand(a2));
// Return and remove the on-stack parameters.
- __ Addu(sp, sp, Operand(3 * kPointerSize));
- __ Ret();
+ __ DropAndRet(3);
// Do the runtime call to allocate the arguments object.
- // a2 = argument count (taggged)
+ // a2 = argument count (tagged)
__ bind(&runtime);
__ sw(a2, MemOperand(sp, 0 * kPointerSize)); // Patch argument count.
__ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
@@ -4623,7 +4784,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Get the parameters pointer from the stack.
__ lw(a2, MemOperand(sp, 1 * kPointerSize));
- // Setup the elements pointer in the allocated arguments object and
+ // Set up the elements pointer in the allocated arguments object and
// initialize the header in the elements fixed array.
__ Addu(t0, v0, Operand(Heap::kArgumentsObjectSizeStrict));
__ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
@@ -4635,7 +4796,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Copy the fixed array slots.
Label loop;
- // Setup t0 to point to the first array slot.
+ // Set up t0 to point to the first array slot.
__ Addu(t0, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ bind(&loop);
// Pre-decrement a2 with kPointerSize on each iteration.
@@ -4650,8 +4811,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Return and remove the on-stack parameters.
__ bind(&done);
- __ Addu(sp, sp, Operand(3 * kPointerSize));
- __ Ret();
+ __ DropAndRet(3);
// Do the runtime call to allocate the arguments object.
__ bind(&runtime);
@@ -4673,10 +4833,10 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// sp[8]: subject string
// sp[12]: JSRegExp object
- static const int kLastMatchInfoOffset = 0 * kPointerSize;
- static const int kPreviousIndexOffset = 1 * kPointerSize;
- static const int kSubjectOffset = 2 * kPointerSize;
- static const int kJSRegExpOffset = 3 * kPointerSize;
+ const int kLastMatchInfoOffset = 0 * kPointerSize;
+ const int kPreviousIndexOffset = 1 * kPointerSize;
+ const int kSubjectOffset = 2 * kPointerSize;
+ const int kJSRegExpOffset = 3 * kPointerSize;
Isolate* isolate = masm->isolate();
@@ -4761,8 +4921,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Check that the third argument is a positive smi less than the subject
// string length. A negative value will be greater (unsigned comparison).
__ lw(a0, MemOperand(sp, kPreviousIndexOffset));
- __ And(at, a0, Operand(kSmiTagMask));
- __ Branch(&runtime, ne, at, Operand(zero_reg));
+ __ JumpIfNotSmi(a0, &runtime);
__ Branch(&runtime, ls, a3, Operand(a0));
// a2: Number of capture registers
@@ -4795,25 +4954,38 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
Label seq_string;
__ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset));
__ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset));
- // First check for flat string.
- __ And(a1, a0, Operand(kIsNotStringMask | kStringRepresentationMask));
+ // First check for flat string. None of the following string type tests will
+ // succeed if subject is not a string or a short external string.
+ __ And(a1,
+ a0,
+ Operand(kIsNotStringMask |
+ kStringRepresentationMask |
+ kShortExternalStringMask));
STATIC_ASSERT((kStringTag | kSeqStringTag) == 0);
__ Branch(&seq_string, eq, a1, Operand(zero_reg));
// subject: Subject string
// a0: instance type if Subject string
// regexp_data: RegExp data (FixedArray)
+ // a1: whether subject is a string and if yes, its string representation
// Check for flat cons string or sliced string.
// A flat cons string is a cons string where the second part is the empty
// string. In that case the subject string is just the first part of the cons
// string. Also in this case the first part of the cons string is known to be
// a sequential string or an external string.
// In the case of a sliced string its offset has to be taken into account.
- Label cons_string, check_encoding;
+ Label cons_string, external_string, check_encoding;
STATIC_ASSERT(kConsStringTag < kExternalStringTag);
STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
+ STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
+ STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
__ Branch(&cons_string, lt, a1, Operand(kExternalStringTag));
- __ Branch(&runtime, eq, a1, Operand(kExternalStringTag));
+ __ Branch(&external_string, eq, a1, Operand(kExternalStringTag));
+
+ // Catch non-string subject or short external string.
+ STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag !=0);
+ __ And(at, a1, Operand(kIsNotStringMask | kShortExternalStringMask));
+ __ Branch(&runtime, ne, at, Operand(zero_reg));
// String is sliced.
__ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset));
@@ -4833,7 +5005,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset));
STATIC_ASSERT(kSeqStringTag == 0);
__ And(at, a0, Operand(kStringRepresentationMask));
- __ Branch(&runtime, ne, at, Operand(zero_reg));
+ __ Branch(&external_string, ne, at, Operand(zero_reg));
__ bind(&seq_string);
// subject: Subject string
@@ -4843,11 +5015,11 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT(kAsciiStringTag == 4);
STATIC_ASSERT(kTwoByteStringTag == 0);
// Find the code object based on the assumptions above.
- __ And(a0, a0, Operand(kStringEncodingMask)); // Non-zero for ascii.
+ __ And(a0, a0, Operand(kStringEncodingMask)); // Non-zero for ASCII.
__ lw(t9, FieldMemOperand(regexp_data, JSRegExp::kDataAsciiCodeOffset));
- __ sra(a3, a0, 2); // a3 is 1 for ascii, 0 for UC16 (usyed below).
+ __ sra(a3, a0, 2); // a3 is 1 for ASCII, 0 for UC16 (used below).
__ lw(t1, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset));
- __ movz(t9, t1, a0); // If UC16 (a0 is 0), replace t9 w/kDataUC16CodeOffset.
+ __ Movz(t9, t1, a0); // If UC16 (a0 is 0), replace t9 w/kDataUC16CodeOffset.
// Check that the irregexp code has been generated for the actual string
// encoding. If it has, the field contains a code object otherwise it contains
@@ -4873,8 +5045,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
1, a0, a2);
// Isolates: note we add an additional parameter here (isolate pointer).
- static const int kRegExpExecuteArguments = 8;
- static const int kParameterRegisters = 4;
+ const int kRegExpExecuteArguments = 8;
+ const int kParameterRegisters = 4;
__ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
// Stack pointer now points to cell where return address is to be written.
@@ -4955,14 +5127,11 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Check the result.
Label success;
- __ Branch(&success, eq,
- v0, Operand(NativeRegExpMacroAssembler::SUCCESS));
+ __ Branch(&success, eq, v0, Operand(NativeRegExpMacroAssembler::SUCCESS));
Label failure;
- __ Branch(&failure, eq,
- v0, Operand(NativeRegExpMacroAssembler::FAILURE));
+ __ Branch(&failure, eq, v0, Operand(NativeRegExpMacroAssembler::FAILURE));
// If not exception it can only be retry. Handle that in the runtime system.
- __ Branch(&runtime, ne,
- v0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
+ __ Branch(&runtime, ne, v0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
// Result must now be exception. If there is no pending exception already a
// stack overflow (on the backtrack stack) was detected in RegExp code but
// haven't created the exception yet. Handle that in the runtime system.
@@ -4980,16 +5149,15 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
Label termination_exception;
__ Branch(&termination_exception, eq, v0, Operand(a0));
- __ Throw(v0); // Expects thrown value in v0.
+ __ Throw(v0);
__ bind(&termination_exception);
- __ ThrowUncatchable(TERMINATION, v0); // Expects thrown value in v0.
+ __ ThrowUncatchable(v0);
__ bind(&failure);
// For failure and exception return null.
__ li(v0, Operand(isolate->factory()->null_value()));
- __ Addu(sp, sp, Operand(4 * kPointerSize));
- __ Ret();
+ __ DropAndRet(4);
// Process the result from the native regexp code.
__ bind(&success);
@@ -5050,14 +5218,36 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ sll(a3, a3, kSmiTagSize); // Convert to Smi.
__ sw(a3, MemOperand(a0, 0));
__ Branch(&next_capture, USE_DELAY_SLOT);
- __ addiu(a0, a0, kPointerSize); // In branch delay slot.
+ __ addiu(a0, a0, kPointerSize); // In branch delay slot.
__ bind(&done);
// Return last match info.
__ lw(v0, MemOperand(sp, kLastMatchInfoOffset));
- __ Addu(sp, sp, Operand(4 * kPointerSize));
- __ Ret();
+ __ DropAndRet(4);
+
+ // External string. Short external strings have already been ruled out.
+ // a0: scratch
+ __ bind(&external_string);
+ __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset));
+ __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset));
+ if (FLAG_debug_code) {
+ // Assert that we do not have a cons or slice (indirect strings) here.
+ // Sequential strings have already been ruled out.
+ __ And(at, a0, Operand(kIsIndirectStringMask));
+ __ Assert(eq,
+ "external string expected, but not found",
+ at,
+ Operand(zero_reg));
+ }
+ __ lw(subject,
+ FieldMemOperand(subject, ExternalString::kResourceDataOffset));
+ // Move the pointer so that offset-wise, it looks like a sequential string.
+ STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqAsciiString::kHeaderSize);
+ __ Subu(subject,
+ subject,
+ SeqTwoByteString::kHeaderSize - kHeapObjectTag);
+ __ jmp(&seq_string);
// Do the runtime call to execute the regexp.
__ bind(&runtime);
@@ -5111,11 +5301,11 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
// Set input, index and length fields from arguments.
__ lw(a1, MemOperand(sp, kPointerSize * 0));
+ __ lw(a2, MemOperand(sp, kPointerSize * 1));
+ __ lw(t2, MemOperand(sp, kPointerSize * 2));
__ sw(a1, FieldMemOperand(v0, JSRegExpResult::kInputOffset));
- __ lw(a1, MemOperand(sp, kPointerSize * 1));
- __ sw(a1, FieldMemOperand(v0, JSRegExpResult::kIndexOffset));
- __ lw(a1, MemOperand(sp, kPointerSize * 2));
- __ sw(a1, FieldMemOperand(v0, JSArray::kLengthOffset));
+ __ sw(a2, FieldMemOperand(v0, JSRegExpResult::kIndexOffset));
+ __ sw(t2, FieldMemOperand(v0, JSArray::kLengthOffset));
// Fill out the elements FixedArray.
// v0: JSArray, tagged.
@@ -5146,31 +5336,58 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
__ addiu(a3, a3, kPointerSize); // In branch delay slot.
__ bind(&done);
- __ Addu(sp, sp, Operand(3 * kPointerSize));
- __ Ret();
+ __ DropAndRet(3);
__ bind(&slowcase);
__ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
}
-void CallFunctionStub::FinishCode(Code* code) {
- code->set_has_function_cache(false);
-}
+static void GenerateRecordCallTarget(MacroAssembler* masm) {
+ // Cache the called function in a global property cell. Cache states
+ // are uninitialized, monomorphic (indicated by a JSFunction), and
+ // megamorphic.
+ // a1 : the function to call
+ // a2 : cache cell for call target
+ Label done;
+ ASSERT_EQ(*TypeFeedbackCells::MegamorphicSentinel(masm->isolate()),
+ masm->isolate()->heap()->undefined_value());
+ ASSERT_EQ(*TypeFeedbackCells::UninitializedSentinel(masm->isolate()),
+ masm->isolate()->heap()->the_hole_value());
-void CallFunctionStub::Clear(Heap* heap, Address address) {
- UNREACHABLE();
-}
+ // Load the cache state into a3.
+ __ lw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
+
+ // A monomorphic cache hit or an already megamorphic state: invoke the
+ // function without changing the state.
+ __ Branch(&done, eq, a3, Operand(a1));
+ __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
+ __ Branch(&done, eq, a3, Operand(at));
+ // A monomorphic miss (i.e, here the cache is not uninitialized) goes
+ // megamorphic.
+ __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
-Object* CallFunctionStub::GetCachedValue(Address address) {
- UNREACHABLE();
- return NULL;
+ __ Branch(USE_DELAY_SLOT, &done, eq, a3, Operand(at));
+ // An uninitialized cache is patched with the function.
+ // Store a1 in the delay slot. This may or may not get overwritten depending
+ // on the result of the comparison.
+ __ sw(a1, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
+ // No need for a write barrier here - cells are rescanned.
+
+ // MegamorphicSentinel is an immortal immovable object (undefined) so no
+ // write-barrier is needed.
+ __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
+ __ sw(at, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
+
+ __ bind(&done);
}
void CallFunctionStub::Generate(MacroAssembler* masm) {
+ // a1 : the function to call
+ // a2 : cache cell for call target
Label slow, non_function;
// The receiver might implicitly be the global object. This is
@@ -5185,16 +5402,12 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
__ Branch(&call, ne, t0, Operand(at));
// Patch the receiver on the stack with the global receiver object.
- __ lw(a1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
- __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
- __ sw(a1, MemOperand(sp, argc_ * kPointerSize));
+ __ lw(a2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));
+ __ sw(a2, MemOperand(sp, argc_ * kPointerSize));
__ bind(&call);
}
- // Get the function to call from the stack.
- // function, receiver [, arguments]
- __ lw(a1, MemOperand(sp, (argc_ + 1) * kPointerSize));
-
// Check that the function is really a JavaScript function.
// a1: pushed function (to be verified)
__ JumpIfSmi(a1, &non_function);
@@ -5231,7 +5444,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ li(a0, Operand(argc_ + 1, RelocInfo::NONE));
__ li(a2, Operand(0, RelocInfo::NONE));
__ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY);
- __ SetCallKind(t1, CALL_AS_FUNCTION);
+ __ SetCallKind(t1, CALL_AS_METHOD);
{
Handle<Code> adaptor =
masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
@@ -5242,7 +5455,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// of the original receiver from the call site).
__ bind(&non_function);
__ sw(a1, MemOperand(sp, argc_ * kPointerSize));
- __ li(a0, Operand(argc_)); // Setup the number of arguments.
+ __ li(a0, Operand(argc_)); // Set up the number of arguments.
__ mov(a2, zero_reg);
__ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION);
__ SetCallKind(t1, CALL_AS_METHOD);
@@ -5251,6 +5464,48 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
}
+void CallConstructStub::Generate(MacroAssembler* masm) {
+ // a0 : number of arguments
+ // a1 : the function to call
+ // a2 : cache cell for call target
+ Label slow, non_function_call;
+
+ // Check that the function is not a smi.
+ __ JumpIfSmi(a1, &non_function_call);
+ // Check that the function is a JSFunction.
+ __ GetObjectType(a1, a3, a3);
+ __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE));
+
+ if (RecordCallTarget()) {
+ GenerateRecordCallTarget(masm);
+ }
+
+ // Jump to the function-specific construct stub.
+ __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
+ __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kConstructStubOffset));
+ __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
+ __ Jump(at);
+
+ // a0: number of arguments
+ // a1: called object
+ // a3: object type
+ Label do_call;
+ __ bind(&slow);
+ __ Branch(&non_function_call, ne, a3, Operand(JS_FUNCTION_PROXY_TYPE));
+ __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
+ __ jmp(&do_call);
+
+ __ bind(&non_function_call);
+ __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
+ __ bind(&do_call);
+ // Set expected number of arguments to zero (not changing r0).
+ __ li(a2, Operand(0, RelocInfo::NONE));
+ __ SetCallKind(t1, CALL_AS_METHOD);
+ __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+ RelocInfo::CODE_TARGET);
+}
+
+
// Unfortunately you have to run without snapshots to see most of these
// names in the profile since most compare stubs end up in the snapshot.
void CompareStub::PrintName(StringStream* stream) {
@@ -5297,7 +5552,6 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
Label got_char_code;
Label sliced_string;
- ASSERT(!t0.is(scratch_));
ASSERT(!t0.is(index_));
ASSERT(!t0.is(result_));
ASSERT(!t0.is(object_));
@@ -5315,85 +5569,20 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
// If the index is non-smi trigger the non-smi case.
__ JumpIfNotSmi(index_, &index_not_smi_);
- // Put smi-tagged index into scratch register.
- __ mov(scratch_, index_);
__ bind(&got_smi_index_);
// Check for index out of range.
__ lw(t0, FieldMemOperand(object_, String::kLengthOffset));
- __ Branch(index_out_of_range_, ls, t0, Operand(scratch_));
-
- // We need special handling for non-flat strings.
- STATIC_ASSERT(kSeqStringTag == 0);
- __ And(t0, result_, Operand(kStringRepresentationMask));
- __ Branch(&flat_string, eq, t0, Operand(zero_reg));
+ __ Branch(index_out_of_range_, ls, t0, Operand(index_));
- // Handle non-flat strings.
- __ And(result_, result_, Operand(kStringRepresentationMask));
- STATIC_ASSERT(kConsStringTag < kExternalStringTag);
- STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
- __ Branch(&sliced_string, gt, result_, Operand(kExternalStringTag));
- __ Branch(&call_runtime_, eq, result_, Operand(kExternalStringTag));
-
- // ConsString.
- // Check whether the right hand side is the empty string (i.e. if
- // this is really a flat string in a cons string). If that is not
- // the case we would rather go to the runtime system now to flatten
- // the string.
- Label assure_seq_string;
- __ lw(result_, FieldMemOperand(object_, ConsString::kSecondOffset));
- __ LoadRoot(t0, Heap::kEmptyStringRootIndex);
- __ Branch(&call_runtime_, ne, result_, Operand(t0));
+ __ sra(index_, index_, kSmiTagSize);
- // Get the first of the two strings and load its instance type.
- __ lw(result_, FieldMemOperand(object_, ConsString::kFirstOffset));
- __ jmp(&assure_seq_string);
+ StringCharLoadGenerator::Generate(masm,
+ object_,
+ index_,
+ result_,
+ &call_runtime_);
- // SlicedString, unpack and add offset.
- __ bind(&sliced_string);
- __ lw(result_, FieldMemOperand(object_, SlicedString::kOffsetOffset));
- __ addu(scratch_, scratch_, result_);
- __ lw(result_, FieldMemOperand(object_, SlicedString::kParentOffset));
-
- // Assure that we are dealing with a sequential string. Go to runtime if not.
- __ bind(&assure_seq_string);
- __ lw(result_, FieldMemOperand(result_, HeapObject::kMapOffset));
- __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
- // Check that parent is not an external string. Go to runtime otherwise.
- STATIC_ASSERT(kSeqStringTag == 0);
-
- __ And(t0, result_, Operand(kStringRepresentationMask));
- __ Branch(&call_runtime_, ne, t0, Operand(zero_reg));
- // Actually fetch the parent string if it is confirmed to be sequential.
- STATIC_ASSERT(SlicedString::kParentOffset == ConsString::kFirstOffset);
- __ lw(object_, FieldMemOperand(object_, SlicedString::kParentOffset));
-
- // Check for 1-byte or 2-byte string.
- __ bind(&flat_string);
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
- STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
- __ And(t0, result_, Operand(kStringEncodingMask));
- __ Branch(&ascii_string, ne, t0, Operand(zero_reg));
-
- // 2-byte string.
- // Load the 2-byte character code into the result register. We can
- // add without shifting since the smi tag size is the log2 of the
- // number of bytes in a two-byte character.
- STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1 && kSmiShiftSize == 0);
- __ Addu(scratch_, object_, Operand(scratch_));
- __ lhu(result_, FieldMemOperand(scratch_, SeqTwoByteString::kHeaderSize));
- __ Branch(&got_char_code);
-
- // ASCII string.
- // Load the byte into the result register.
- __ bind(&ascii_string);
-
- __ srl(t0, scratch_, kSmiTagSize);
- __ Addu(scratch_, object_, t0);
-
- __ lbu(result_, FieldMemOperand(scratch_, SeqAsciiString::kHeaderSize));
-
- __ bind(&got_char_code);
__ sll(result_, result_, kSmiTagSize);
__ bind(&exit_);
}
@@ -5408,13 +5597,13 @@ void StringCharCodeAtGenerator::GenerateSlow(
__ bind(&index_not_smi_);
// If index is a heap number, try converting it to an integer.
__ CheckMap(index_,
- scratch_,
+ result_,
Heap::kHeapNumberMapRootIndex,
index_not_number_,
DONT_DO_SMI_CHECK);
call_helper.BeforeCall(masm);
// Consumed by runtime conversion function:
- __ Push(object_, index_, index_);
+ __ Push(object_, index_);
if (index_flags_ == STRING_INDEX_IS_NUMBER) {
__ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
} else {
@@ -5426,16 +5615,14 @@ void StringCharCodeAtGenerator::GenerateSlow(
// Save the conversion result before the pop instructions below
// have a chance to overwrite it.
- __ Move(scratch_, v0);
-
- __ pop(index_);
+ __ Move(index_, v0);
__ pop(object_);
// Reload the instance type.
__ lw(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
__ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
call_helper.AfterCall(masm);
// If index is still not a smi, it must be out of range.
- __ JumpIfNotSmi(scratch_, index_out_of_range_);
+ __ JumpIfNotSmi(index_, index_out_of_range_);
// Otherwise, return to the fast path.
__ Branch(&got_smi_index_);
@@ -5444,6 +5631,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
// is too complex (e.g., when the string needs to be flattened).
__ bind(&call_runtime_);
call_helper.BeforeCall(masm);
+ __ sll(index_, index_, kSmiTagSize);
__ Push(object_, index_);
__ CallRuntime(Runtime::kStringCharCodeAt, 2);
@@ -5521,70 +5709,6 @@ void StringCharAtGenerator::GenerateSlow(
}
-class StringHelper : public AllStatic {
- public:
- // Generate code for copying characters using a simple loop. This should only
- // be used in places where the number of characters is small and the
- // additional setup and checking in GenerateCopyCharactersLong adds too much
- // overhead. Copying of overlapping regions is not supported.
- // Dest register ends at the position after the last character written.
- static void GenerateCopyCharacters(MacroAssembler* masm,
- Register dest,
- Register src,
- Register count,
- Register scratch,
- bool ascii);
-
- // Generate code for copying a large number of characters. This function
- // is allowed to spend extra time setting up conditions to make copying
- // faster. Copying of overlapping regions is not supported.
- // Dest register ends at the position after the last character written.
- static void GenerateCopyCharactersLong(MacroAssembler* masm,
- Register dest,
- Register src,
- Register count,
- Register scratch1,
- Register scratch2,
- Register scratch3,
- Register scratch4,
- Register scratch5,
- int flags);
-
-
- // Probe the symbol table for a two character string. If the string is
- // not found by probing a jump to the label not_found is performed. This jump
- // does not guarantee that the string is not in the symbol table. If the
- // string is found the code falls through with the string in register r0.
- // Contents of both c1 and c2 registers are modified. At the exit c1 is
- // guaranteed to contain halfword with low and high bytes equal to
- // initial contents of c1 and c2 respectively.
- static void GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
- Register c1,
- Register c2,
- Register scratch1,
- Register scratch2,
- Register scratch3,
- Register scratch4,
- Register scratch5,
- Label* not_found);
-
- // Generate string hash.
- static void GenerateHashInit(MacroAssembler* masm,
- Register hash,
- Register character);
-
- static void GenerateHashAddCharacter(MacroAssembler* masm,
- Register hash,
- Register character);
-
- static void GenerateHashGetHash(MacroAssembler* masm,
- Register hash);
-
- private:
- DISALLOW_IMPLICIT_CONSTRUCTORS(StringHelper);
-};
-
-
void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
Register dest,
Register src,
@@ -5760,10 +5884,8 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
__ Branch(&tmp, Ugreater, scratch, Operand(static_cast<int>('9' - '0')));
__ Or(c1, c1, scratch1);
__ bind(&tmp);
- __ Branch(not_found,
- Uless_equal,
- scratch,
- Operand(static_cast<int>('9' - '0')));
+ __ Branch(
+ not_found, Uless_equal, scratch, Operand(static_cast<int>('9' - '0')));
__ bind(&not_array_index);
// Calculate the two character string hash.
@@ -5809,7 +5931,7 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
// scratch: -
// Perform a number of probes in the symbol table.
- static const int kProbes = 4;
+ const int kProbes = 4;
Label found_in_symbol_table;
Label next_probe[kProbes];
Register candidate = scratch5; // Scratch register contains candidate.
@@ -5835,10 +5957,10 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
__ Branch(&is_string, ne, scratch, Operand(ODDBALL_TYPE));
__ Branch(not_found, eq, undefined, Operand(candidate));
- // Must be null (deleted entry).
+ // Must be the hole (deleted entry).
if (FLAG_debug_code) {
- __ LoadRoot(scratch, Heap::kNullValueRootIndex);
- __ Assert(eq, "oddball in symbol table is not undefined or null",
+ __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
+ __ Assert(eq, "oddball in symbol table is not undefined or the hole",
scratch, Operand(candidate));
}
__ jmp(&next_probe[i]);
@@ -5872,51 +5994,58 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
void StringHelper::GenerateHashInit(MacroAssembler* masm,
- Register hash,
- Register character) {
- // hash = character + (character << 10);
- __ sll(hash, character, 10);
+ Register hash,
+ Register character) {
+ // hash = seed + character + ((seed + character) << 10);
+ __ LoadRoot(hash, Heap::kHashSeedRootIndex);
+ // Untag smi seed and add the character.
+ __ SmiUntag(hash);
__ addu(hash, hash, character);
+ __ sll(at, hash, 10);
+ __ addu(hash, hash, at);
// hash ^= hash >> 6;
- __ sra(at, hash, 6);
+ __ srl(at, hash, 6);
__ xor_(hash, hash, at);
}
void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
- Register hash,
- Register character) {
+ Register hash,
+ Register character) {
// hash += character;
__ addu(hash, hash, character);
// hash += hash << 10;
__ sll(at, hash, 10);
__ addu(hash, hash, at);
// hash ^= hash >> 6;
- __ sra(at, hash, 6);
+ __ srl(at, hash, 6);
__ xor_(hash, hash, at);
}
void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
- Register hash) {
+ Register hash) {
// hash += hash << 3;
__ sll(at, hash, 3);
__ addu(hash, hash, at);
// hash ^= hash >> 11;
- __ sra(at, hash, 11);
+ __ srl(at, hash, 11);
__ xor_(hash, hash, at);
// hash += hash << 15;
__ sll(at, hash, 15);
__ addu(hash, hash, at);
+ __ li(at, Operand(String::kHashBitMask));
+ __ and_(hash, hash, at);
+
// if (hash == 0) hash = 27;
- __ ori(at, zero_reg, 27);
- __ movz(hash, at, hash);
+ __ ori(at, zero_reg, StringHasher::kZeroHash);
+ __ Movz(hash, at, hash);
}
void SubStringStub::Generate(MacroAssembler* masm) {
- Label sub_string_runtime;
+ Label runtime;
// Stack frame on entry.
// ra: return address
// sp[0]: to
@@ -5930,57 +6059,35 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// 0 <= from <= to <= string.length.
// If any of these assumptions fail, we call the runtime system.
- static const int kToOffset = 0 * kPointerSize;
- static const int kFromOffset = 1 * kPointerSize;
- static const int kStringOffset = 2 * kPointerSize;
-
- Register to = t2;
- Register from = t3;
+ const int kToOffset = 0 * kPointerSize;
+ const int kFromOffset = 1 * kPointerSize;
+ const int kStringOffset = 2 * kPointerSize;
- // Check bounds and smi-ness.
- __ lw(to, MemOperand(sp, kToOffset));
- __ lw(from, MemOperand(sp, kFromOffset));
+ __ lw(a2, MemOperand(sp, kToOffset));
+ __ lw(a3, MemOperand(sp, kFromOffset));
STATIC_ASSERT(kFromOffset == kToOffset + 4);
STATIC_ASSERT(kSmiTag == 0);
STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
- __ JumpIfNotSmi(from, &sub_string_runtime);
- __ JumpIfNotSmi(to, &sub_string_runtime);
-
- __ sra(a3, from, kSmiTagSize); // Remove smi tag.
- __ sra(t5, to, kSmiTagSize); // Remove smi tag.
-
- // a3: from index (untagged smi)
- // t5: to index (untagged smi)
-
- __ Branch(&sub_string_runtime, lt, a3, Operand(zero_reg)); // From < 0.
-
- __ subu(a2, t5, a3);
- __ Branch(&sub_string_runtime, gt, a3, Operand(t5)); // Fail if from > to.
+ // Utilize delay slots. SmiUntag doesn't emit a jump, everything else is
+ // safe in this case.
+ __ UntagAndJumpIfNotSmi(a2, a2, &runtime);
+ __ UntagAndJumpIfNotSmi(a3, a3, &runtime);
+ // Both a2 and a3 are untagged integers.
- // Special handling of sub-strings of length 1 and 2. One character strings
- // are handled in the runtime system (looked up in the single character
- // cache). Two character strings are looked for in the symbol cache in
- // generated code.
- __ Branch(&sub_string_runtime, lt, a2, Operand(2));
+ __ Branch(&runtime, lt, a3, Operand(zero_reg)); // From < 0.
- // Both to and from are smis.
+ __ Branch(&runtime, gt, a3, Operand(a2)); // Fail if from > to.
+ __ Subu(a2, a2, a3);
- // a2: result string length
- // a3: from index (untagged smi)
- // t2: (a.k.a. to): to (smi)
- // t3: (a.k.a. from): from offset (smi)
- // t5: to index (untagged smi)
-
- // Make sure first argument is a sequential (or flat) string.
+ // Make sure first argument is a string.
__ lw(v0, MemOperand(sp, kStringOffset));
- __ Branch(&sub_string_runtime, eq, v0, Operand(kSmiTagMask));
-
+ __ JumpIfSmi(v0, &runtime);
__ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
__ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
- __ And(t4, v0, Operand(kIsNotStringMask));
+ __ And(t0, a1, Operand(kIsNotStringMask));
- __ Branch(&sub_string_runtime, ne, t4, Operand(zero_reg));
+ __ Branch(&runtime, ne, t0, Operand(zero_reg));
// Short-cut for the case of trivial substring.
Label return_v0;
@@ -5990,74 +6097,16 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ sra(t0, t0, 1);
__ Branch(&return_v0, eq, a2, Operand(t0));
- Label create_slice;
- if (FLAG_string_slices) {
- __ Branch(&create_slice, ge, a2, Operand(SlicedString::kMinLength));
- }
-
- // v0: original string
- // a1: instance type
- // a2: result string length
- // a3: from index (untagged smi)
- // t2: (a.k.a. to): to (smi)
- // t3: (a.k.a. from): from offset (smi)
- // t5: to index (untagged smi)
-
- Label seq_string;
- __ And(t0, a1, Operand(kStringRepresentationMask));
- STATIC_ASSERT(kSeqStringTag < kConsStringTag);
- STATIC_ASSERT(kConsStringTag < kExternalStringTag);
- STATIC_ASSERT(kConsStringTag < kSlicedStringTag);
-
- // Slices and external strings go to runtime.
- __ Branch(&sub_string_runtime, gt, t0, Operand(kConsStringTag));
-
- // Sequential strings are handled directly.
- __ Branch(&seq_string, lt, t0, Operand(kConsStringTag));
-
- // Cons string. Try to recurse (once) on the first substring.
- // (This adds a little more generality than necessary to handle flattened
- // cons strings, but not much).
- __ lw(v0, FieldMemOperand(v0, ConsString::kFirstOffset));
- __ lw(t0, FieldMemOperand(v0, HeapObject::kMapOffset));
- __ lbu(a1, FieldMemOperand(t0, Map::kInstanceTypeOffset));
- STATIC_ASSERT(kSeqStringTag == 0);
- // Cons, slices and external strings go to runtime.
- __ Branch(&sub_string_runtime, ne, a1, Operand(kStringRepresentationMask));
-
- // Definitly a sequential string.
- __ bind(&seq_string);
-
- // v0: original string
- // a1: instance type
- // a2: result string length
- // a3: from index (untagged smi)
- // t2: (a.k.a. to): to (smi)
- // t3: (a.k.a. from): from offset (smi)
- // t5: to index (untagged smi)
-
- __ lw(t0, FieldMemOperand(v0, String::kLengthOffset));
- __ Branch(&sub_string_runtime, lt, t0, Operand(to)); // Fail if to > length.
- to = no_reg;
-
- // v0: original string or left hand side of the original cons string.
- // a1: instance type
- // a2: result string length
- // a3: from index (untagged smi)
- // t3: (a.k.a. from): from offset (smi)
- // t5: to index (untagged smi)
-
- // Check for flat ASCII string.
- Label non_ascii_flat;
- STATIC_ASSERT(kTwoByteStringTag == 0);
-
- __ And(t4, a1, Operand(kStringEncodingMask));
- __ Branch(&non_ascii_flat, eq, t4, Operand(zero_reg));
Label result_longer_than_two;
- __ Branch(&result_longer_than_two, gt, a2, Operand(2));
+ // Check for special case of two character ASCII string, in which case
+ // we do a lookup in the symbol table first.
+ __ li(t0, 2);
+ __ Branch(&result_longer_than_two, gt, a2, Operand(t0));
+ __ Branch(&runtime, lt, a2, Operand(t0));
+
+ __ JumpIfInstanceTypeIsNotSequentialAscii(a1, a1, &runtime);
- // Sub string of length 2 requested.
// Get the two characters forming the sub string.
__ Addu(v0, v0, Operand(a3));
__ lbu(a3, FieldMemOperand(v0, SeqAsciiString::kHeaderSize));
@@ -6067,31 +6116,126 @@ void SubStringStub::Generate(MacroAssembler* masm) {
Label make_two_character_string;
StringHelper::GenerateTwoCharacterSymbolTableProbe(
masm, a3, t0, a1, t1, t2, t3, t4, &make_two_character_string);
- Counters* counters = masm->isolate()->counters();
__ jmp(&return_v0);
// a2: result string length.
// a3: two characters combined into halfword in little endian byte order.
__ bind(&make_two_character_string);
- __ AllocateAsciiString(v0, a2, t0, t1, t4, &sub_string_runtime);
+ __ AllocateAsciiString(v0, a2, t0, t1, t4, &runtime);
__ sh(a3, FieldMemOperand(v0, SeqAsciiString::kHeaderSize));
__ jmp(&return_v0);
__ bind(&result_longer_than_two);
- // Locate 'from' character of string.
- __ Addu(t1, v0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- __ sra(t4, from, 1);
- __ Addu(t1, t1, t4);
+ // Deal with different string types: update the index if necessary
+ // and put the underlying string into t1.
+ // v0: original string
+ // a1: instance type
+ // a2: length
+ // a3: from index (untagged)
+ Label underlying_unpacked, sliced_string, seq_or_external_string;
+ // If the string is not indirect, it can only be sequential or external.
+ STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
+ STATIC_ASSERT(kIsIndirectStringMask != 0);
+ __ And(t0, a1, Operand(kIsIndirectStringMask));
+ __ Branch(USE_DELAY_SLOT, &seq_or_external_string, eq, t0, Operand(zero_reg));
+ // t0 is used as a scratch register and can be overwritten in either case.
+ __ And(t0, a1, Operand(kSlicedNotConsMask));
+ __ Branch(&sliced_string, ne, t0, Operand(zero_reg));
+ // Cons string. Check whether it is flat, then fetch first part.
+ __ lw(t1, FieldMemOperand(v0, ConsString::kSecondOffset));
+ __ LoadRoot(t0, Heap::kEmptyStringRootIndex);
+ __ Branch(&runtime, ne, t1, Operand(t0));
+ __ lw(t1, FieldMemOperand(v0, ConsString::kFirstOffset));
+ // Update instance type.
+ __ lw(a1, FieldMemOperand(t1, HeapObject::kMapOffset));
+ __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
+ __ jmp(&underlying_unpacked);
- // Allocate the result.
- __ AllocateAsciiString(v0, a2, t4, t0, a1, &sub_string_runtime);
+ __ bind(&sliced_string);
+ // Sliced string. Fetch parent and correct start index by offset.
+ __ lw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
+ __ lw(t0, FieldMemOperand(v0, SlicedString::kOffsetOffset));
+ __ sra(t0, t0, 1); // Add offset to index.
+ __ Addu(a3, a3, t0);
+ // Update instance type.
+ __ lw(a1, FieldMemOperand(t1, HeapObject::kMapOffset));
+ __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
+ __ jmp(&underlying_unpacked);
+
+ __ bind(&seq_or_external_string);
+ // Sequential or external string. Just move string to the expected register.
+ __ mov(t1, v0);
+
+ __ bind(&underlying_unpacked);
+
+ if (FLAG_string_slices) {
+ Label copy_routine;
+ // t1: underlying subject string
+ // a1: instance type of underlying subject string
+ // a2: length
+ // a3: adjusted start index (untagged)
+ // Short slice. Copy instead of slicing.
+ __ Branch(&copy_routine, lt, a2, Operand(SlicedString::kMinLength));
+ // Allocate new sliced string. At this point we do not reload the instance
+ // type including the string encoding because we simply rely on the info
+ // provided by the original string. It does not matter if the original
+ // string's encoding is wrong because we always have to recheck encoding of
+ // the newly created string's parent anyways due to externalized strings.
+ Label two_byte_slice, set_slice_header;
+ STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+ STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
+ __ And(t0, a1, Operand(kStringEncodingMask));
+ __ Branch(&two_byte_slice, eq, t0, Operand(zero_reg));
+ __ AllocateAsciiSlicedString(v0, a2, t2, t3, &runtime);
+ __ jmp(&set_slice_header);
+ __ bind(&two_byte_slice);
+ __ AllocateTwoByteSlicedString(v0, a2, t2, t3, &runtime);
+ __ bind(&set_slice_header);
+ __ sll(a3, a3, 1);
+ __ sw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
+ __ sw(a3, FieldMemOperand(v0, SlicedString::kOffsetOffset));
+ __ jmp(&return_v0);
+
+ __ bind(&copy_routine);
+ }
+
+ // t1: underlying subject string
+ // a1: instance type of underlying subject string
+ // a2: length
+ // a3: adjusted start index (untagged)
+ Label two_byte_sequential, sequential_string, allocate_result;
+ STATIC_ASSERT(kExternalStringTag != 0);
+ STATIC_ASSERT(kSeqStringTag == 0);
+ __ And(t0, a1, Operand(kExternalStringTag));
+ __ Branch(&sequential_string, eq, t0, Operand(zero_reg));
+
+ // Handle external string.
+ // Rule out short external strings.
+ STATIC_CHECK(kShortExternalStringTag != 0);
+ __ And(t0, a1, Operand(kShortExternalStringTag));
+ __ Branch(&runtime, ne, t0, Operand(zero_reg));
+ __ lw(t1, FieldMemOperand(t1, ExternalString::kResourceDataOffset));
+ // t1 already points to the first character of underlying string.
+ __ jmp(&allocate_result);
+
+ __ bind(&sequential_string);
+ // Locate first character of underlying subject string.
+ STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqAsciiString::kHeaderSize);
+ __ Addu(t1, t1, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+
+ __ bind(&allocate_result);
+ // Sequential acii string. Allocate the result.
+ STATIC_ASSERT((kAsciiStringTag & kStringEncodingMask) != 0);
+ __ And(t0, a1, Operand(kStringEncodingMask));
+ __ Branch(&two_byte_sequential, eq, t0, Operand(zero_reg));
+
+ // Allocate and copy the resulting ASCII string.
+ __ AllocateAsciiString(v0, a2, t0, t2, t3, &runtime);
+
+ // Locate first character of substring to copy.
+ __ Addu(t1, t1, a3);
- // v0: result string
- // a2: result string length
- // a3: from index (untagged smi)
- // t1: first character of substring to copy
- // t3: (a.k.a. from): from offset (smi)
// Locate first character of result.
__ Addu(a1, v0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
@@ -6104,30 +6248,17 @@ void SubStringStub::Generate(MacroAssembler* masm) {
masm, a1, t1, a2, a3, t0, t2, t3, t4, COPY_ASCII | DEST_ALWAYS_ALIGNED);
__ jmp(&return_v0);
- __ bind(&non_ascii_flat);
- // a2: result string length
- // t1: string
- // t3: (a.k.a. from): from offset (smi)
- // Check for flat two byte string.
-
- // Locate 'from' character of string.
- __ Addu(t1, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
- // As "from" is a smi it is 2 times the value which matches the size of a two
- // byte character.
- STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
- __ Addu(t1, t1, Operand(from));
-
- // Allocate the result.
- __ AllocateTwoByteString(v0, a2, a1, a3, t0, &sub_string_runtime);
+ // Allocate and copy the resulting two-byte string.
+ __ bind(&two_byte_sequential);
+ __ AllocateTwoByteString(v0, a2, t0, t2, t3, &runtime);
- // v0: result string
- // a2: result string length
- // t1: first character of substring to copy
+ // Locate first character of substring to copy.
+ STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
+ __ sll(t0, a3, 1);
+ __ Addu(t1, t1, t0);
// Locate first character of result.
__ Addu(a1, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
- from = no_reg;
-
// v0: result string.
// a1: first character of result.
// a2: result length.
@@ -6135,77 +6266,14 @@ void SubStringStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
StringHelper::GenerateCopyCharactersLong(
masm, a1, t1, a2, a3, t0, t2, t3, t4, DEST_ALWAYS_ALIGNED);
- __ jmp(&return_v0);
-
- if (FLAG_string_slices) {
- __ bind(&create_slice);
- // v0: original string
- // a1: instance type
- // a2: length
- // a3: from index (untagged smi)
- // t2 (a.k.a. to): to (smi)
- // t3 (a.k.a. from): from offset (smi)
- Label allocate_slice, sliced_string, seq_string;
- STATIC_ASSERT(kSeqStringTag == 0);
- __ And(t4, a1, Operand(kStringRepresentationMask));
- __ Branch(&seq_string, eq, t4, Operand(zero_reg));
- STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
- STATIC_ASSERT(kIsIndirectStringMask != 0);
- __ And(t4, a1, Operand(kIsIndirectStringMask));
- // External string. Jump to runtime.
- __ Branch(&sub_string_runtime, eq, t4, Operand(zero_reg));
-
- __ And(t4, a1, Operand(kSlicedNotConsMask));
- __ Branch(&sliced_string, ne, t4, Operand(zero_reg));
- // Cons string. Check whether it is flat, then fetch first part.
- __ lw(t1, FieldMemOperand(v0, ConsString::kSecondOffset));
- __ LoadRoot(t5, Heap::kEmptyStringRootIndex);
- __ Branch(&sub_string_runtime, ne, t1, Operand(t5));
- __ lw(t1, FieldMemOperand(v0, ConsString::kFirstOffset));
- __ jmp(&allocate_slice);
-
- __ bind(&sliced_string);
- // Sliced string. Fetch parent and correct start index by offset.
- __ lw(t1, FieldMemOperand(v0, SlicedString::kOffsetOffset));
- __ addu(t3, t3, t1);
- __ lw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
- __ jmp(&allocate_slice);
-
- __ bind(&seq_string);
- // Sequential string. Just move string to the right register.
- __ mov(t1, v0);
-
- __ bind(&allocate_slice);
- // a1: instance type of original string
- // a2: length
- // t1: underlying subject string
- // t3 (a.k.a. from): from offset (smi)
- // Allocate new sliced string. At this point we do not reload the instance
- // type including the string encoding because we simply rely on the info
- // provided by the original string. It does not matter if the original
- // string's encoding is wrong because we always have to recheck encoding of
- // the newly created string's parent anyways due to externalized strings.
- Label two_byte_slice, set_slice_header;
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
- STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
- __ And(t4, a1, Operand(kStringEncodingMask));
- __ Branch(&two_byte_slice, eq, t4, Operand(zero_reg));
- __ AllocateAsciiSlicedString(v0, a2, a3, t0, &sub_string_runtime);
- __ jmp(&set_slice_header);
- __ bind(&two_byte_slice);
- __ AllocateTwoByteSlicedString(v0, a2, a3, t0, &sub_string_runtime);
- __ bind(&set_slice_header);
- __ sw(t3, FieldMemOperand(v0, SlicedString::kOffsetOffset));
- __ sw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
- }
__ bind(&return_v0);
+ Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
- __ Addu(sp, sp, Operand(3 * kPointerSize));
- __ Ret();
+ __ DropAndRet(3);
// Just jump to runtime to create the sub string.
- __ bind(&sub_string_runtime);
+ __ bind(&runtime);
__ TailCallRuntime(Runtime::kSubString, 3, 1);
}
@@ -6262,7 +6330,7 @@ void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
__ Subu(scratch3, scratch1, Operand(scratch2));
Register length_delta = scratch3;
__ slt(scratch4, scratch2, scratch1);
- __ movn(scratch1, scratch2, scratch4);
+ __ Movn(scratch1, scratch2, scratch4);
Register min_length = scratch1;
STATIC_ASSERT(kSmiTag == 0);
__ Branch(&compare_lengths, eq, min_length, Operand(zero_reg));
@@ -6344,8 +6412,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT(kSmiTag == 0);
__ li(v0, Operand(Smi::FromInt(EQUAL)));
__ IncrementCounter(counters->string_compare_native(), 1, a1, a2);
- __ Addu(sp, sp, Operand(2 * kPointerSize));
- __ Ret();
+ __ DropAndRet(2);
__ bind(&not_same);
@@ -6363,7 +6430,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
void StringAddStub::Generate(MacroAssembler* masm) {
- Label string_add_runtime, call_builtin;
+ Label call_runtime, call_builtin;
Builtins::JavaScript builtin_id = Builtins::ADD;
Counters* counters = masm->isolate()->counters();
@@ -6378,7 +6445,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
// Make sure that both arguments are strings if not known in advance.
if (flags_ == NO_STRING_ADD_FLAGS) {
- __ JumpIfEitherSmi(a0, a1, &string_add_runtime);
+ __ JumpIfEitherSmi(a0, a1, &call_runtime);
// Load instance types.
__ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset));
__ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
@@ -6388,7 +6455,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
// If either is not a string, go to runtime.
__ Or(t4, t0, Operand(t1));
__ And(t4, t4, Operand(kIsNotStringMask));
- __ Branch(&string_add_runtime, ne, t4, Operand(zero_reg));
+ __ Branch(&call_runtime, ne, t4, Operand(zero_reg));
} else {
// Here at least one of the arguments is definitely a string.
// We convert the one that is not known to be a string.
@@ -6420,15 +6487,14 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ lw(a2, FieldMemOperand(a0, String::kLengthOffset));
__ lw(a3, FieldMemOperand(a1, String::kLengthOffset));
__ mov(v0, a0); // Assume we'll return first string (from a0).
- __ movz(v0, a1, a2); // If first is empty, return second (from a1).
+ __ Movz(v0, a1, a2); // If first is empty, return second (from a1).
__ slt(t4, zero_reg, a2); // if (a2 > 0) t4 = 1.
__ slt(t5, zero_reg, a3); // if (a3 > 0) t5 = 1.
__ and_(t4, t4, t5); // Branch if both strings were non-empty.
__ Branch(&strings_not_empty, ne, t4, Operand(zero_reg));
__ IncrementCounter(counters->string_add_native(), 1, a2, a3);
- __ Addu(sp, sp, Operand(2 * kPointerSize));
- __ Ret();
+ __ DropAndRet(2);
__ bind(&strings_not_empty);
}
@@ -6461,7 +6527,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ lbu(t1, FieldMemOperand(t1, Map::kInstanceTypeOffset));
}
__ JumpIfBothInstanceTypesAreNotSequentialAscii(t0, t1, t2, t3,
- &string_add_runtime);
+ &call_runtime);
// Get the two characters forming the sub string.
__ lbu(a2, FieldMemOperand(a0, SeqAsciiString::kHeaderSize));
@@ -6471,10 +6537,9 @@ void StringAddStub::Generate(MacroAssembler* masm) {
// just allocate a new one.
Label make_two_character_string;
StringHelper::GenerateTwoCharacterSymbolTableProbe(
- masm, a2, a3, t2, t3, t0, t1, t4, &make_two_character_string);
+ masm, a2, a3, t2, t3, t0, t1, t5, &make_two_character_string);
__ IncrementCounter(counters->string_add_native(), 1, a2, a3);
- __ Addu(sp, sp, Operand(2 * kPointerSize));
- __ Ret();
+ __ DropAndRet(2);
__ bind(&make_two_character_string);
// Resulting string has length 2 and first chars of two strings
@@ -6483,21 +6548,19 @@ void StringAddStub::Generate(MacroAssembler* masm) {
// halfword store instruction (which assumes that processor is
// in a little endian mode).
__ li(t2, Operand(2));
- __ AllocateAsciiString(v0, t2, t0, t1, t4, &string_add_runtime);
+ __ AllocateAsciiString(v0, t2, t0, t1, t5, &call_runtime);
__ sh(a2, FieldMemOperand(v0, SeqAsciiString::kHeaderSize));
__ IncrementCounter(counters->string_add_native(), 1, a2, a3);
- __ Addu(sp, sp, Operand(2 * kPointerSize));
- __ Ret();
+ __ DropAndRet(2);
__ bind(&longer_than_two);
// Check if resulting string will be flat.
- __ Branch(&string_add_flat_result, lt, t2,
- Operand(String::kMinNonFlatLength));
+ __ Branch(&string_add_flat_result, lt, t2, Operand(ConsString::kMinLength));
// Handle exceptionally long strings in the runtime system.
STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0);
ASSERT(IsPowerOf2(String::kMaxLength + 1));
// kMaxLength + 1 is representable as shifted literal, kMaxLength is not.
- __ Branch(&string_add_runtime, hs, t2, Operand(String::kMaxLength + 1));
+ __ Branch(&call_runtime, hs, t2, Operand(String::kMaxLength + 1));
// If result is not supposed to be flat, allocate a cons string object.
// If both strings are ASCII the result is an ASCII cons string.
@@ -6509,22 +6572,20 @@ void StringAddStub::Generate(MacroAssembler* masm) {
}
Label non_ascii, allocated, ascii_data;
STATIC_ASSERT(kTwoByteStringTag == 0);
- // Branch to non_ascii if either string-encoding field is zero (non-ascii).
+ // Branch to non_ascii if either string-encoding field is zero (non-ASCII).
__ And(t4, t0, Operand(t1));
__ And(t4, t4, Operand(kStringEncodingMask));
__ Branch(&non_ascii, eq, t4, Operand(zero_reg));
// Allocate an ASCII cons string.
__ bind(&ascii_data);
- __ AllocateAsciiConsString(t3, t2, t0, t1, &string_add_runtime);
+ __ AllocateAsciiConsString(v0, t2, t0, t1, &call_runtime);
__ bind(&allocated);
// Fill the fields of the cons string.
- __ sw(a0, FieldMemOperand(t3, ConsString::kFirstOffset));
- __ sw(a1, FieldMemOperand(t3, ConsString::kSecondOffset));
- __ mov(v0, t3);
+ __ sw(a0, FieldMemOperand(v0, ConsString::kFirstOffset));
+ __ sw(a1, FieldMemOperand(v0, ConsString::kSecondOffset));
__ IncrementCounter(counters->string_add_native(), 1, a2, a3);
- __ Addu(sp, sp, Operand(2 * kPointerSize));
- __ Ret();
+ __ DropAndRet(2);
__ bind(&non_ascii);
// At least one of the strings is two-byte. Check whether it happens
@@ -6542,11 +6603,13 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ Branch(&ascii_data, eq, t0, Operand(kAsciiStringTag | kAsciiDataHintTag));
// Allocate a two byte cons string.
- __ AllocateTwoByteConsString(t3, t2, t0, t1, &string_add_runtime);
+ __ AllocateTwoByteConsString(v0, t2, t0, t1, &call_runtime);
__ Branch(&allocated);
- // Handle creating a flat result. First check that both strings are
- // sequential and that they have the same encoding.
+ // We cannot encounter sliced strings or cons strings here since:
+ STATIC_ASSERT(SlicedString::kMinLength >= ConsString::kMinLength);
+ // Handle creating a flat result from either external or sequential strings.
+ // Locate the first characters' locations.
// a0: first string
// a1: second string
// a2: length of first string
@@ -6554,6 +6617,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
// t0: first string instance type (if flags_ == NO_STRING_ADD_FLAGS)
// t1: second string instance type (if flags_ == NO_STRING_ADD_FLAGS)
// t2: sum of lengths.
+ Label first_prepared, second_prepared;
__ bind(&string_add_flat_result);
if (flags_ != NO_STRING_ADD_FLAGS) {
__ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset));
@@ -6561,101 +6625,86 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ lbu(t0, FieldMemOperand(t0, Map::kInstanceTypeOffset));
__ lbu(t1, FieldMemOperand(t1, Map::kInstanceTypeOffset));
}
- // Check that both strings are sequential, meaning that we
- // branch to runtime if either string tag is non-zero.
+ // Check whether both strings have same encoding
+ __ Xor(t3, t0, Operand(t1));
+ __ And(t3, t3, Operand(kStringEncodingMask));
+ __ Branch(&call_runtime, ne, t3, Operand(zero_reg));
+
STATIC_ASSERT(kSeqStringTag == 0);
- __ Or(t4, t0, Operand(t1));
- __ And(t4, t4, Operand(kStringRepresentationMask));
- __ Branch(&string_add_runtime, ne, t4, Operand(zero_reg));
+ __ And(t4, t0, Operand(kStringRepresentationMask));
+
+ STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize);
+ Label skip_first_add;
+ __ Branch(&skip_first_add, ne, t4, Operand(zero_reg));
+ __ Branch(USE_DELAY_SLOT, &first_prepared);
+ __ addiu(t3, a0, SeqAsciiString::kHeaderSize - kHeapObjectTag);
+ __ bind(&skip_first_add);
+ // External string: rule out short external string and load string resource.
+ STATIC_ASSERT(kShortExternalStringTag != 0);
+ __ And(t4, t0, Operand(kShortExternalStringMask));
+ __ Branch(&call_runtime, ne, t4, Operand(zero_reg));
+ __ lw(t3, FieldMemOperand(a0, ExternalString::kResourceDataOffset));
+ __ bind(&first_prepared);
- // Now check if both strings have the same encoding (ASCII/Two-byte).
- // a0: first string
- // a1: second string
+ STATIC_ASSERT(kSeqStringTag == 0);
+ __ And(t4, t1, Operand(kStringRepresentationMask));
+ STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize);
+ Label skip_second_add;
+ __ Branch(&skip_second_add, ne, t4, Operand(zero_reg));
+ __ Branch(USE_DELAY_SLOT, &second_prepared);
+ __ addiu(a1, a1, SeqAsciiString::kHeaderSize - kHeapObjectTag);
+ __ bind(&skip_second_add);
+ // External string: rule out short external string and load string resource.
+ STATIC_ASSERT(kShortExternalStringTag != 0);
+ __ And(t4, t1, Operand(kShortExternalStringMask));
+ __ Branch(&call_runtime, ne, t4, Operand(zero_reg));
+ __ lw(a1, FieldMemOperand(a1, ExternalString::kResourceDataOffset));
+ __ bind(&second_prepared);
+
+ Label non_ascii_string_add_flat_result;
+ // t3: first character of first string
+ // a1: first character of second string
// a2: length of first string
// a3: length of second string
- // t0: first string instance type
- // t1: second string instance type
// t2: sum of lengths.
- Label non_ascii_string_add_flat_result;
- ASSERT(IsPowerOf2(kStringEncodingMask)); // Just one bit to test.
- __ xor_(t3, t1, t0);
- __ And(t3, t3, Operand(kStringEncodingMask));
- __ Branch(&string_add_runtime, ne, t3, Operand(zero_reg));
- // And see if it's ASCII (0) or two-byte (1).
- __ And(t3, t0, Operand(kStringEncodingMask));
- __ Branch(&non_ascii_string_add_flat_result, eq, t3, Operand(zero_reg));
-
- // Both strings are sequential ASCII strings. We also know that they are
- // short (since the sum of the lengths is less than kMinNonFlatLength).
- // t2: length of resulting flat string
- __ AllocateAsciiString(t3, t2, t0, t1, t4, &string_add_runtime);
- // Locate first character of result.
- __ Addu(t2, t3, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- // Locate first character of first argument.
- __ Addu(a0, a0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- // a0: first character of first string.
- // a1: second string.
+ // Both strings have the same encoding.
+ STATIC_ASSERT(kTwoByteStringTag == 0);
+ __ And(t4, t1, Operand(kStringEncodingMask));
+ __ Branch(&non_ascii_string_add_flat_result, eq, t4, Operand(zero_reg));
+
+ __ AllocateAsciiString(v0, t2, t0, t1, t5, &call_runtime);
+ __ Addu(t2, v0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ // v0: result string.
+ // t3: first character of first string.
+ // a1: first character of second string
// a2: length of first string.
// a3: length of second string.
// t2: first character of result.
- // t3: result string.
- StringHelper::GenerateCopyCharacters(masm, t2, a0, a2, t0, true);
- // Load second argument and locate first character.
- __ Addu(a1, a1, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- // a1: first character of second string.
- // a3: length of second string.
+ StringHelper::GenerateCopyCharacters(masm, t2, t3, a2, t0, true);
// t2: next character of result.
- // t3: result string.
StringHelper::GenerateCopyCharacters(masm, t2, a1, a3, t0, true);
- __ mov(v0, t3);
__ IncrementCounter(counters->string_add_native(), 1, a2, a3);
- __ Addu(sp, sp, Operand(2 * kPointerSize));
- __ Ret();
+ __ DropAndRet(2);
__ bind(&non_ascii_string_add_flat_result);
- // Both strings are sequential two byte strings.
- // a0: first string.
- // a1: second string.
- // a2: length of first string.
- // a3: length of second string.
- // t2: sum of length of strings.
- __ AllocateTwoByteString(t3, t2, t0, t1, t4, &string_add_runtime);
- // a0: first string.
- // a1: second string.
- // a2: length of first string.
- // a3: length of second string.
- // t3: result string.
-
- // Locate first character of result.
- __ Addu(t2, t3, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
- // Locate first character of first argument.
- __ Addu(a0, a0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
-
- // a0: first character of first string.
- // a1: second string.
+ __ AllocateTwoByteString(v0, t2, t0, t1, t5, &call_runtime);
+ __ Addu(t2, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
+ // v0: result string.
+ // t3: first character of first string.
+ // a1: first character of second string.
// a2: length of first string.
// a3: length of second string.
// t2: first character of result.
- // t3: result string.
- StringHelper::GenerateCopyCharacters(masm, t2, a0, a2, t0, false);
-
- // Locate first character of second argument.
- __ Addu(a1, a1, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
-
- // a1: first character of second string.
- // a3: length of second string.
- // t2: next character of result (after copy of first string).
- // t3: result string.
+ StringHelper::GenerateCopyCharacters(masm, t2, t3, a2, t0, false);
+ // t2: next character of result.
StringHelper::GenerateCopyCharacters(masm, t2, a1, a3, t0, false);
- __ mov(v0, t3);
__ IncrementCounter(counters->string_add_native(), 1, a2, a3);
- __ Addu(sp, sp, Operand(2 * kPointerSize));
- __ Ret();
+ __ DropAndRet(2);
// Just jump to runtime to add the two strings.
- __ bind(&string_add_runtime);
+ __ bind(&call_runtime);
__ TailCallRuntime(Runtime::kStringAdd, 2, 1);
if (call_builtin.is_linked()) {
@@ -6737,15 +6786,15 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
ASSERT(state_ == CompareIC::HEAP_NUMBERS);
Label generic_stub;
- Label unordered;
+ Label unordered, maybe_undefined1, maybe_undefined2;
Label miss;
__ And(a2, a1, Operand(a0));
__ JumpIfSmi(a2, &generic_stub);
__ GetObjectType(a0, a2, a2);
- __ Branch(&miss, ne, a2, Operand(HEAP_NUMBER_TYPE));
+ __ Branch(&maybe_undefined1, ne, a2, Operand(HEAP_NUMBER_TYPE));
__ GetObjectType(a1, a2, a2);
- __ Branch(&miss, ne, a2, Operand(HEAP_NUMBER_TYPE));
+ __ Branch(&maybe_undefined2, ne, a2, Operand(HEAP_NUMBER_TYPE));
// Inlining the double comparison and falling back to the general compare
// stub if NaN is involved or FPU is unsupported.
@@ -6767,24 +6816,39 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
__ BranchF(&fpu_lt, NULL, lt, f0, f2);
// Otherwise it's greater, so just fall thru, and return.
- __ Ret(USE_DELAY_SLOT);
- __ li(v0, Operand(GREATER)); // In delay slot.
+ __ li(v0, Operand(GREATER));
+ __ Ret();
__ bind(&fpu_eq);
- __ Ret(USE_DELAY_SLOT);
- __ li(v0, Operand(EQUAL)); // In delay slot.
+ __ li(v0, Operand(EQUAL));
+ __ Ret();
__ bind(&fpu_lt);
- __ Ret(USE_DELAY_SLOT);
- __ li(v0, Operand(LESS)); // In delay slot.
-
- __ bind(&unordered);
+ __ li(v0, Operand(LESS));
+ __ Ret();
}
+ __ bind(&unordered);
+
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, a1, a0);
__ bind(&generic_stub);
__ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
+ __ bind(&maybe_undefined1);
+ if (Token::IsOrderedRelationalCompareOp(op_)) {
+ __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
+ __ Branch(&miss, ne, a0, Operand(at));
+ __ GetObjectType(a1, a2, a2);
+ __ Branch(&maybe_undefined2, ne, a2, Operand(HEAP_NUMBER_TYPE));
+ __ jmp(&unordered);
+ }
+
+ __ bind(&maybe_undefined2);
+ if (Token::IsOrderedRelationalCompareOp(op_)) {
+ __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
+ __ Branch(&unordered, eq, a1, Operand(at));
+ }
+
__ bind(&miss);
GenerateMiss(masm);
}
@@ -6832,6 +6896,8 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
ASSERT(state_ == CompareIC::STRINGS);
Label miss;
+ bool equality = Token::IsEqualityOp(op_);
+
// Registers containing left and right operands respectively.
Register left = a1;
Register right = a0;
@@ -6859,41 +6925,52 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
Label left_ne_right;
STATIC_ASSERT(EQUAL == 0);
STATIC_ASSERT(kSmiTag == 0);
- __ Branch(&left_ne_right, ne, left, Operand(right), USE_DELAY_SLOT);
+ __ Branch(&left_ne_right, ne, left, Operand(right));
+ __ Ret(USE_DELAY_SLOT);
__ mov(v0, zero_reg); // In the delay slot.
- __ Ret();
__ bind(&left_ne_right);
// Handle not identical strings.
// Check that both strings are symbols. If they are, we're done
// because we already know they are not identical.
- ASSERT(GetCondition() == eq);
- STATIC_ASSERT(kSymbolTag != 0);
- __ And(tmp3, tmp1, Operand(tmp2));
- __ And(tmp5, tmp3, Operand(kIsSymbolMask));
- Label is_symbol;
- __ Branch(&is_symbol, eq, tmp5, Operand(zero_reg), USE_DELAY_SLOT);
- __ mov(v0, a0); // In the delay slot.
- // Make sure a0 is non-zero. At this point input operands are
- // guaranteed to be non-zero.
- ASSERT(right.is(a0));
- __ Ret();
- __ bind(&is_symbol);
+ if (equality) {
+ ASSERT(GetCondition() == eq);
+ STATIC_ASSERT(kSymbolTag != 0);
+ __ And(tmp3, tmp1, Operand(tmp2));
+ __ And(tmp5, tmp3, Operand(kIsSymbolMask));
+ Label is_symbol;
+ __ Branch(&is_symbol, eq, tmp5, Operand(zero_reg));
+ // Make sure a0 is non-zero. At this point input operands are
+ // guaranteed to be non-zero.
+ ASSERT(right.is(a0));
+ __ Ret(USE_DELAY_SLOT);
+ __ mov(v0, a0); // In the delay slot.
+ __ bind(&is_symbol);
+ }
// Check that both strings are sequential ASCII.
Label runtime;
- __ JumpIfBothInstanceTypesAreNotSequentialAscii(tmp1, tmp2, tmp3, tmp4,
- &runtime);
+ __ JumpIfBothInstanceTypesAreNotSequentialAscii(
+ tmp1, tmp2, tmp3, tmp4, &runtime);
// Compare flat ASCII strings. Returns when done.
- StringCompareStub::GenerateFlatAsciiStringEquals(
- masm, left, right, tmp1, tmp2, tmp3);
+ if (equality) {
+ StringCompareStub::GenerateFlatAsciiStringEquals(
+ masm, left, right, tmp1, tmp2, tmp3);
+ } else {
+ StringCompareStub::GenerateCompareFlatAsciiStrings(
+ masm, left, right, tmp1, tmp2, tmp3, tmp4);
+ }
// Handle more complex cases in runtime.
__ bind(&runtime);
__ Push(left, right);
- __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+ if (equality) {
+ __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+ } else {
+ __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
+ }
__ bind(&miss);
GenerateMiss(masm);
@@ -6906,50 +6983,54 @@ void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
__ And(a2, a1, Operand(a0));
__ JumpIfSmi(a2, &miss);
- // Compare lhs, a2 holds the map, a3 holds the type_reg
- __ GetObjectType(a0, a2, a3);
- __ Branch(&miss, ne, a3, Operand(JS_OBJECT_TYPE));
- __ lbu(a2, FieldMemOperand(a2, Map::kBitField2Offset));
- __ And(a2, a2, Operand(1 << Map::kUseUserObjectComparison));
- __ Branch(&miss, eq, a2, Operand(1 << Map::kUseUserObjectComparison));
-
-
- // Compare rhs, a2 holds the map, a3 holds the type_reg
- __ GetObjectType(a1, a2, a3);
- __ Branch(&miss, ne, a3, Operand(JS_OBJECT_TYPE));
- __ lbu(a2, FieldMemOperand(a2, Map::kBitField2Offset));
- __ And(a2, a2, Operand(1 << Map::kUseUserObjectComparison));
- __ Branch(&miss, eq, a2, Operand(1 << Map::kUseUserObjectComparison));
+ __ GetObjectType(a0, a2, a2);
+ __ Branch(&miss, ne, a2, Operand(JS_OBJECT_TYPE));
+ __ GetObjectType(a1, a2, a2);
+ __ Branch(&miss, ne, a2, Operand(JS_OBJECT_TYPE));
ASSERT(GetCondition() == eq);
- __ Subu(v0, a0, Operand(a1));
- __ Ret();
+ __ Ret(USE_DELAY_SLOT);
+ __ subu(v0, a0, a1);
__ bind(&miss);
GenerateMiss(masm);
}
-void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
- __ Push(a1, a0);
- __ push(ra);
+void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
+ Label miss;
+ __ And(a2, a1, a0);
+ __ JumpIfSmi(a2, &miss);
+ __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
+ __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
+ __ Branch(&miss, ne, a2, Operand(known_map_));
+ __ Branch(&miss, ne, a3, Operand(known_map_));
+
+ __ Ret(USE_DELAY_SLOT);
+ __ subu(v0, a0, a1);
+
+ __ bind(&miss);
+ GenerateMiss(masm);
+}
- // Call the runtime system in a fresh internal frame.
- ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
- masm->isolate());
+void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
{
+ // Call the runtime system in a fresh internal frame.
+ ExternalReference miss =
+ ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(a1, a0);
+ __ push(ra);
+ __ Push(a1, a0);
__ li(t0, Operand(Smi::FromInt(op_)));
- __ push(t0);
- __ CallExternalReference(miss, 3);
+ __ addiu(sp, sp, -kPointerSize);
+ __ CallExternalReference(miss, 3, USE_DELAY_SLOT);
+ __ sw(t0, MemOperand(sp)); // In the delay slot.
+ // Compute the entry point of the rewritten stub.
+ __ Addu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
+ // Restore registers.
+ __ Pop(a1, a0, ra);
}
- // Compute the entry point of the rewritten stub.
- __ Addu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
- // Restore registers.
- __ pop(ra);
- __ pop(a0);
- __ pop(a1);
__ Jump(a2);
}
@@ -7001,8 +7082,10 @@ void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
// Push return address (accessible to GC through exit frame pc).
// This spot for ra was reserved in EnterExitFrame.
masm->sw(ra, MemOperand(sp, kCArgsSlotsSize));
- masm->li(ra, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
- RelocInfo::CODE_TARGET), true);
+ masm->li(ra,
+ Operand(reinterpret_cast<intptr_t>(GetCode().location()),
+ RelocInfo::CODE_TARGET),
+ CONSTANT_SIZE);
// Call the function.
masm->Jump(t9);
// Make sure the stored 'ra' points to this position.
@@ -7021,7 +7104,7 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
// not equal to the name and kProbes-th slot is not used (its name is the
// undefined value), it guarantees the hash table doesn't contain the
// property. It's true even if some slots represent deleted properties
- // (their names are the null value).
+ // (their names are the hole value).
for (int i = 0; i < kInlinedProbes; i++) {
// scratch0 points to properties hash.
// Compute the masked index: (hash + i + i * i) & mask.
@@ -7050,9 +7133,15 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
__ Branch(done, eq, entity_name, Operand(tmp));
if (i != kInlinedProbes - 1) {
+ // Load the hole ready for use below:
+ __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);
+
// Stop if found the property.
__ Branch(miss, eq, entity_name, Operand(Handle<String>(name)));
+ Label the_hole;
+ __ Branch(&the_hole, eq, entity_name, Operand(tmp));
+
// Check if the entry name is not a symbol.
__ lw(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
__ lbu(entity_name,
@@ -7060,6 +7149,8 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
__ And(scratch0, entity_name, Operand(kIsSymbolMask));
__ Branch(miss, eq, scratch0, Operand(zero_reg));
+ __ bind(&the_hole);
+
// Restore the properties.
__ lw(properties,
FieldMemOperand(receiver, JSObject::kPropertiesOffset));
@@ -7246,17 +7337,17 @@ void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
// treated as a lookup success. For positive lookup probing failure
// should be treated as lookup failure.
if (mode_ == POSITIVE_LOOKUP) {
+ __ Ret(USE_DELAY_SLOT);
__ mov(result, zero_reg);
- __ Ret();
}
__ bind(&in_dictionary);
+ __ Ret(USE_DELAY_SLOT);
__ li(result, 1);
- __ Ret();
__ bind(&not_in_dictionary);
+ __ Ret(USE_DELAY_SLOT);
__ mov(result, zero_reg);
- __ Ret();
}
@@ -7265,39 +7356,46 @@ struct AheadOfTimeWriteBarrierStubList {
RememberedSetAction action;
};
+#define REG(Name) { kRegister_ ## Name ## _Code }
-struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
+static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
// Used in RegExpExecStub.
- { s2, s0, t3, EMIT_REMEMBERED_SET },
- { s2, a2, t3, EMIT_REMEMBERED_SET },
+ { REG(s2), REG(s0), REG(t3), EMIT_REMEMBERED_SET },
+ { REG(s2), REG(a2), REG(t3), EMIT_REMEMBERED_SET },
// Used in CompileArrayPushCall.
// Also used in StoreIC::GenerateNormal via GenerateDictionaryStore.
// Also used in KeyedStoreIC::GenerateGeneric.
- { a3, t0, t1, EMIT_REMEMBERED_SET },
+ { REG(a3), REG(t0), REG(t1), EMIT_REMEMBERED_SET },
// Used in CompileStoreGlobal.
- { t0, a1, a2, OMIT_REMEMBERED_SET },
+ { REG(t0), REG(a1), REG(a2), OMIT_REMEMBERED_SET },
// Used in StoreStubCompiler::CompileStoreField via GenerateStoreField.
- { a1, a2, a3, EMIT_REMEMBERED_SET },
- { a3, a2, a1, EMIT_REMEMBERED_SET },
+ { REG(a1), REG(a2), REG(a3), EMIT_REMEMBERED_SET },
+ { REG(a3), REG(a2), REG(a1), EMIT_REMEMBERED_SET },
// Used in KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
- { a2, a1, a3, EMIT_REMEMBERED_SET },
- { a3, a1, a2, EMIT_REMEMBERED_SET },
+ { REG(a2), REG(a1), REG(a3), EMIT_REMEMBERED_SET },
+ { REG(a3), REG(a1), REG(a2), EMIT_REMEMBERED_SET },
// KeyedStoreStubCompiler::GenerateStoreFastElement.
- { t0, a2, a3, EMIT_REMEMBERED_SET },
+ { REG(a3), REG(a2), REG(t0), EMIT_REMEMBERED_SET },
+ { REG(a2), REG(a3), REG(t0), EMIT_REMEMBERED_SET },
// ElementsTransitionGenerator::GenerateSmiOnlyToObject
// and ElementsTransitionGenerator::GenerateSmiOnlyToDouble
// and ElementsTransitionGenerator::GenerateDoubleToObject
- { a2, a3, t5, EMIT_REMEMBERED_SET },
+ { REG(a2), REG(a3), REG(t5), EMIT_REMEMBERED_SET },
+ { REG(a2), REG(a3), REG(t5), OMIT_REMEMBERED_SET },
// ElementsTransitionGenerator::GenerateDoubleToObject
- { t2, a2, a0, EMIT_REMEMBERED_SET },
- { a2, t2, t5, EMIT_REMEMBERED_SET },
+ { REG(t2), REG(a2), REG(a0), EMIT_REMEMBERED_SET },
+ { REG(a2), REG(t2), REG(t5), EMIT_REMEMBERED_SET },
+ // StoreArrayLiteralElementStub::Generate
+ { REG(t1), REG(a0), REG(t2), EMIT_REMEMBERED_SET },
// Null termination.
- { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET}
+ { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
};
+#undef REG
+
bool RecordWriteStub::IsPregenerated() {
- for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
+ for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
!entry->object.is(no_reg);
entry++) {
if (object_.is(entry->object) &&
@@ -7324,7 +7422,7 @@ void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() {
void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
- for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
+ for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
!entry->object.is(no_reg);
entry++) {
RecordWriteStub stub(entry->object,
@@ -7530,6 +7628,69 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
}
+void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- a0 : element value to store
+ // -- a1 : array literal
+ // -- a2 : map of array literal
+ // -- a3 : element index as smi
+ // -- t0 : array literal index in function as smi
+ // -----------------------------------
+
+ Label element_done;
+ Label double_elements;
+ Label smi_element;
+ Label slow_elements;
+ Label fast_elements;
+
+ __ CheckFastElements(a2, t1, &double_elements);
+ // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
+ __ JumpIfSmi(a0, &smi_element);
+ __ CheckFastSmiOnlyElements(a2, t1, &fast_elements);
+
+ // Store into the array literal requires a elements transition. Call into
+ // the runtime.
+ __ bind(&slow_elements);
+ // call.
+ __ Push(a1, a3, a0);
+ __ lw(t1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+ __ lw(t1, FieldMemOperand(t1, JSFunction::kLiteralsOffset));
+ __ Push(t1, t0);
+ __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
+
+ // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
+ __ bind(&fast_elements);
+ __ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset));
+ __ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize);
+ __ Addu(t2, t1, t2);
+ __ Addu(t2, t2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ sw(a0, MemOperand(t2, 0));
+ // Update the write barrier for the array store.
+ __ RecordWrite(t1, t2, a0, kRAHasNotBeenSaved, kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+ __ Ret(USE_DELAY_SLOT);
+ __ mov(v0, a0);
+
+ // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
+ // FAST_ELEMENTS, and value is Smi.
+ __ bind(&smi_element);
+ __ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset));
+ __ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize);
+ __ Addu(t2, t1, t2);
+ __ sw(a0, FieldMemOperand(t2, FixedArray::kHeaderSize));
+ __ Ret(USE_DELAY_SLOT);
+ __ mov(v0, a0);
+
+ // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
+ __ bind(&double_elements);
+ __ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset));
+ __ StoreNumberToDoubleElements(a0, a3, a1, t1, t2, t3, t5, a2,
+ &slow_elements);
+ __ Ret(USE_DELAY_SLOT);
+ __ mov(v0, a0);
+}
+
+
#undef __
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/mips/code-stubs-mips.h b/src/3rdparty/v8/src/mips/code-stubs-mips.h
index 94ef2af..e0954d8 100644
--- a/src/3rdparty/v8/src/mips/code-stubs-mips.h
+++ b/src/3rdparty/v8/src/mips/code-stubs-mips.h
@@ -137,7 +137,7 @@ class UnaryOpStub: public CodeStub {
return UnaryOpIC::ToState(operand_type_);
}
- virtual void FinishCode(Code* code) {
+ virtual void FinishCode(Handle<Code> code) {
code->set_unary_op_type(operand_type_);
}
};
@@ -236,7 +236,7 @@ class BinaryOpStub: public CodeStub {
return BinaryOpIC::ToState(operands_type_);
}
- virtual void FinishCode(Code* code) {
+ virtual void FinishCode(Handle<Code> code) {
code->set_binary_op_type(operands_type_);
code->set_binary_op_result_type(result_type_);
}
@@ -245,6 +245,70 @@ class BinaryOpStub: public CodeStub {
};
+class StringHelper : public AllStatic {
+ public:
+ // Generate code for copying characters using a simple loop. This should only
+ // be used in places where the number of characters is small and the
+ // additional setup and checking in GenerateCopyCharactersLong adds too much
+ // overhead. Copying of overlapping regions is not supported.
+ // Dest register ends at the position after the last character written.
+ static void GenerateCopyCharacters(MacroAssembler* masm,
+ Register dest,
+ Register src,
+ Register count,
+ Register scratch,
+ bool ascii);
+
+ // Generate code for copying a large number of characters. This function
+ // is allowed to spend extra time setting up conditions to make copying
+ // faster. Copying of overlapping regions is not supported.
+ // Dest register ends at the position after the last character written.
+ static void GenerateCopyCharactersLong(MacroAssembler* masm,
+ Register dest,
+ Register src,
+ Register count,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ Register scratch4,
+ Register scratch5,
+ int flags);
+
+
+ // Probe the symbol table for a two character string. If the string is
+ // not found by probing a jump to the label not_found is performed. This jump
+ // does not guarantee that the string is not in the symbol table. If the
+ // string is found the code falls through with the string in register r0.
+ // Contents of both c1 and c2 registers are modified. At the exit c1 is
+ // guaranteed to contain halfword with low and high bytes equal to
+ // initial contents of c1 and c2 respectively.
+ static void GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
+ Register c1,
+ Register c2,
+ Register scratch1,
+ Register scratch2,
+ Register scratch3,
+ Register scratch4,
+ Register scratch5,
+ Label* not_found);
+
+ // Generate string hash.
+ static void GenerateHashInit(MacroAssembler* masm,
+ Register hash,
+ Register character);
+
+ static void GenerateHashAddCharacter(MacroAssembler* masm,
+ Register hash,
+ Register character);
+
+ static void GenerateHashGetHash(MacroAssembler* masm,
+ Register hash);
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(StringHelper);
+};
+
+
// Flag that indicates how to generate code for the stub StringAddStub.
enum StringAddFlags {
NO_STRING_ADD_FLAGS = 0,
@@ -586,13 +650,6 @@ class RecordWriteStub: public CodeStub {
SaveFPRegsModeBits::encode(save_fp_regs_mode_);
}
- bool MustBeInStubCache() {
- // All stubs must be registered in the stub cache
- // otherwise IncrementalMarker would not be able to find
- // and patch it.
- return true;
- }
-
void Activate(Code* code) {
code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code);
}
diff --git a/src/3rdparty/v8/src/mips/codegen-mips.cc b/src/3rdparty/v8/src/mips/codegen-mips.cc
index e9fe232..9acccdc 100644
--- a/src/3rdparty/v8/src/mips/codegen-mips.cc
+++ b/src/3rdparty/v8/src/mips/codegen-mips.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -37,6 +37,22 @@ namespace internal {
#define __ ACCESS_MASM(masm)
+UnaryMathFunction CreateTranscendentalFunction(TranscendentalCache::Type type) {
+ switch (type) {
+ case TranscendentalCache::SIN: return &sin;
+ case TranscendentalCache::COS: return &cos;
+ case TranscendentalCache::TAN: return &tan;
+ case TranscendentalCache::LOG: return &log;
+ default: UNIMPLEMENTED();
+ }
+ return NULL;
+}
+
+
+UnaryMathFunction CreateSqrtFunction() {
+ return &sqrt;
+}
+
// -------------------------------------------------------------------------
// Platform-specific RuntimeCallHelper functions.
@@ -89,13 +105,18 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
// -- a3 : target map, scratch for subsequent call
// -- t0 : scratch (elements)
// -----------------------------------
- Label loop, entry, convert_hole, gc_required;
+ Label loop, entry, convert_hole, gc_required, only_change_map, done;
bool fpu_supported = CpuFeatures::IsSupported(FPU);
- __ push(ra);
Register scratch = t6;
+ // Check for empty arrays, which only require a map transition and no changes
+ // to the backing store.
__ lw(t0, FieldMemOperand(a2, JSObject::kElementsOffset));
+ __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
+ __ Branch(&only_change_map, eq, at, Operand(t0));
+
+ __ push(ra);
__ lw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset));
// t0: source FixedArray
// t1: number of elements (smi-tagged)
@@ -105,10 +126,10 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
__ Addu(scratch, scratch, FixedDoubleArray::kHeaderSize);
__ AllocateInNewSpace(scratch, t2, t3, t5, &gc_required, NO_ALLOCATION_FLAGS);
// t2: destination FixedDoubleArray, not tagged as heap object
+ // Set destination FixedDoubleArray's length and map.
__ LoadRoot(t5, Heap::kFixedDoubleArrayMapRootIndex);
- __ sw(t5, MemOperand(t2, HeapObject::kMapOffset));
- // Set destination FixedDoubleArray's length.
__ sw(t1, MemOperand(t2, FixedDoubleArray::kLengthOffset));
+ __ sw(t5, MemOperand(t2, HeapObject::kMapOffset));
// Update receiver's map.
__ sw(a3, FieldMemOperand(a2, HeapObject::kMapOffset));
@@ -118,7 +139,7 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
t5,
kRAHasBeenSaved,
kDontSaveFPRegs,
- EMIT_REMEMBERED_SET,
+ OMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
// Replace receiver's backing store with newly created FixedDoubleArray.
__ Addu(a3, t2, Operand(kHeapObjectTag));
@@ -149,6 +170,18 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
__ Branch(&entry);
+ __ bind(&only_change_map);
+ __ sw(a3, FieldMemOperand(a2, HeapObject::kMapOffset));
+ __ RecordWriteField(a2,
+ HeapObject::kMapOffset,
+ a3,
+ t5,
+ kRAHasBeenSaved,
+ kDontSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ __ Branch(&done);
+
// Call into runtime if GC is required.
__ bind(&gc_required);
__ pop(ra);
@@ -159,10 +192,9 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
__ lw(t5, MemOperand(a3));
__ Addu(a3, a3, kIntSize);
// t5: current element
- __ JumpIfNotSmi(t5, &convert_hole);
+ __ UntagAndJumpIfNotSmi(t5, t5, &convert_hole);
// Normal smi, convert to double and store.
- __ SmiUntag(t5);
if (fpu_supported) {
CpuFeatures::Scope scope(FPU);
__ mtc1(t5, f0);
@@ -186,6 +218,13 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
// Hole found, store the-hole NaN.
__ bind(&convert_hole);
+ if (FLAG_debug_code) {
+ // Restore a "smi-untagged" heap object.
+ __ SmiTag(t5);
+ __ Or(t5, t5, Operand(1));
+ __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
+ __ Assert(eq, "object found in smi-only array", at, Operand(t5));
+ }
__ sw(t0, MemOperand(t3)); // mantissa
__ sw(t1, MemOperand(t3, kIntSize)); // exponent
__ Addu(t3, t3, kDoubleSize);
@@ -195,6 +234,7 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
if (!fpu_supported) __ Pop(a1, a0);
__ pop(ra);
+ __ bind(&done);
}
@@ -208,10 +248,16 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
// -- a3 : target map, scratch for subsequent call
// -- t0 : scratch (elements)
// -----------------------------------
- Label entry, loop, convert_hole, gc_required;
- __ MultiPush(a0.bit() | a1.bit() | a2.bit() | a3.bit() | ra.bit());
+ Label entry, loop, convert_hole, gc_required, only_change_map;
+ // Check for empty arrays, which only require a map transition and no changes
+ // to the backing store.
__ lw(t0, FieldMemOperand(a2, JSObject::kElementsOffset));
+ __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
+ __ Branch(&only_change_map, eq, at, Operand(t0));
+
+ __ MultiPush(a0.bit() | a1.bit() | a2.bit() | a3.bit() | ra.bit());
+
__ lw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset));
// t0: source FixedArray
// t1: number of elements (smi-tagged)
@@ -221,10 +267,10 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
__ Addu(a0, a0, FixedDoubleArray::kHeaderSize);
__ AllocateInNewSpace(a0, t2, t3, t5, &gc_required, NO_ALLOCATION_FLAGS);
// t2: destination FixedArray, not tagged as heap object
+ // Set destination FixedDoubleArray's length and map.
__ LoadRoot(t5, Heap::kFixedArrayMapRootIndex);
- __ sw(t5, MemOperand(t2, HeapObject::kMapOffset));
- // Set destination FixedDoubleArray's length.
__ sw(t1, MemOperand(t2, FixedDoubleArray::kLengthOffset));
+ __ sw(t5, MemOperand(t2, HeapObject::kMapOffset));
// Prepare for conversion loop.
__ Addu(t0, t0, Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag + 4));
@@ -283,16 +329,6 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
__ Branch(&loop, lt, a3, Operand(t1));
__ MultiPop(a2.bit() | a3.bit() | a0.bit() | a1.bit());
- // Update receiver's map.
- __ sw(a3, FieldMemOperand(a2, HeapObject::kMapOffset));
- __ RecordWriteField(a2,
- HeapObject::kMapOffset,
- a3,
- t5,
- kRAHasBeenSaved,
- kDontSaveFPRegs,
- EMIT_REMEMBERED_SET,
- OMIT_SMI_CHECK);
// Replace receiver's backing store with newly created and filled FixedArray.
__ sw(t2, FieldMemOperand(a2, JSObject::kElementsOffset));
__ RecordWriteField(a2,
@@ -304,6 +340,110 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
__ pop(ra);
+
+ __ bind(&only_change_map);
+ // Update receiver's map.
+ __ sw(a3, FieldMemOperand(a2, HeapObject::kMapOffset));
+ __ RecordWriteField(a2,
+ HeapObject::kMapOffset,
+ a3,
+ t5,
+ kRAHasNotBeenSaved,
+ kDontSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+}
+
+
+void StringCharLoadGenerator::Generate(MacroAssembler* masm,
+ Register string,
+ Register index,
+ Register result,
+ Label* call_runtime) {
+ // Fetch the instance type of the receiver into result register.
+ __ lw(result, FieldMemOperand(string, HeapObject::kMapOffset));
+ __ lbu(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
+
+ // We need special handling for indirect strings.
+ Label check_sequential;
+ __ And(at, result, Operand(kIsIndirectStringMask));
+ __ Branch(&check_sequential, eq, at, Operand(zero_reg));
+
+ // Dispatch on the indirect string shape: slice or cons.
+ Label cons_string;
+ __ And(at, result, Operand(kSlicedNotConsMask));
+ __ Branch(&cons_string, eq, at, Operand(zero_reg));
+
+ // Handle slices.
+ Label indirect_string_loaded;
+ __ lw(result, FieldMemOperand(string, SlicedString::kOffsetOffset));
+ __ lw(string, FieldMemOperand(string, SlicedString::kParentOffset));
+ __ sra(at, result, kSmiTagSize);
+ __ Addu(index, index, at);
+ __ jmp(&indirect_string_loaded);
+
+ // Handle cons strings.
+ // Check whether the right hand side is the empty string (i.e. if
+ // this is really a flat string in a cons string). If that is not
+ // the case we would rather go to the runtime system now to flatten
+ // the string.
+ __ bind(&cons_string);
+ __ lw(result, FieldMemOperand(string, ConsString::kSecondOffset));
+ __ LoadRoot(at, Heap::kEmptyStringRootIndex);
+ __ Branch(call_runtime, ne, result, Operand(at));
+ // Get the first of the two strings and load its instance type.
+ __ lw(string, FieldMemOperand(string, ConsString::kFirstOffset));
+
+ __ bind(&indirect_string_loaded);
+ __ lw(result, FieldMemOperand(string, HeapObject::kMapOffset));
+ __ lbu(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
+
+ // Distinguish sequential and external strings. Only these two string
+ // representations can reach here (slices and flat cons strings have been
+ // reduced to the underlying sequential or external string).
+ Label external_string, check_encoding;
+ __ bind(&check_sequential);
+ STATIC_ASSERT(kSeqStringTag == 0);
+ __ And(at, result, Operand(kStringRepresentationMask));
+ __ Branch(&external_string, ne, at, Operand(zero_reg));
+
+ // Prepare sequential strings
+ STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqAsciiString::kHeaderSize);
+ __ Addu(string,
+ string,
+ SeqTwoByteString::kHeaderSize - kHeapObjectTag);
+ __ jmp(&check_encoding);
+
+ // Handle external strings.
+ __ bind(&external_string);
+ if (FLAG_debug_code) {
+ // Assert that we do not have a cons or slice (indirect strings) here.
+ // Sequential strings have already been ruled out.
+ __ And(at, result, Operand(kIsIndirectStringMask));
+ __ Assert(eq, "external string expected, but not found",
+ at, Operand(zero_reg));
+ }
+ // Rule out short external strings.
+ STATIC_CHECK(kShortExternalStringTag != 0);
+ __ And(at, result, Operand(kShortExternalStringMask));
+ __ Branch(call_runtime, ne, at, Operand(zero_reg));
+ __ lw(string, FieldMemOperand(string, ExternalString::kResourceDataOffset));
+
+ Label ascii, done;
+ __ bind(&check_encoding);
+ STATIC_ASSERT(kTwoByteStringTag == 0);
+ __ And(at, result, Operand(kStringEncodingMask));
+ __ Branch(&ascii, ne, at, Operand(zero_reg));
+ // Two-byte string.
+ __ sll(at, index, 1);
+ __ Addu(at, string, at);
+ __ lhu(result, MemOperand(at));
+ __ jmp(&done);
+ __ bind(&ascii);
+ // Ascii string.
+ __ Addu(at, string, index);
+ __ lbu(result, MemOperand(at));
+ __ bind(&done);
}
#undef __
diff --git a/src/3rdparty/v8/src/mips/codegen-mips.h b/src/3rdparty/v8/src/mips/codegen-mips.h
index 4549509..e704c4f 100644
--- a/src/3rdparty/v8/src/mips/codegen-mips.h
+++ b/src/3rdparty/v8/src/mips/codegen-mips.h
@@ -75,6 +75,21 @@ class CodeGenerator: public AstVisitor {
};
+class StringCharLoadGenerator : public AllStatic {
+ public:
+ // Generates the code for handling different string types and loading the
+ // indexed character into |result|. We expect |index| as untagged input and
+ // |result| as untagged output.
+ static void Generate(MacroAssembler* masm,
+ Register string,
+ Register index,
+ Register result,
+ Label* call_runtime);
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(StringCharLoadGenerator);
+};
+
} } // namespace v8::internal
#endif // V8_MIPS_CODEGEN_MIPS_H_
diff --git a/src/3rdparty/v8/src/mips/constants-mips.cc b/src/3rdparty/v8/src/mips/constants-mips.cc
index d0a7af5..7d654f6 100644
--- a/src/3rdparty/v8/src/mips/constants-mips.cc
+++ b/src/3rdparty/v8/src/mips/constants-mips.cc
@@ -302,7 +302,7 @@ Instruction::Type Instruction::InstructionType() const {
return kRegisterType;
};
break;
- // 16 bits Immediate type instructions. eg: addi dest, src, imm16.
+ // 16 bits Immediate type instructions. e.g.: addi dest, src, imm16.
case REGIMM:
case BEQ:
case BNE:
@@ -337,7 +337,7 @@ Instruction::Type Instruction::InstructionType() const {
case SWC1:
case SDC1:
return kImmediateType;
- // 26 bits immediate type instructions. eg: j imm26.
+ // 26 bits immediate type instructions. e.g.: j imm26.
case J:
case JAL:
return kJumpType;
diff --git a/src/3rdparty/v8/src/mips/constants-mips.h b/src/3rdparty/v8/src/mips/constants-mips.h
index d76ae59..fd04722 100644
--- a/src/3rdparty/v8/src/mips/constants-mips.h
+++ b/src/3rdparty/v8/src/mips/constants-mips.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -39,24 +39,33 @@
#define UNSUPPORTED_MIPS() v8::internal::PrintF("Unsupported instruction.\n")
+enum ArchVariants {
+ kMips32r2,
+ kMips32r1,
+ kLoongson
+};
#ifdef _MIPS_ARCH_MIPS32R2
- #define mips32r2 1
+ static const ArchVariants kArchVariant = kMips32r2;
+#elif _MIPS_ARCH_LOONGSON
+// The loongson flag refers to the LOONGSON architectures based on MIPS-III,
+// which predates (and is a subset of) the mips32r2 and r1 architectures.
+ static const ArchVariants kArchVariant = kLoongson;
#else
- #define mips32r2 0
+ static const ArchVariants kArchVariant = kMips32r1;
#endif
#if(defined(__mips_hard_float) && __mips_hard_float != 0)
// Use floating-point coprocessor instructions. This flag is raised when
// -mhard-float is passed to the compiler.
-static const bool IsMipsSoftFloatABI = false;
+const bool IsMipsSoftFloatABI = false;
#elif(defined(__mips_soft_float) && __mips_soft_float != 0)
// Not using floating-point coprocessor instructions. This flag is raised when
// -msoft-float is passed to the compiler.
-static const bool IsMipsSoftFloatABI = true;
+const bool IsMipsSoftFloatABI = true;
#else
-static const bool IsMipsSoftFloatABI = true;
+const bool IsMipsSoftFloatABI = true;
#endif
@@ -74,46 +83,45 @@ namespace internal {
// Registers and FPURegisters.
// Number of general purpose registers.
-static const int kNumRegisters = 32;
-static const int kInvalidRegister = -1;
+const int kNumRegisters = 32;
+const int kInvalidRegister = -1;
// Number of registers with HI, LO, and pc.
-static const int kNumSimuRegisters = 35;
+const int kNumSimuRegisters = 35;
// In the simulator, the PC register is simulated as the 34th register.
-static const int kPCRegister = 34;
+const int kPCRegister = 34;
// Number coprocessor registers.
-static const int kNumFPURegisters = 32;
-static const int kInvalidFPURegister = -1;
+const int kNumFPURegisters = 32;
+const int kInvalidFPURegister = -1;
// FPU (coprocessor 1) control registers. Currently only FCSR is implemented.
-static const int kFCSRRegister = 31;
-static const int kInvalidFPUControlRegister = -1;
-static const uint32_t kFPUInvalidResult = (uint32_t) (1 << 31) - 1;
+const int kFCSRRegister = 31;
+const int kInvalidFPUControlRegister = -1;
+const uint32_t kFPUInvalidResult = (uint32_t) (1 << 31) - 1;
// FCSR constants.
-static const uint32_t kFCSRInexactFlagBit = 2;
-static const uint32_t kFCSRUnderflowFlagBit = 3;
-static const uint32_t kFCSROverflowFlagBit = 4;
-static const uint32_t kFCSRDivideByZeroFlagBit = 5;
-static const uint32_t kFCSRInvalidOpFlagBit = 6;
-
-static const uint32_t kFCSRInexactFlagMask = 1 << kFCSRInexactFlagBit;
-static const uint32_t kFCSRUnderflowFlagMask = 1 << kFCSRUnderflowFlagBit;
-static const uint32_t kFCSROverflowFlagMask = 1 << kFCSROverflowFlagBit;
-static const uint32_t kFCSRDivideByZeroFlagMask = 1 << kFCSRDivideByZeroFlagBit;
-static const uint32_t kFCSRInvalidOpFlagMask = 1 << kFCSRInvalidOpFlagBit;
-
-static const uint32_t kFCSRFlagMask =
+const uint32_t kFCSRInexactFlagBit = 2;
+const uint32_t kFCSRUnderflowFlagBit = 3;
+const uint32_t kFCSROverflowFlagBit = 4;
+const uint32_t kFCSRDivideByZeroFlagBit = 5;
+const uint32_t kFCSRInvalidOpFlagBit = 6;
+
+const uint32_t kFCSRInexactFlagMask = 1 << kFCSRInexactFlagBit;
+const uint32_t kFCSRUnderflowFlagMask = 1 << kFCSRUnderflowFlagBit;
+const uint32_t kFCSROverflowFlagMask = 1 << kFCSROverflowFlagBit;
+const uint32_t kFCSRDivideByZeroFlagMask = 1 << kFCSRDivideByZeroFlagBit;
+const uint32_t kFCSRInvalidOpFlagMask = 1 << kFCSRInvalidOpFlagBit;
+
+const uint32_t kFCSRFlagMask =
kFCSRInexactFlagMask |
kFCSRUnderflowFlagMask |
kFCSROverflowFlagMask |
kFCSRDivideByZeroFlagMask |
kFCSRInvalidOpFlagMask;
-static const uint32_t kFCSRExceptionFlagMask =
- kFCSRFlagMask ^ kFCSRInexactFlagMask;
+const uint32_t kFCSRExceptionFlagMask = kFCSRFlagMask ^ kFCSRInexactFlagMask;
// Helper functions for converting between register numbers and names.
class Registers {
@@ -126,7 +134,7 @@ class Registers {
struct RegisterAlias {
int reg;
- const char *name;
+ const char* name;
};
static const int32_t kMaxValue = 0x7fffffff;
@@ -148,7 +156,7 @@ class FPURegisters {
struct RegisterAlias {
int creg;
- const char *name;
+ const char* name;
};
private:
@@ -177,67 +185,66 @@ enum SoftwareInterruptCodes {
// instructions (see Assembler::stop()).
// - Breaks larger than kMaxStopCode are simple breaks, dropping you into the
// debugger.
-static const uint32_t kMaxWatchpointCode = 31;
-static const uint32_t kMaxStopCode = 127;
+const uint32_t kMaxWatchpointCode = 31;
+const uint32_t kMaxStopCode = 127;
STATIC_ASSERT(kMaxWatchpointCode < kMaxStopCode);
// ----- Fields offset and length.
-static const int kOpcodeShift = 26;
-static const int kOpcodeBits = 6;
-static const int kRsShift = 21;
-static const int kRsBits = 5;
-static const int kRtShift = 16;
-static const int kRtBits = 5;
-static const int kRdShift = 11;
-static const int kRdBits = 5;
-static const int kSaShift = 6;
-static const int kSaBits = 5;
-static const int kFunctionShift = 0;
-static const int kFunctionBits = 6;
-static const int kLuiShift = 16;
-
-static const int kImm16Shift = 0;
-static const int kImm16Bits = 16;
-static const int kImm26Shift = 0;
-static const int kImm26Bits = 26;
-static const int kImm28Shift = 0;
-static const int kImm28Bits = 28;
+const int kOpcodeShift = 26;
+const int kOpcodeBits = 6;
+const int kRsShift = 21;
+const int kRsBits = 5;
+const int kRtShift = 16;
+const int kRtBits = 5;
+const int kRdShift = 11;
+const int kRdBits = 5;
+const int kSaShift = 6;
+const int kSaBits = 5;
+const int kFunctionShift = 0;
+const int kFunctionBits = 6;
+const int kLuiShift = 16;
+
+const int kImm16Shift = 0;
+const int kImm16Bits = 16;
+const int kImm26Shift = 0;
+const int kImm26Bits = 26;
+const int kImm28Shift = 0;
+const int kImm28Bits = 28;
// In branches and jumps immediate fields point to words, not bytes,
// and are therefore shifted by 2.
-static const int kImmFieldShift = 2;
-
-static const int kFsShift = 11;
-static const int kFsBits = 5;
-static const int kFtShift = 16;
-static const int kFtBits = 5;
-static const int kFdShift = 6;
-static const int kFdBits = 5;
-static const int kFCccShift = 8;
-static const int kFCccBits = 3;
-static const int kFBccShift = 18;
-static const int kFBccBits = 3;
-static const int kFBtrueShift = 16;
-static const int kFBtrueBits = 1;
+const int kImmFieldShift = 2;
+
+const int kFsShift = 11;
+const int kFsBits = 5;
+const int kFtShift = 16;
+const int kFtBits = 5;
+const int kFdShift = 6;
+const int kFdBits = 5;
+const int kFCccShift = 8;
+const int kFCccBits = 3;
+const int kFBccShift = 18;
+const int kFBccBits = 3;
+const int kFBtrueShift = 16;
+const int kFBtrueBits = 1;
// ----- Miscellaneous useful masks.
// Instruction bit masks.
-static const int kOpcodeMask = ((1 << kOpcodeBits) - 1) << kOpcodeShift;
-static const int kImm16Mask = ((1 << kImm16Bits) - 1) << kImm16Shift;
-static const int kImm26Mask = ((1 << kImm26Bits) - 1) << kImm26Shift;
-static const int kImm28Mask = ((1 << kImm28Bits) - 1) << kImm28Shift;
-static const int kRsFieldMask = ((1 << kRsBits) - 1) << kRsShift;
-static const int kRtFieldMask = ((1 << kRtBits) - 1) << kRtShift;
-static const int kRdFieldMask = ((1 << kRdBits) - 1) << kRdShift;
-static const int kSaFieldMask = ((1 << kSaBits) - 1) << kSaShift;
-static const int kFunctionFieldMask =
- ((1 << kFunctionBits) - 1) << kFunctionShift;
+const int kOpcodeMask = ((1 << kOpcodeBits) - 1) << kOpcodeShift;
+const int kImm16Mask = ((1 << kImm16Bits) - 1) << kImm16Shift;
+const int kImm26Mask = ((1 << kImm26Bits) - 1) << kImm26Shift;
+const int kImm28Mask = ((1 << kImm28Bits) - 1) << kImm28Shift;
+const int kRsFieldMask = ((1 << kRsBits) - 1) << kRsShift;
+const int kRtFieldMask = ((1 << kRtBits) - 1) << kRtShift;
+const int kRdFieldMask = ((1 << kRdBits) - 1) << kRdShift;
+const int kSaFieldMask = ((1 << kSaBits) - 1) << kSaShift;
+const int kFunctionFieldMask = ((1 << kFunctionBits) - 1) << kFunctionShift;
// Misc masks.
-static const int kHiMask = 0xffff << 16;
-static const int kLoMask = 0xffff;
-static const int kSignMask = 0x80000000;
-static const int kJumpAddrMask = (1 << (kImm26Bits + kImmFieldShift)) - 1;
+const int kHiMask = 0xffff << 16;
+const int kLoMask = 0xffff;
+const int kSignMask = 0x80000000;
+const int kJumpAddrMask = (1 << (kImm26Bits + kImmFieldShift)) - 1;
// ----- MIPS Opcodes and Function Fields.
// We use this presentation to stay close to the table representation in
@@ -529,7 +536,7 @@ enum FPURoundingMode {
kRoundToMinusInf = RM
};
-static const uint32_t kFPURoundingModeMask = 3 << 0;
+const uint32_t kFPURoundingModeMask = 3 << 0;
enum CheckForInexactConversion {
kCheckForInexactConversion,
@@ -749,7 +756,7 @@ class Instruction {
// Say if the instruction should not be used in a branch delay slot.
bool IsForbiddenInBranchDelay() const;
- // Say if the instruction 'links'. eg: jal, bal.
+ // Say if the instruction 'links'. e.g. jal, bal.
bool IsLinkingInstruction() const;
// Say if the instruction is a break or a trap.
bool IsTrap() const;
@@ -772,18 +779,18 @@ class Instruction {
// MIPS assembly various constants.
// C/C++ argument slots size.
-static const int kCArgSlotCount = 4;
-static const int kCArgsSlotsSize = kCArgSlotCount * Instruction::kInstrSize;
+const int kCArgSlotCount = 4;
+const int kCArgsSlotsSize = kCArgSlotCount * Instruction::kInstrSize;
// JS argument slots size.
-static const int kJSArgsSlotsSize = 0 * Instruction::kInstrSize;
+const int kJSArgsSlotsSize = 0 * Instruction::kInstrSize;
// Assembly builtins argument slots size.
-static const int kBArgsSlotsSize = 0 * Instruction::kInstrSize;
+const int kBArgsSlotsSize = 0 * Instruction::kInstrSize;
-static const int kBranchReturnOffset = 2 * Instruction::kInstrSize;
+const int kBranchReturnOffset = 2 * Instruction::kInstrSize;
-static const int kDoubleAlignmentBits = 3;
-static const int kDoubleAlignment = (1 << kDoubleAlignmentBits);
-static const int kDoubleAlignmentMask = kDoubleAlignment - 1;
+const int kDoubleAlignmentBits = 3;
+const int kDoubleAlignment = (1 << kDoubleAlignmentBits);
+const int kDoubleAlignmentMask = kDoubleAlignment - 1;
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/mips/cpu-mips.cc b/src/3rdparty/v8/src/mips/cpu-mips.cc
index 26e95fb..93ebeda 100644
--- a/src/3rdparty/v8/src/mips/cpu-mips.cc
+++ b/src/3rdparty/v8/src/mips/cpu-mips.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -47,7 +47,7 @@ namespace v8 {
namespace internal {
-void CPU::Setup() {
+void CPU::SetUp() {
CpuFeatures::Probe();
}
@@ -64,15 +64,19 @@ void CPU::FlushICache(void* start, size_t size) {
}
#if !defined (USE_SIMULATOR)
+#if defined(ANDROID)
+  // Bionic cacheflush can typically run in userland, avoiding a kernel call.
+ char *end = reinterpret_cast<char *>(start) + size;
+ cacheflush(
+ reinterpret_cast<intptr_t>(start), reinterpret_cast<intptr_t>(end), 0);
+#else // ANDROID
int res;
-
// See http://www.linux-mips.org/wiki/Cacheflush_Syscall.
res = syscall(__NR_cacheflush, start, size, ICACHE);
-
if (res) {
V8_Fatal(__FILE__, __LINE__, "Failed to flush the instruction cache");
}
-
+#endif // ANDROID
#else // USE_SIMULATOR.
// Not generating mips instructions for C-code. This means that we are
// building a mips emulator based target. We should notify the simulator
diff --git a/src/3rdparty/v8/src/mips/debug-mips.cc b/src/3rdparty/v8/src/mips/debug-mips.cc
index 5b3ae89..83f5f50 100644
--- a/src/3rdparty/v8/src/mips/debug-mips.cc
+++ b/src/3rdparty/v8/src/mips/debug-mips.cc
@@ -152,8 +152,8 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
#ifdef DEBUG
__ RecordComment("// Calling from debug break to runtime - come in - over");
#endif
- __ mov(a0, zero_reg); // No arguments.
- __ li(a1, Operand(ExternalReference::debug_break(masm->isolate())));
+ __ PrepareCEntryArgs(0); // No arguments.
+ __ PrepareCEntryFunction(ExternalReference::debug_break(masm->isolate()));
CEntryStub ceb(1);
__ CallStub(&ceb);
@@ -243,14 +243,6 @@ void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) {
}
-void Debug::GenerateConstructCallDebugBreak(MacroAssembler* masm) {
- // Calling convention for construct call (from builtins-mips.cc).
- // -- a0 : number of arguments (not smi)
- // -- a1 : constructor function
- Generate_DebugBreakCallHelper(masm, a1.bit(), a0.bit());
-}
-
-
void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) {
// In places other than IC call sites it is expected that v0 is TOS which
// is an object - this is not generally the case so this should be used with
@@ -259,11 +251,43 @@ void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) {
}
-void Debug::GenerateStubNoRegistersDebugBreak(MacroAssembler* masm) {
+void Debug::GenerateCallFunctionStubDebugBreak(MacroAssembler* masm) {
+ // Register state for CallFunctionStub (from code-stubs-mips.cc).
// ----------- S t a t e -------------
- // No registers used on entry.
+ // -- a1 : function
// -----------------------------------
- Generate_DebugBreakCallHelper(masm, 0, 0);
+ Generate_DebugBreakCallHelper(masm, a1.bit(), 0);
+}
+
+
+void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) {
+ // Register state for CallFunctionStub (from code-stubs-mips.cc).
+ // ----------- S t a t e -------------
+ // -- a1 : function
+ // -- a2 : cache cell for call target
+ // -----------------------------------
+ Generate_DebugBreakCallHelper(masm, a1.bit() | a2.bit(), 0);
+}
+
+
+void Debug::GenerateCallConstructStubDebugBreak(MacroAssembler* masm) {
+ // Calling convention for CallConstructStub (from code-stubs-mips.cc).
+ // ----------- S t a t e -------------
+ // -- a0 : number of arguments (not smi)
+ // -- a1 : constructor function
+ // -----------------------------------
+ Generate_DebugBreakCallHelper(masm, a1.bit() , a0.bit());
+}
+
+
+void Debug::GenerateCallConstructStubRecordDebugBreak(MacroAssembler* masm) {
+ // Calling convention for CallConstructStub (from code-stubs-mips.cc).
+ // ----------- S t a t e -------------
+ // -- a0 : number of arguments (not smi)
+ // -- a1 : constructor function
+ // -- a2 : cache cell for call target
+ // -----------------------------------
+ Generate_DebugBreakCallHelper(masm, a1.bit() | a2.bit(), a0.bit());
}
diff --git a/src/3rdparty/v8/src/mips/deoptimizer-mips.cc b/src/3rdparty/v8/src/mips/deoptimizer-mips.cc
index bc735df..51c2e46 100644
--- a/src/3rdparty/v8/src/mips/deoptimizer-mips.cc
+++ b/src/3rdparty/v8/src/mips/deoptimizer-mips.cc
@@ -36,21 +36,12 @@ namespace v8 {
namespace internal {
-const int Deoptimizer::table_entry_size_ = 32;
-
-
int Deoptimizer::patch_size() {
const int kCallInstructionSizeInWords = 4;
return kCallInstructionSizeInWords * Assembler::kInstrSize;
}
-void Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code) {
- // Nothing to do. No new relocation information is written for lazy
- // deoptimization on MIPS.
-}
-
-
void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
HandleScope scope;
AssertNoAllocation no_allocation;
@@ -59,58 +50,38 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
// Get the optimized code.
Code* code = function->code();
+ Address code_start_address = code->instruction_start();
// Invalidate the relocation information, as it will become invalid by the
// code patching below, and is not needed any more.
code->InvalidateRelocation();
- // For each return after a safepoint insert an absolute call to the
- // corresponding deoptimization entry.
- unsigned last_pc_offset = 0;
- SafepointTable table(function->code());
- for (unsigned i = 0; i < table.length(); i++) {
- unsigned pc_offset = table.GetPcOffset(i);
- SafepointEntry safepoint_entry = table.GetEntry(i);
- int deoptimization_index = safepoint_entry.deoptimization_index();
- int gap_code_size = safepoint_entry.gap_code_size();
- // Check that we did not shoot past next safepoint.
- CHECK(pc_offset >= last_pc_offset);
+  // For each LLazyBailout instruction, insert a call to the corresponding
+ // deoptimization entry.
+ DeoptimizationInputData* deopt_data =
+ DeoptimizationInputData::cast(code->deoptimization_data());
#ifdef DEBUG
- // Destroy the code which is not supposed to be run again.
- int instructions = (pc_offset - last_pc_offset) / Assembler::kInstrSize;
- CodePatcher destroyer(code->instruction_start() + last_pc_offset,
- instructions);
- for (int x = 0; x < instructions; x++) {
- destroyer.masm()->break_(0);
- }
+ Address prev_call_address = NULL;
#endif
- last_pc_offset = pc_offset;
- if (deoptimization_index != Safepoint::kNoDeoptimizationIndex) {
- Address deoptimization_entry = Deoptimizer::GetDeoptimizationEntry(
- deoptimization_index, Deoptimizer::LAZY);
- last_pc_offset += gap_code_size;
- int call_size_in_bytes = MacroAssembler::CallSize(deoptimization_entry,
- RelocInfo::NONE);
- int call_size_in_words = call_size_in_bytes / Assembler::kInstrSize;
- ASSERT(call_size_in_bytes % Assembler::kInstrSize == 0);
- ASSERT(call_size_in_bytes <= patch_size());
- CodePatcher patcher(code->instruction_start() + last_pc_offset,
- call_size_in_words);
- patcher.masm()->Call(deoptimization_entry, RelocInfo::NONE);
- last_pc_offset += call_size_in_bytes;
- }
- }
+ for (int i = 0; i < deopt_data->DeoptCount(); i++) {
+ if (deopt_data->Pc(i)->value() == -1) continue;
+ Address call_address = code_start_address + deopt_data->Pc(i)->value();
+ Address deopt_entry = GetDeoptimizationEntry(i, LAZY);
+ int call_size_in_bytes = MacroAssembler::CallSize(deopt_entry,
+ RelocInfo::NONE);
+ int call_size_in_words = call_size_in_bytes / Assembler::kInstrSize;
+ ASSERT(call_size_in_bytes % Assembler::kInstrSize == 0);
+ ASSERT(call_size_in_bytes <= patch_size());
+ CodePatcher patcher(call_address, call_size_in_words);
+ patcher.masm()->Call(deopt_entry, RelocInfo::NONE);
+ ASSERT(prev_call_address == NULL ||
+ call_address >= prev_call_address + patch_size());
+ ASSERT(call_address + patch_size() <= code->instruction_end());
#ifdef DEBUG
- // Destroy the code which is not supposed to be run again.
- int instructions =
- (code->safepoint_table_offset() - last_pc_offset) / Assembler::kInstrSize;
- CodePatcher destroyer(code->instruction_start() + last_pc_offset,
- instructions);
- for (int x = 0; x < instructions; x++) {
- destroyer.masm()->break_(0);
- }
+ prev_call_address = call_address;
#endif
+ }
Isolate* isolate = code->GetIsolate();
@@ -148,7 +119,7 @@ void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
const int kInstrSize = Assembler::kInstrSize;
// This structure comes from FullCodeGenerator::EmitStackCheck.
// The call of the stack guard check has the following form:
- // sltu at, sp, t0
+ // sltu at, sp, t0 / slt at, a3, zero_reg (in case of count based interrupts)
// beq at, zero_reg, ok
// lui t9, <stack guard address> upper
// ori t9, <stack guard address> lower
@@ -196,7 +167,11 @@ void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
// Restore the sltu instruction so beq can be taken again.
CodePatcher patcher(pc_after - 6 * kInstrSize, 1);
- patcher.masm()->sltu(at, sp, t0);
+ if (FLAG_count_based_interrupts) {
+ patcher.masm()->slt(at, a3, zero_reg);
+ } else {
+ patcher.masm()->sltu(at, sp, t0);
+ }
// Replace the on-stack replacement address in the load-immediate (lui/ori
// pair) with the entry address of the normal stack-check code.
@@ -244,12 +219,13 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
ASSERT(Translation::BEGIN == opcode);
USE(opcode);
int count = iterator.Next();
+ iterator.Skip(1); // Drop JS frame count.
ASSERT(count == 1);
USE(count);
opcode = static_cast<Translation::Opcode>(iterator.Next());
USE(opcode);
- ASSERT(Translation::FRAME == opcode);
+ ASSERT(Translation::JS_FRAME == opcode);
unsigned node_id = iterator.Next();
USE(node_id);
ASSERT(node_id == ast_id);
@@ -285,9 +261,7 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
output_ = new FrameDescription*[1];
output_[0] = new(output_frame_size) FrameDescription(
output_frame_size, function_);
-#ifdef DEBUG
- output_[0]->SetKind(Code::OPTIMIZED_FUNCTION);
-#endif
+ output_[0]->SetFrameType(StackFrame::JAVA_SCRIPT);
// Clear the incoming parameters in the optimized frame to avoid
// confusing the garbage collector.
@@ -352,7 +326,7 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
output_[0] = input_;
output_[0]->SetPc(reinterpret_cast<uint32_t>(from_));
} else {
- // Setup the frame pointer and the context pointer.
+ // Set up the frame pointer and the context pointer.
output_[0]->SetRegister(fp.code(), input_->GetRegister(fp.code()));
output_[0]->SetRegister(cp.code(), input_->GetRegister(cp.code()));
@@ -375,15 +349,220 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
}
+void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
+ int frame_index) {
+ JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ unsigned height = iterator->Next();
+ unsigned height_in_bytes = height * kPointerSize;
+ if (FLAG_trace_deopt) {
+ PrintF(" translating arguments adaptor => height=%d\n", height_in_bytes);
+ }
+
+ unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
+ unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+ // Allocate and store the output frame description.
+ FrameDescription* output_frame =
+ new(output_frame_size) FrameDescription(output_frame_size, function);
+ output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);
+
+ // Arguments adaptor can not be topmost or bottommost.
+ ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+ ASSERT(output_[frame_index] == NULL);
+ output_[frame_index] = output_frame;
+
+ // The top address of the frame is computed from the previous
+ // frame's top and this frame's size.
+ uint32_t top_address;
+ top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+ output_frame->SetTop(top_address);
+
+ // Compute the incoming parameter translation.
+ int parameter_count = height;
+ unsigned output_offset = output_frame_size;
+ for (int i = 0; i < parameter_count; ++i) {
+ output_offset -= kPointerSize;
+ DoTranslateCommand(iterator, frame_index, output_offset);
+ }
+
+ // Read caller's PC from the previous frame.
+ output_offset -= kPointerSize;
+ intptr_t callers_pc = output_[frame_index - 1]->GetPc();
+ output_frame->SetFrameSlot(output_offset, callers_pc);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
+ top_address + output_offset, output_offset, callers_pc);
+ }
+
+ // Read caller's FP from the previous frame, and set this frame's FP.
+ output_offset -= kPointerSize;
+ intptr_t value = output_[frame_index - 1]->GetFp();
+ output_frame->SetFrameSlot(output_offset, value);
+ intptr_t fp_value = top_address + output_offset;
+ output_frame->SetFp(fp_value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
+ fp_value, output_offset, value);
+ }
+
+ // A marker value is used in place of the context.
+ output_offset -= kPointerSize;
+ intptr_t context = reinterpret_cast<intptr_t>(
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ output_frame->SetFrameSlot(output_offset, context);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context (adaptor sentinel)\n",
+ top_address + output_offset, output_offset, context);
+ }
+
+ // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(function);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // Number of incoming arguments.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
+ top_address + output_offset, output_offset, value, height - 1);
+ }
+
+ ASSERT(0 == output_offset);
+
+ Builtins* builtins = isolate_->builtins();
+ Code* adaptor_trampoline =
+ builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
+ uint32_t pc = reinterpret_cast<uint32_t>(
+ adaptor_trampoline->instruction_start() +
+ isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
+ output_frame->SetPc(pc);
+}
+
+
+void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
+ int frame_index) {
+ JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ unsigned height = iterator->Next();
+ unsigned height_in_bytes = height * kPointerSize;
+ if (FLAG_trace_deopt) {
+ PrintF(" translating construct stub => height=%d\n", height_in_bytes);
+ }
+
+ unsigned fixed_frame_size = 7 * kPointerSize;
+ unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+ // Allocate and store the output frame description.
+ FrameDescription* output_frame =
+ new(output_frame_size) FrameDescription(output_frame_size, function);
+ output_frame->SetFrameType(StackFrame::CONSTRUCT);
+
+ // Construct stub can not be topmost or bottommost.
+ ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+ ASSERT(output_[frame_index] == NULL);
+ output_[frame_index] = output_frame;
+
+ // The top address of the frame is computed from the previous
+ // frame's top and this frame's size.
+ uint32_t top_address;
+ top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+ output_frame->SetTop(top_address);
+
+ // Compute the incoming parameter translation.
+ int parameter_count = height;
+ unsigned output_offset = output_frame_size;
+ for (int i = 0; i < parameter_count; ++i) {
+ output_offset -= kPointerSize;
+ DoTranslateCommand(iterator, frame_index, output_offset);
+ }
+
+ // Read caller's PC from the previous frame.
+ output_offset -= kPointerSize;
+ intptr_t callers_pc = output_[frame_index - 1]->GetPc();
+ output_frame->SetFrameSlot(output_offset, callers_pc);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's pc\n",
+ top_address + output_offset, output_offset, callers_pc);
+ }
+
+ // Read caller's FP from the previous frame, and set this frame's FP.
+ output_offset -= kPointerSize;
+ intptr_t value = output_[frame_index - 1]->GetFp();
+ output_frame->SetFrameSlot(output_offset, value);
+ intptr_t fp_value = top_address + output_offset;
+ output_frame->SetFp(fp_value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n",
+ fp_value, output_offset, value);
+ }
+
+ // The context can be gotten from the previous frame.
+ output_offset -= kPointerSize;
+ value = output_[frame_index - 1]->GetContext();
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // A marker value is used in place of the function.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; function (construct sentinel)\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // Number of incoming arguments.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<uint32_t>(Smi::FromInt(height - 1));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; argc (%d)\n",
+ top_address + output_offset, output_offset, value, height - 1);
+ }
+
+ // Constructor function being invoked by the stub.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(function);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; constructor function\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // The newly allocated object was passed as receiver in the artificial
+ // constructor stub environment created by HEnvironment::CopyForInlining().
+ output_offset -= kPointerSize;
+ value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08x: [top + %d] <- 0x%08x ; allocated receiver\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ ASSERT(0 == output_offset);
+
+ Builtins* builtins = isolate_->builtins();
+ Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
+ uint32_t pc = reinterpret_cast<uint32_t>(
+ construct_stub->instruction_start() +
+ isolate_->heap()->construct_stub_deopt_pc_offset()->value());
+ output_frame->SetPc(pc);
+}
+
+
// This code is very similar to ia32/arm code, but relies on register names
// (fp, sp) and how the frame is laid out.
-void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
- int frame_index) {
+void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
+ int frame_index) {
// Read the ast node id, function, and frame height for this output frame.
- Translation::Opcode opcode =
- static_cast<Translation::Opcode>(iterator->Next());
- USE(opcode);
- ASSERT(Translation::FRAME == opcode);
int node_id = iterator->Next();
JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
unsigned height = iterator->Next();
@@ -403,9 +582,7 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
// Allocate and store the output frame description.
FrameDescription* output_frame =
new(output_frame_size) FrameDescription(output_frame_size, function);
-#ifdef DEBUG
- output_frame->SetKind(Code::FUNCTION);
-#endif
+ output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);
bool is_bottommost = (0 == frame_index);
bool is_topmost = (output_count_ - 1 == frame_index);
@@ -493,9 +670,8 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
value = reinterpret_cast<intptr_t>(function->context());
}
output_frame->SetFrameSlot(output_offset, value);
- if (is_topmost) {
- output_frame->SetRegister(cp.code(), value);
- }
+ output_frame->SetContext(value);
+ if (is_topmost) output_frame->SetRegister(cp.code(), value);
if (FLAG_trace_deopt) {
PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n",
top_address + output_offset, output_offset, value);
@@ -759,10 +935,7 @@ void Deoptimizer::EntryGenerator::Generate() {
}
}
- // Set up the roots register.
- ExternalReference roots_array_start =
- ExternalReference::roots_array_start(isolate);
- __ li(roots, Operand(roots_array_start));
+ __ InitializeRootRegister();
__ pop(at); // Get continuation, leave pc on stack.
__ pop(ra);
@@ -771,32 +944,46 @@ void Deoptimizer::EntryGenerator::Generate() {
}
+// Maximum size of a table entry generated below.
+const int Deoptimizer::table_entry_size_ = 9 * Assembler::kInstrSize;
+
void Deoptimizer::TableEntryGenerator::GeneratePrologue() {
Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm());
// Create a sequence of deoptimization entries. Note that any
// registers may be still live.
-
- Label done;
+ Label table_start;
+ __ bind(&table_start);
for (int i = 0; i < count(); i++) {
- int start = masm()->pc_offset();
- USE(start);
+ Label start;
+ __ bind(&start);
if (type() != EAGER) {
// Emulate ia32 like call by pushing return address to stack.
- __ push(ra);
+ __ addiu(sp, sp, -2 * kPointerSize);
+ __ sw(ra, MemOperand(sp, 1 * kPointerSize));
+ } else {
+ __ addiu(sp, sp, -1 * kPointerSize);
}
- __ li(at, Operand(i));
- __ push(at);
- __ Branch(&done);
+ // Jump over the remaining deopt entries (including this one).
+ // This code is always reached by calling Jump, which puts the target (label
+ // start) into t9.
+ const int remaining_entries = (count() - i) * table_entry_size_;
+ __ Addu(t9, t9, remaining_entries);
+ // 'at' was clobbered so we can only load the current entry value here.
+ __ li(at, i);
+ __ jr(t9); // Expose delay slot.
+ __ sw(at, MemOperand(sp, 0 * kPointerSize)); // In the delay slot.
// Pad the rest of the code.
- while (table_entry_size_ > (masm()->pc_offset() - start)) {
+ while (table_entry_size_ > (masm()->SizeOfCodeGeneratedSince(&start))) {
__ nop();
}
- ASSERT_EQ(table_entry_size_, masm()->pc_offset() - start);
+ ASSERT_EQ(table_entry_size_, masm()->SizeOfCodeGeneratedSince(&start));
}
- __ bind(&done);
+
+ ASSERT_EQ(masm()->SizeOfCodeGeneratedSince(&table_start),
+ count() * table_entry_size_);
}
#undef __
diff --git a/src/3rdparty/v8/src/mips/disasm-mips.cc b/src/3rdparty/v8/src/mips/disasm-mips.cc
index fde0c58..1d40c2c 100644
--- a/src/3rdparty/v8/src/mips/disasm-mips.cc
+++ b/src/3rdparty/v8/src/mips/disasm-mips.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -515,7 +515,7 @@ void Decoder::DecodeTypeRegister(Instruction* instr) {
Format(instr, "cvt.w.d 'fd, 'fs");
break;
case CVT_L_D: {
- if (mips32r2) {
+ if (kArchVariant == kMips32r2) {
Format(instr, "cvt.l.d 'fd, 'fs");
} else {
Unknown(instr);
@@ -526,7 +526,7 @@ void Decoder::DecodeTypeRegister(Instruction* instr) {
Format(instr, "trunc.w.d 'fd, 'fs");
break;
case TRUNC_L_D: {
- if (mips32r2) {
+ if (kArchVariant == kMips32r2) {
Format(instr, "trunc.l.d 'fd, 'fs");
} else {
Unknown(instr);
@@ -592,7 +592,7 @@ void Decoder::DecodeTypeRegister(Instruction* instr) {
case L:
switch (instr->FunctionFieldRaw()) {
case CVT_D_L: {
- if (mips32r2) {
+ if (kArchVariant == kMips32r2) {
Format(instr, "cvt.d.l 'fd, 'fs");
} else {
Unknown(instr);
@@ -600,7 +600,7 @@ void Decoder::DecodeTypeRegister(Instruction* instr) {
break;
}
case CVT_S_L: {
- if (mips32r2) {
+ if (kArchVariant == kMips32r2) {
Format(instr, "cvt.s.l 'fd, 'fs");
} else {
Unknown(instr);
@@ -636,7 +636,7 @@ void Decoder::DecodeTypeRegister(Instruction* instr) {
if (instr->RsValue() == 0) {
Format(instr, "srl 'rd, 'rt, 'sa");
} else {
- if (mips32r2) {
+ if (kArchVariant == kMips32r2) {
Format(instr, "rotr 'rd, 'rt, 'sa");
} else {
Unknown(instr);
@@ -653,7 +653,7 @@ void Decoder::DecodeTypeRegister(Instruction* instr) {
if (instr->SaValue() == 0) {
Format(instr, "srlv 'rd, 'rt, 'rs");
} else {
- if (mips32r2) {
+ if (kArchVariant == kMips32r2) {
Format(instr, "rotrv 'rd, 'rt, 'rs");
} else {
Unknown(instr);
@@ -770,7 +770,7 @@ void Decoder::DecodeTypeRegister(Instruction* instr) {
case SPECIAL3:
switch (instr->FunctionFieldRaw()) {
case INS: {
- if (mips32r2) {
+ if (kArchVariant == kMips32r2) {
Format(instr, "ins 'rt, 'rs, 'sa, 'ss2");
} else {
Unknown(instr);
@@ -778,7 +778,7 @@ void Decoder::DecodeTypeRegister(Instruction* instr) {
break;
}
case EXT: {
- if (mips32r2) {
+ if (kArchVariant == kMips32r2) {
Format(instr, "ext 'rt, 'rs, 'sa, 'ss1");
} else {
Unknown(instr);
diff --git a/src/3rdparty/v8/src/mips/frames-mips.h b/src/3rdparty/v8/src/mips/frames-mips.h
index a2ebce6..2ed358a 100644
--- a/src/3rdparty/v8/src/mips/frames-mips.h
+++ b/src/3rdparty/v8/src/mips/frames-mips.h
@@ -36,9 +36,9 @@ namespace internal {
// Register lists.
// Note that the bit values must match those used in actual instruction
// encoding.
-static const int kNumRegs = 32;
+const int kNumRegs = 32;
-static const RegList kJSCallerSaved =
+const RegList kJSCallerSaved =
1 << 2 | // v0
1 << 3 | // v1
1 << 4 | // a0
@@ -54,7 +54,7 @@ static const RegList kJSCallerSaved =
1 << 14 | // t6
1 << 15; // t7
-static const int kNumJSCallerSaved = 14;
+const int kNumJSCallerSaved = 14;
// Return the code of the n-th caller-saved register available to JavaScript
@@ -63,7 +63,7 @@ int JSCallerSavedCode(int n);
// Callee-saved registers preserved when switching from C to JavaScript.
-static const RegList kCalleeSaved =
+const RegList kCalleeSaved =
1 << 16 | // s0
1 << 17 | // s1
1 << 18 | // s2
@@ -74,9 +74,9 @@ static const RegList kCalleeSaved =
1 << 23 | // s7 (cp in Javascript code)
1 << 30; // fp/s8
-static const int kNumCalleeSaved = 9;
+const int kNumCalleeSaved = 9;
-static const RegList kCalleeSavedFPU =
+const RegList kCalleeSavedFPU =
1 << 20 | // f20
1 << 22 | // f22
1 << 24 | // f24
@@ -84,9 +84,9 @@ static const RegList kCalleeSavedFPU =
1 << 28 | // f28
1 << 30; // f30
-static const int kNumCalleeSavedFPU = 6;
+const int kNumCalleeSavedFPU = 6;
-static const RegList kCallerSavedFPU =
+const RegList kCallerSavedFPU =
1 << 0 | // f0
1 << 2 | // f2
1 << 4 | // f4
@@ -101,20 +101,20 @@ static const RegList kCallerSavedFPU =
// Number of registers for which space is reserved in safepoints. Must be a
// multiple of 8.
-static const int kNumSafepointRegisters = 24;
+const int kNumSafepointRegisters = 24;
// Define the list of registers actually saved at safepoints.
// Note that the number of saved registers may be smaller than the reserved
// space, i.e. kNumSafepointSavedRegisters <= kNumSafepointRegisters.
-static const RegList kSafepointSavedRegisters = kJSCallerSaved | kCalleeSaved;
-static const int kNumSafepointSavedRegisters =
+const RegList kSafepointSavedRegisters = kJSCallerSaved | kCalleeSaved;
+const int kNumSafepointSavedRegisters =
kNumJSCallerSaved + kNumCalleeSaved;
typedef Object* JSCallerSavedBuffer[kNumJSCallerSaved];
-static const int kUndefIndex = -1;
+const int kUndefIndex = -1;
// Map with indexes on stack that corresponds to codes of saved registers.
-static const int kSafepointRegisterStackIndexMap[kNumRegs] = {
+const int kSafepointRegisterStackIndexMap[kNumRegs] = {
kUndefIndex, // zero_reg
kUndefIndex, // at
0, // v0
@@ -154,13 +154,13 @@ static const int kSafepointRegisterStackIndexMap[kNumRegs] = {
class StackHandlerConstants : public AllStatic {
public:
- static const int kNextOffset = 0 * kPointerSize;
- static const int kStateOffset = 1 * kPointerSize;
- static const int kContextOffset = 2 * kPointerSize;
- static const int kFPOffset = 3 * kPointerSize;
- static const int kPCOffset = 4 * kPointerSize;
+ static const int kNextOffset = 0 * kPointerSize;
+ static const int kCodeOffset = 1 * kPointerSize;
+ static const int kStateOffset = 2 * kPointerSize;
+ static const int kContextOffset = 3 * kPointerSize;
+ static const int kFPOffset = 4 * kPointerSize;
- static const int kSize = kPCOffset + kPointerSize;
+ static const int kSize = kFPOffset + kPointerSize;
};
@@ -195,6 +195,9 @@ class ExitFrameConstants : public AllStatic {
class StandardFrameConstants : public AllStatic {
public:
+ // Fixed part of the frame consists of return address, caller fp,
+ // context and function.
+ static const int kFixedFrameSize = 4 * kPointerSize;
static const int kExpressionsOffset = -3 * kPointerSize;
static const int kMarkerOffset = -2 * kPointerSize;
static const int kContextOffset = -1 * kPointerSize;
@@ -230,6 +233,8 @@ class JavaScriptFrameConstants : public AllStatic {
class ArgumentsAdaptorFrameConstants : public AllStatic {
public:
static const int kLengthOffset = StandardFrameConstants::kExpressionsOffset;
+ static const int kFrameSize =
+ StandardFrameConstants::kFixedFrameSize + kPointerSize;
};
diff --git a/src/3rdparty/v8/src/mips/full-codegen-mips.cc b/src/3rdparty/v8/src/mips/full-codegen-mips.cc
index 5d0bc9a..2afad13 100644
--- a/src/3rdparty/v8/src/mips/full-codegen-mips.cc
+++ b/src/3rdparty/v8/src/mips/full-codegen-mips.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -42,6 +42,7 @@
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
+#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"
@@ -125,7 +126,7 @@ class JumpPatchSite BASE_EMBEDDED {
// function.
//
// The live registers are:
-// o a1: the JS function object being called (ie, ourselves)
+// o a1: the JS function object being called (i.e. ourselves)
// o cp: our context
// o fp: our caller's frame pointer
// o sp: stack pointer
@@ -133,10 +134,12 @@ class JumpPatchSite BASE_EMBEDDED {
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
-void FullCodeGenerator::Generate(CompilationInfo* info) {
- ASSERT(info_ == NULL);
- info_ = info;
- scope_ = info->scope();
+void FullCodeGenerator::Generate() {
+ CompilationInfo* info = info_;
+ handler_table_ =
+ isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
+ profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
+ Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
SetFunctionPosition(function());
Comment cmnt(masm_, "[ function compiled by full code generator");
@@ -151,7 +154,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// with undefined when called as functions (without an explicit
// receiver object). t1 is zero for method calls and non-zero for
// function calls.
- if (info->is_strict_mode() || info->is_native()) {
+ if (!info->is_classic_mode() || info->is_native()) {
Label ok;
__ Branch(&ok, eq, t1, Operand(zero_reg));
int receiver_offset = info->scope()->num_parameters() * kPointerSize;
@@ -186,13 +189,12 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// Possibly allocate a local context.
int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
- if (heap_slots > 0 ||
- (scope()->is_qml_mode() && scope()->is_global_scope())) {
+ if (heap_slots > 0) {
Comment cmnt(masm_, "[ Allocate local context");
// Argument to NewContext is the function, which is in a1.
__ push(a1);
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub((heap_slots < 0)?0:heap_slots);
+ FastNewContextStub stub(heap_slots);
__ CallStub(&stub);
} else {
__ CallRuntime(Runtime::kNewFunctionContext, 1);
@@ -244,7 +246,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// The stub will rewrite receiever and parameter count if the previous
// stack frame was an arguments adapter frame.
ArgumentsAccessStub::Type type;
- if (is_strict_mode()) {
+ if (!is_classic_mode()) {
type = ArgumentsAccessStub::NEW_STRICT;
} else if (function()->has_duplicate_parameters()) {
type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
@@ -273,11 +275,11 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
- int ignored = 0;
VariableProxy* proxy = scope()->function();
ASSERT(proxy->var()->mode() == CONST ||
proxy->var()->mode() == CONST_HARMONY);
- EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
+ ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
+ EmitDeclaration(proxy, proxy->var()->mode(), NULL);
}
VisitDeclarations(scope()->declarations());
}
@@ -314,7 +316,36 @@ void FullCodeGenerator::ClearAccumulator() {
}
-void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
+void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
+ __ li(a2, Operand(profiling_counter_));
+ __ lw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
+ __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
+ __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
+}
+
+
+void FullCodeGenerator::EmitProfilingCounterReset() {
+ int reset_value = FLAG_interrupt_budget;
+ if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
+ // Self-optimization is a one-off thing: if it fails, don't try again.
+ reset_value = Smi::kMaxValue;
+ }
+ if (isolate()->IsDebuggerActive()) {
+ // Detect debug break requests as soon as possible.
+ reset_value = 10;
+ }
+ __ li(a2, Operand(profiling_counter_));
+ __ li(a3, Operand(Smi::FromInt(reset_value)));
+ __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
+}
+
+
+static const int kMaxBackEdgeWeight = 127;
+static const int kBackEdgeDistanceDivisor = 142;
+
+
+void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
+ Label* back_edge_target) {
// The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
// to make sure it is constant. Branch may emit a skip-or-jump sequence
// instead of the normal Branch. It seems that the "skip" part of that
@@ -323,16 +354,35 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
Comment cmnt(masm_, "[ Stack check");
Label ok;
- __ LoadRoot(t0, Heap::kStackLimitRootIndex);
- __ sltu(at, sp, t0);
- __ beq(at, zero_reg, &ok);
- // CallStub will emit a li t9, ... first, so it is safe to use the delay slot.
- StackCheckStub stub;
- __ CallStub(&stub);
+ if (FLAG_count_based_interrupts) {
+ int weight = 1;
+ if (FLAG_weighted_back_edges) {
+ ASSERT(back_edge_target->is_bound());
+ int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
+ weight = Min(kMaxBackEdgeWeight,
+ Max(1, distance / kBackEdgeDistanceDivisor));
+ }
+ EmitProfilingCounterDecrement(weight);
+ __ slt(at, a3, zero_reg);
+ __ beq(at, zero_reg, &ok);
+ // CallStub will emit a li t9 first, so it is safe to use the delay slot.
+ InterruptStub stub;
+ __ CallStub(&stub);
+ } else {
+ __ LoadRoot(t0, Heap::kStackLimitRootIndex);
+ __ sltu(at, sp, t0);
+ __ beq(at, zero_reg, &ok);
+ // CallStub will emit a li t9 first, so it is safe to use the delay slot.
+ StackCheckStub stub;
+ __ CallStub(&stub);
+ }
// Record a mapping of this PC offset to the OSR id. This is used to find
// the AST id from the unoptimized code in order to use it as a key into
// the deoptimization input data found in the optimized code.
RecordStackCheck(stmt->OsrEntryId());
+ if (FLAG_count_based_interrupts) {
+ EmitProfilingCounterReset();
+ }
__ bind(&ok);
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
@@ -355,6 +405,32 @@ void FullCodeGenerator::EmitReturnSequence() {
__ push(v0);
__ CallRuntime(Runtime::kTraceExit, 1);
}
+ if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
+ // Pretend that the exit is a backwards jump to the entry.
+ int weight = 1;
+ if (info_->ShouldSelfOptimize()) {
+ weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+ } else if (FLAG_weighted_back_edges) {
+ int distance = masm_->pc_offset();
+ weight = Min(kMaxBackEdgeWeight,
+ Max(1, distance / kBackEdgeDistanceDivisor));
+ }
+ EmitProfilingCounterDecrement(weight);
+ Label ok;
+ __ Branch(&ok, ge, a3, Operand(zero_reg));
+ __ push(v0);
+ if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
+ __ lw(a2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+ __ push(a2);
+ __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
+ } else {
+ InterruptStub stub;
+ __ CallStub(&stub);
+ }
+ __ pop(v0);
+ EmitProfilingCounterReset();
+ __ bind(&ok);
+ }
#ifdef DEBUG
// Add a label for checking the size of the code used for returning.
@@ -715,17 +791,16 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
VariableMode mode,
- FunctionLiteral* function,
- int* global_count) {
+ FunctionLiteral* function) {
// If it was not possible to allocate the variable at compile time, we
// need to "declare" it at runtime to make sure it actually exists in the
// local context.
Variable* variable = proxy->var();
- bool binding_needs_init =
- mode == CONST || mode == CONST_HARMONY || mode == LET;
+ bool binding_needs_init = (function == NULL) &&
+ (mode == CONST || mode == CONST_HARMONY || mode == LET);
switch (variable->location()) {
case Variable::UNALLOCATED:
- ++(*global_count);
+ ++global_count_;
break;
case Variable::PARAMETER:
@@ -813,9 +888,6 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
}
-void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
-
-
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals.
// The context is the first argument.
@@ -878,7 +950,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
- __ Call(ic, RelocInfo::CODE_TARGET, clause->CompareId());
+ CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
patch_site.EmitPatchInfo();
__ Branch(&next_test, ne, v0, Operand(zero_reg));
@@ -928,7 +1000,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
Register null_value = t1;
__ LoadRoot(null_value, Heap::kNullValueRootIndex);
__ Branch(&exit, eq, a0, Operand(null_value));
-
+ PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
+ __ mov(a0, v0);
// Convert the object to a JS object.
Label convert, done_convert;
__ JumpIfSmi(a0, &convert);
@@ -951,44 +1024,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// the JSObject::IsSimpleEnum cache validity checks. If we cannot
// guarantee cache validity, call the runtime system to check cache
// validity or get the property names in a fixed array.
- Label next;
- // Preload a couple of values used in the loop.
- Register empty_fixed_array_value = t2;
- __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
- Register empty_descriptor_array_value = t3;
- __ LoadRoot(empty_descriptor_array_value,
- Heap::kEmptyDescriptorArrayRootIndex);
- __ mov(a1, a0);
- __ bind(&next);
-
- // Check that there are no elements. Register a1 contains the
- // current JS object we've reached through the prototype chain.
- __ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
- __ Branch(&call_runtime, ne, a2, Operand(empty_fixed_array_value));
-
- // Check that instance descriptors are not empty so that we can
- // check for an enum cache. Leave the map in a2 for the subsequent
- // prototype load.
- __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
- __ lw(a3, FieldMemOperand(a2, Map::kInstanceDescriptorsOrBitField3Offset));
- __ JumpIfSmi(a3, &call_runtime);
-
- // Check that there is an enum cache in the non-empty instance
- // descriptors (a3). This is the case if the next enumeration
- // index field does not contain a smi.
- __ lw(a3, FieldMemOperand(a3, DescriptorArray::kEnumerationIndexOffset));
- __ JumpIfSmi(a3, &call_runtime);
-
- // For all objects but the receiver, check that the cache is empty.
- Label check_prototype;
- __ Branch(&check_prototype, eq, a1, Operand(a0));
- __ lw(a3, FieldMemOperand(a3, DescriptorArray::kEnumCacheBridgeCacheOffset));
- __ Branch(&call_runtime, ne, a3, Operand(empty_fixed_array_value));
-
- // Load the prototype from the map and loop if non-null.
- __ bind(&check_prototype);
- __ lw(a1, FieldMemOperand(a2, Map::kPrototypeOffset));
- __ Branch(&next, ne, a1, Operand(null_value));
+ __ CheckEnumCache(null_value, &call_runtime);
// The enum cache is valid. Load the map of the object being
// iterated over and use the cache for the iteration.
@@ -1016,7 +1052,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ lw(a1, FieldMemOperand(a1, DescriptorArray::kEnumerationIndexOffset));
__ lw(a2, FieldMemOperand(a1, DescriptorArray::kEnumCacheBridgeCacheOffset));
- // Setup the four remaining stack slots.
+ // Set up the four remaining stack slots.
__ push(v0); // Map.
__ lw(a1, FieldMemOperand(a2, FixedArray::kLengthOffset));
__ li(a0, Operand(Smi::FromInt(0)));
@@ -1027,6 +1063,16 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// We got a fixed array in register v0. Iterate through that.
Label non_proxy;
__ bind(&fixed_array);
+
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(
+ Handle<Object>(
+ Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
+ RecordTypeFeedbackCell(stmt->PrepareId(), cell);
+ __ LoadHeapObject(a1, cell);
+ __ li(a2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
+ __ sw(a2, FieldMemOperand(a1, JSGlobalPropertyCell::kValueOffset));
+
__ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check
__ lw(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
@@ -1040,6 +1086,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ Push(a1, a0); // Fixed array length (as smi) and initial index.
// Generate code for doing the condition check.
+ PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
__ bind(&loop);
// Load the current count to a0, load the length to a1.
__ lw(a0, MemOperand(sp, 0 * kPointerSize));
@@ -1084,7 +1131,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ mov(result_register(), a3);
// Perform the assignment as if via '='.
{ EffectContext context(this);
- EmitAssignment(stmt->each(), stmt->AssignmentId());
+ EmitAssignment(stmt->each());
}
// Generate code for the body of the loop.
@@ -1097,7 +1144,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ Addu(a0, a0, Operand(Smi::FromInt(1)));
__ push(a0);
- EmitStackCheck(stmt);
+ EmitStackCheck(stmt, &loop);
__ Branch(&loop);
// Remove the pointers stored on the stack.
@@ -1105,6 +1152,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ Drop(5);
// Exit and decrement the loop depth.
+ PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
__ bind(&exit);
decrement_loop_depth();
}
@@ -1123,7 +1171,7 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
- FastNewClosureStub stub(info->strict_mode_flag());
+ FastNewClosureStub stub(info->language_mode());
__ li(a0, Operand(info));
__ push(a0);
__ CallStub(&stub);
@@ -1189,13 +1237,13 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
__ bind(&fast);
}
- __ lw(a0, var->is_qml_global() ? QmlGlobalObjectOperand():GlobalObjectOperand());
+ __ lw(a0, GlobalObjectOperand());
__ li(a2, Operand(var->name()));
RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
? RelocInfo::CODE_TARGET
: RelocInfo::CODE_TARGET_CONTEXT;
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ Call(ic, mode);
+ CallIC(ic, mode);
}
@@ -1251,7 +1299,7 @@ void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
__ subu(at, v0, at); // Sub as compare: at == 0 on eq.
if (local->mode() == CONST) {
__ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
- __ movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
+ __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
} else { // LET || CONST_HARMONY
__ Branch(done, ne, at, Operand(zero_reg));
__ li(a0, Operand(var->name()));
@@ -1276,10 +1324,10 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
Comment cmnt(masm_, "Global variable");
// Use inline caching. Variable name is passed in a2 and the global
// object (receiver) in a0.
- __ lw(a0, var->is_qml_global()?QmlGlobalObjectOperand():GlobalObjectOperand());
+ __ lw(a0, GlobalObjectOperand());
__ li(a2, Operand(var->name()));
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
context()->Plug(v0);
break;
}
@@ -1290,30 +1338,66 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
Comment cmnt(masm_, var->IsContextSlot()
? "Context variable"
: "Stack variable");
- if (!var->binding_needs_init()) {
- context()->Plug(var);
- } else {
- // Let and const need a read barrier.
- GetVar(v0, var);
- __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
- __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
- if (var->mode() == LET || var->mode() == CONST_HARMONY) {
- // Throw a reference error when using an uninitialized let/const
- // binding in harmony mode.
- Label done;
- __ Branch(&done, ne, at, Operand(zero_reg));
- __ li(a0, Operand(var->name()));
- __ push(a0);
- __ CallRuntime(Runtime::kThrowReferenceError, 1);
- __ bind(&done);
+ if (var->binding_needs_init()) {
+ // var->scope() may be NULL when the proxy is located in eval code and
+ // refers to a potential outside binding. Currently those bindings are
+ // always looked up dynamically, i.e. in that case
+ // var->location() == LOOKUP.
+ // always holds.
+ ASSERT(var->scope() != NULL);
+
+ // Check if the binding really needs an initialization check. The check
+ // can be skipped in the following situation: we have a LET or CONST
+ // binding in harmony mode, both the Variable and the VariableProxy have
+ // the same declaration scope (i.e. they are both in global code, in the
+ // same function or in the same eval code) and the VariableProxy is in
+ // the source physically located after the initializer of the variable.
+ //
+ // We cannot skip any initialization checks for CONST in non-harmony
+ // mode because const variables may be declared but never initialized:
+ // if (false) { const x; }; var y = x;
+ //
+ // The condition on the declaration scopes is a conservative check for
+ // nested functions that access a binding and are called before the
+ // binding is initialized:
+ // function() { f(); let x = 1; function f() { x = 2; } }
+ //
+ bool skip_init_check;
+ if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
+ skip_init_check = false;
} else {
- // Uninitalized const bindings outside of harmony mode are unholed.
- ASSERT(var->mode() == CONST);
- __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
- __ movz(v0, a0, at); // Conditional move: Undefined if TheHole.
+ // Check that we always have valid source position.
+ ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
+ ASSERT(proxy->position() != RelocInfo::kNoPosition);
+ skip_init_check = var->mode() != CONST &&
+ var->initializer_position() < proxy->position();
+ }
+
+ if (!skip_init_check) {
+ // Let and const need a read barrier.
+ GetVar(v0, var);
+ __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
+ __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
+ if (var->mode() == LET || var->mode() == CONST_HARMONY) {
+ // Throw a reference error when using an uninitialized let/const
+ // binding in harmony mode.
+ Label done;
+ __ Branch(&done, ne, at, Operand(zero_reg));
+ __ li(a0, Operand(var->name()));
+ __ push(a0);
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ __ bind(&done);
+ } else {
+ // Uninitalized const bindings outside of harmony mode are unholed.
+ ASSERT(var->mode() == CONST);
+ __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
+ __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
+ }
+ context()->Plug(v0);
+ break;
}
- context()->Plug(v0);
}
+ context()->Plug(var);
break;
}
@@ -1385,12 +1469,23 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
}
+void FullCodeGenerator::EmitAccessor(Expression* expression) {
+ if (expression == NULL) {
+ __ LoadRoot(a1, Heap::kNullValueRootIndex);
+ __ push(a1);
+ } else {
+ VisitForStackValue(expression);
+ }
+}
+
+
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Comment cmnt(masm_, "[ ObjectLiteral");
+ Handle<FixedArray> constant_properties = expr->constant_properties();
__ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
__ li(a2, Operand(Smi::FromInt(expr->literal_index())));
- __ li(a1, Operand(expr->constant_properties()));
+ __ li(a1, Operand(constant_properties));
int flags = expr->fast_elements()
? ObjectLiteral::kFastElements
: ObjectLiteral::kNoFlags;
@@ -1399,10 +1494,15 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
: ObjectLiteral::kNoFlags;
__ li(a0, Operand(Smi::FromInt(flags)));
__ Push(a3, a2, a1, a0);
+ int properties_count = constant_properties->length() / 2;
if (expr->depth() > 1) {
__ CallRuntime(Runtime::kCreateObjectLiteral, 4);
- } else {
+ } else if (flags != ObjectLiteral::kFastElements ||
+ properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
__ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
+ } else {
+ FastCloneShallowObjectStub stub(properties_count);
+ __ CallStub(&stub);
}
// If result_saved is true the result is on top of the stack. If
@@ -1414,6 +1514,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// marked expressions, no store code is emitted.
expr->CalculateEmitStore();
+ AccessorTable accessor_table(isolate()->zone());
for (int i = 0; i < expr->properties()->length(); i++) {
ObjectLiteral::Property* property = expr->properties()->at(i);
if (property->IsCompileTimeValue()) continue;
@@ -1437,10 +1538,10 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ mov(a0, result_register());
__ li(a2, Operand(key->handle()));
__ lw(a1, MemOperand(sp));
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, key->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, key->id());
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
@@ -1463,21 +1564,29 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
}
break;
case ObjectLiteral::Property::GETTER:
+ accessor_table.lookup(key)->second->getter = value;
+ break;
case ObjectLiteral::Property::SETTER:
- // Duplicate receiver on stack.
- __ lw(a0, MemOperand(sp));
- __ push(a0);
- VisitForStackValue(key);
- __ li(a1, Operand(property->kind() == ObjectLiteral::Property::SETTER ?
- Smi::FromInt(1) :
- Smi::FromInt(0)));
- __ push(a1);
- VisitForStackValue(value);
- __ CallRuntime(Runtime::kDefineAccessor, 4);
+ accessor_table.lookup(key)->second->setter = value;
break;
}
}
+ // Emit code to define accessors, using only a single call to the runtime for
+ // each pair of corresponding getters and setters.
+ for (AccessorTable::Iterator it = accessor_table.begin();
+ it != accessor_table.end();
+ ++it) {
+ __ lw(a0, MemOperand(sp)); // Duplicate receiver.
+ __ push(a0);
+ VisitForStackValue(it->first);
+ EmitAccessor(it->second->getter);
+ EmitAccessor(it->second->setter);
+ __ li(a0, Operand(Smi::FromInt(NONE)));
+ __ push(a0);
+ __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
+ }
+
if (expr->has_function()) {
ASSERT(result_saved);
__ lw(a0, MemOperand(sp));
@@ -1503,6 +1612,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
ASSERT_EQ(2, constant_elements->length());
ElementsKind constant_elements_kind =
static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
+ bool has_fast_elements = constant_elements_kind == FAST_ELEMENTS;
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
@@ -1512,7 +1622,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ li(a2, Operand(Smi::FromInt(expr->literal_index())));
__ li(a1, Operand(constant_elements));
__ Push(a3, a2, a1);
- if (constant_elements_values->map() ==
+ if (has_fast_elements && constant_elements_values->map() ==
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
@@ -1527,10 +1637,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
ASSERT(constant_elements_kind == FAST_ELEMENTS ||
constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
FLAG_smi_only_arrays);
- FastCloneShallowArrayStub::Mode mode =
- constant_elements_kind == FAST_DOUBLE_ELEMENTS
- ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
- : FastCloneShallowArrayStub::CLONE_ELEMENTS;
+ FastCloneShallowArrayStub::Mode mode = has_fast_elements
+ ? FastCloneShallowArrayStub::CLONE_ELEMENTS
+ : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
FastCloneShallowArrayStub stub(mode, length);
__ CallStub(&stub);
}
@@ -1552,63 +1661,30 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ push(v0);
result_saved = true;
}
+
VisitForAccumulatorValue(subexpr);
- __ lw(t6, MemOperand(sp)); // Copy of array literal.
- __ lw(a1, FieldMemOperand(t6, JSObject::kElementsOffset));
- __ lw(a2, FieldMemOperand(t6, JSObject::kMapOffset));
- int offset = FixedArray::kHeaderSize + (i * kPointerSize);
-
- Label element_done;
- Label double_elements;
- Label smi_element;
- Label slow_elements;
- Label fast_elements;
- __ CheckFastElements(a2, a3, &double_elements);
-
- // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
- __ JumpIfSmi(result_register(), &smi_element);
- __ CheckFastSmiOnlyElements(a2, a3, &fast_elements);
-
- // Store into the array literal requires a elements transition. Call into
- // the runtime.
- __ bind(&slow_elements);
- __ push(t6); // Copy of array literal.
- __ li(a1, Operand(Smi::FromInt(i)));
- __ li(a2, Operand(Smi::FromInt(NONE))); // PropertyAttributes
- __ li(a3, Operand(Smi::FromInt(strict_mode_flag()))); // Strict mode.
- __ Push(a1, result_register(), a2, a3);
- __ CallRuntime(Runtime::kSetProperty, 5);
- __ Branch(&element_done);
-
- // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
- __ bind(&double_elements);
- __ li(a3, Operand(Smi::FromInt(i)));
- __ StoreNumberToDoubleElements(result_register(), a3, t6, a1, t0, t1, t5,
- t3, &slow_elements);
- __ Branch(&element_done);
-
- // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
- __ bind(&fast_elements);
- __ sw(result_register(), FieldMemOperand(a1, offset));
- // Update the write barrier for the array store.
-
- __ RecordWriteField(
- a1, offset, result_register(), a2, kRAHasBeenSaved, kDontSaveFPRegs,
- EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
- __ Branch(&element_done);
-
- // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
- // FAST_ELEMENTS, and value is Smi.
- __ bind(&smi_element);
- __ sw(result_register(), FieldMemOperand(a1, offset));
- // Fall through
-
- __ bind(&element_done);
+ if (constant_elements_kind == FAST_ELEMENTS) {
+ int offset = FixedArray::kHeaderSize + (i * kPointerSize);
+ __ lw(t2, MemOperand(sp)); // Copy of array literal.
+ __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
+ __ sw(result_register(), FieldMemOperand(a1, offset));
+ // Update the write barrier for the array store.
+ __ RecordWriteField(a1, offset, result_register(), a2,
+ kRAHasBeenSaved, kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
+ } else {
+ __ lw(a1, MemOperand(sp)); // Copy of array literal.
+ __ lw(a2, FieldMemOperand(a1, JSObject::kMapOffset));
+ __ li(a3, Operand(Smi::FromInt(i)));
+ __ li(t0, Operand(Smi::FromInt(expr->literal_index())));
+ __ mov(a0, result_register());
+ StoreArrayLiteralElementStub stub;
+ __ CallStub(&stub);
+ }
PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
}
-
if (result_saved) {
context()->PlugTOS();
} else {
@@ -1738,7 +1814,7 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
__ li(a2, Operand(key->handle()));
// Call load IC. It has arguments receiver and property name a0 and a2.
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}
@@ -1747,7 +1823,7 @@ void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
__ mov(a0, result_register());
// Call keyed load IC. It has arguments key and receiver in a0 and a1.
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}
@@ -1775,7 +1851,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ bind(&stub_call);
BinaryOpStub stub(op, mode);
- __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
__ jmp(&done);
@@ -1858,13 +1934,13 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
__ pop(a1);
BinaryOpStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
- __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
context()->Plug(v0);
}
-void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
+void FullCodeGenerator::EmitAssignment(Expression* expr) {
// Invalid left-hand sides are rewritten to have a 'throw
// ReferenceError' on the left-hand side.
if (!expr->IsValidLeftHandSide()) {
@@ -1896,10 +1972,10 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
__ mov(a1, result_register());
__ pop(a0); // Restore value.
__ li(a2, Operand(prop->key()->AsLiteral()->handle()));
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ Call(ic);
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic);
break;
}
case KEYED_PROPERTY: {
@@ -1909,14 +1985,13 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
__ mov(a1, result_register());
__ pop(a2);
__ pop(a0); // Restore value.
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
- : isolate()->builtins()->KeyedStoreIC_Initialize();
- __ Call(ic);
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize()
+ : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
+ CallIC(ic);
break;
}
}
- PrepareForBailoutForId(bailout_ast_id, TOS_REG);
context()->Plug(v0);
}
@@ -1927,11 +2002,11 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// Global var, const, or let.
__ mov(a0, result_register());
__ li(a2, Operand(var->name()));
- __ lw(a1, var->is_qml_global()?QmlGlobalObjectOperand():GlobalObjectOperand());
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ __ lw(a1, GlobalObjectOperand());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (op == Token::INIT_CONST) {
// Const initializers need a write barrier.
@@ -1961,7 +2036,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
if (var->IsLookupSlot()) {
__ push(v0); // Value.
__ li(a1, Operand(var->name()));
- __ li(a0, Operand(Smi::FromInt(strict_mode_flag())));
+ __ li(a0, Operand(Smi::FromInt(language_mode())));
__ Push(cp, a1, a0); // Context, name, strict mode.
__ CallRuntime(Runtime::kStoreContextSlot, 4);
} else {
@@ -2009,7 +2084,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
ASSERT(var->IsLookupSlot());
__ push(v0); // Value.
__ li(a1, Operand(var->name()));
- __ li(a0, Operand(Smi::FromInt(strict_mode_flag())));
+ __ li(a0, Operand(Smi::FromInt(language_mode())));
__ Push(cp, a1, a0); // Context, name, strict mode.
__ CallRuntime(Runtime::kStoreContextSlot, 4);
}
@@ -2047,10 +2122,10 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
__ pop(a1);
}
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
@@ -2099,10 +2174,10 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
__ pop(a2);
}
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
- : isolate()->builtins()->KeyedStoreIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize()
+ : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
@@ -2137,6 +2212,14 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
}
+void FullCodeGenerator::CallIC(Handle<Code> code,
+ RelocInfo::Mode rmode,
+ unsigned ast_id) {
+ ic_total_count_++;
+ __ Call(code, rmode, ast_id);
+}
+
+
void FullCodeGenerator::EmitCallWithIC(Call* expr,
Handle<Object> name,
RelocInfo::Mode mode) {
@@ -2154,7 +2237,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
// Call the IC initialization code.
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- __ Call(ic, mode, expr->id());
+ CallIC(ic, mode, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2187,7 +2270,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
Handle<Code> ic =
isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
__ lw(a2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
- __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2207,6 +2290,7 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
// Record source position for debugger.
SetSourcePosition(expr->position());
CallFunctionStub stub(arg_count, flags);
+ __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
// Restore context register.
@@ -2224,21 +2308,19 @@ void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
}
__ push(a1);
- // Push the receiver of the enclosing function and do runtime call.
+ // Push the receiver of the enclosing function.
int receiver_offset = 2 + info_->scope()->num_parameters();
__ lw(a1, MemOperand(fp, receiver_offset * kPointerSize));
__ push(a1);
- // Push the strict mode flag. In harmony mode every eval call
- // is a strict mode eval call.
- StrictModeFlag strict_mode =
- FLAG_harmony_scoping ? kStrictMode : strict_mode_flag();
- __ li(a1, Operand(Smi::FromInt(strict_mode)));
+ // Push the language mode.
+ __ li(a1, Operand(Smi::FromInt(language_mode())));
__ push(a1);
- // Push the qml mode flag.
- __ li(a1, Operand(Smi::FromInt(is_qml_mode())));
+ // Push the start position of the scope the calls resides in.
+ __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
__ push(a1);
+ // Do the runtime call.
__ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
}
@@ -2287,6 +2369,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// Record source position for debugger.
SetSourcePosition(expr->position());
CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
+ __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
// Restore context register.
@@ -2294,7 +2377,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
context()->DropAndPlug(1, v0);
} else if (proxy != NULL && proxy->var()->IsUnallocated()) {
// Push global object as receiver for the call IC.
- __ lw(a0, proxy->var()->is_qml_global()?QmlGlobalObjectOperand():GlobalObjectOperand());
+ __ lw(a0, GlobalObjectOperand());
__ push(a0);
EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
} else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
@@ -2393,9 +2476,23 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ li(a0, Operand(arg_count));
__ lw(a1, MemOperand(sp, arg_count * kPointerSize));
- Handle<Code> construct_builtin =
- isolate()->builtins()->JSConstructCall();
- __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
+ // Record call targets in unoptimized code, but not in the snapshot.
+ CallFunctionFlags flags;
+ if (!Serializer::enabled()) {
+ flags = RECORD_CALL_TARGET;
+ Handle<Object> uninitialized =
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
+ RecordTypeFeedbackCell(expr->id(), cell);
+ __ li(a2, Operand(cell));
+ } else {
+ flags = NO_CALL_FUNCTION_FLAGS;
+ }
+
+ CallConstructStub stub(flags);
+ __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+ PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(v0);
}
@@ -2572,7 +2669,7 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
Label entry, loop;
// The use of t2 to store the valueOf symbol asumes that it is not otherwise
// used in the loop below.
- __ li(t2, Operand(FACTORY->value_of_symbol()));
+ __ LoadRoot(t2, Heap::kvalue_of_symbolRootIndex);
__ jmp(&entry);
__ bind(&loop);
__ lw(a3, MemOperand(t0, 0));
@@ -2880,7 +2977,7 @@ void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
__ Move(f14, zero_reg, a1);
// Subtract and store the result in the heap number.
__ sub_d(f0, f12, f14);
- __ sdc1(f0, MemOperand(s0, HeapNumber::kValueOffset - kHeapObjectTag));
+ __ sdc1(f0, FieldMemOperand(s0, HeapNumber::kValueOffset));
__ mov(v0, s0);
} else {
__ PrepareCallCFunction(2, a0);
@@ -2942,14 +3039,64 @@ void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
}
+void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT(args->length() == 2);
+ ASSERT_NE(NULL, args->at(1)->AsLiteral());
+ Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
+
+ VisitForAccumulatorValue(args->at(0)); // Load the object.
+
+ Label runtime, done;
+ Register object = v0;
+ Register result = v0;
+ Register scratch0 = t5;
+ Register scratch1 = a1;
+
+#ifdef DEBUG
+ __ AbortIfSmi(object);
+ __ GetObjectType(object, scratch1, scratch1);
+ __ Assert(eq, "Trying to get date field from non-date.",
+ scratch1, Operand(JS_DATE_TYPE));
+#endif
+
+ if (index->value() == 0) {
+ __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
+ } else {
+ if (index->value() < JSDate::kFirstUncachedField) {
+ ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
+ __ li(scratch1, Operand(stamp));
+ __ lw(scratch1, MemOperand(scratch1));
+ __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
+ __ Branch(&runtime, ne, scratch1, Operand(scratch0));
+ __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
+ kPointerSize * index->value()));
+ __ jmp(&done);
+ }
+ __ bind(&runtime);
+ __ PrepareCallCFunction(2, scratch1);
+ __ li(a1, Operand(index));
+ __ Move(a0, object);
+ __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
+ __ bind(&done);
+ }
+
+ context()->Plug(v0);
+}
+
+
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
// Load the arguments on the stack and call the runtime function.
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- MathPowStub stub;
- __ CallStub(&stub);
+ if (CpuFeatures::IsSupported(FPU)) {
+ MathPowStub stub(MathPowStub::ON_STACK);
+ __ CallStub(&stub);
+ } else {
+ __ CallRuntime(Runtime::kMath_pow, 2);
+ }
context()->Plug(v0);
}
@@ -3025,7 +3172,6 @@ void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
Register object = a1;
Register index = a0;
- Register scratch = a2;
Register result = v0;
__ pop(object);
@@ -3035,7 +3181,6 @@ void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
Label done;
StringCharCodeAtGenerator generator(object,
index,
- scratch,
result,
&need_conversion,
&need_conversion,
@@ -3074,8 +3219,7 @@ void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
Register object = a1;
Register index = a0;
- Register scratch1 = a2;
- Register scratch2 = a3;
+ Register scratch = a3;
Register result = v0;
__ pop(object);
@@ -3085,8 +3229,7 @@ void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
Label done;
StringCharAtGenerator generator(object,
index,
- scratch1,
- scratch2,
+ scratch,
result,
&need_conversion,
&need_conversion,
@@ -3166,6 +3309,19 @@ void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
}
+void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
+ // Load the argument on the stack and call the stub.
+ TranscendentalCacheStub stub(TranscendentalCache::TAN,
+ TranscendentalCacheStub::TAGGED);
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT(args->length() == 1);
+ VisitForStackValue(args->at(0));
+ __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos.
+ __ CallStub(&stub);
+ context()->Plug(v0);
+}
+
+
void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
// Load the argument on the stack and call the stub.
TranscendentalCacheStub stub(TranscendentalCache::LOG,
@@ -3199,12 +3355,24 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
}
VisitForAccumulatorValue(args->last()); // Function.
+ // Check for proxy.
+ Label proxy, done;
+ __ GetObjectType(v0, a1, a1);
+ __ Branch(&proxy, eq, a1, Operand(JS_FUNCTION_PROXY_TYPE));
+
// InvokeFunction requires the function in a1. Move it in there.
__ mov(a1, result_register());
ParameterCount count(arg_count);
__ InvokeFunction(a1, count, CALL_FUNCTION,
NullCallWrapper(), CALL_AS_METHOD);
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ __ jmp(&done);
+
+ __ bind(&proxy);
+ __ push(v0);
+ __ CallRuntime(Runtime::kCall, args->length());
+ __ bind(&done);
+
context()->Plug(v0);
}
@@ -3390,8 +3558,7 @@ void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
__ Branch(&ok, eq, left, Operand(right));
// Fail if either is a non-HeapObject.
__ And(tmp, left, Operand(right));
- __ And(at, tmp, Operand(kSmiTagMask));
- __ Branch(&fail, eq, at, Operand(zero_reg));
+ __ JumpIfSmi(tmp, &fail);
__ lw(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
__ lbu(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
__ Branch(&fail, ne, tmp2, Operand(JS_REGEXP_TYPE));
@@ -3621,7 +3788,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// One-character separator case.
__ bind(&one_char_separator);
- // Replace separator with its ascii character value.
+ // Replace separator with its ASCII character value.
__ lbu(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
// Jump into the loop after the code that copies the separator, so the first
// element is not preceded by a separator.
@@ -3632,7 +3799,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// result_pos: the position to which we are currently copying characters.
// element: Current array element.
// elements_end: Array end.
- // separator: Single separator ascii char (in lower byte).
+ // separator: Single separator ASCII char (in lower byte).
// Copy the separator character to the result.
__ sb(separator, MemOperand(result_pos));
@@ -3717,7 +3884,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- __ Call(ic, mode, expr->id());
+ CallIC(ic, mode, expr->id());
// Restore context register.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
} else {
@@ -3738,7 +3905,9 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
if (property != NULL) {
VisitForStackValue(property->obj());
VisitForStackValue(property->key());
- __ li(a1, Operand(Smi::FromInt(strict_mode_flag())));
+ StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
+ ? kNonStrictMode : kStrictMode;
+ __ li(a1, Operand(Smi::FromInt(strict_mode_flag)));
__ push(a1);
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(v0);
@@ -3746,9 +3915,9 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
Variable* var = proxy->var();
// Delete of an unqualified identifier is disallowed in strict mode
// but "delete this" is allowed.
- ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
+ ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
if (var->IsUnallocated()) {
- __ lw(a2, var->is_qml_global() ? QmlGlobalObjectOperand() : GlobalObjectOperand());
+ __ lw(a2, GlobalObjectOperand());
__ li(a1, Operand(var->name()));
__ li(a0, Operand(Smi::FromInt(kNonStrictMode)));
__ Push(a2, a1, a0);
@@ -3871,7 +4040,7 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
VisitForAccumulatorValue(expr->expression());
SetSourcePosition(expr->position());
__ mov(a0, result_register());
- __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
context()->Plug(v0);
}
@@ -3982,7 +4151,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
SetSourcePosition(expr->position());
BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
- __ Call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
patch_site.EmitPatchInfo();
__ bind(&done);
@@ -4012,10 +4181,10 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ mov(a0, result_register()); // Value.
__ li(a2, Operand(prop->key()->AsLiteral()->handle())); // Name.
__ pop(a1); // Receiver.
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -4030,10 +4199,10 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ mov(a0, result_register()); // Value.
__ pop(a1); // Key.
__ pop(a2); // Receiver.
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
- : isolate()->builtins()->KeyedStoreIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize()
+ : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -4054,12 +4223,12 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
VariableProxy* proxy = expr->AsVariableProxy();
if (proxy != NULL && proxy->var()->IsUnallocated()) {
Comment cmnt(masm_, "Global variable");
- __ lw(a0, proxy->var()->is_qml_global() ? QmlGlobalObjectOperand() : GlobalObjectOperand());
+ __ lw(a0, GlobalObjectOperand());
__ li(a2, Operand(proxy->name()));
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
// Use a regular load, not a contextual load, to avoid a reference
// error.
- __ Call(ic);
+ CallIC(ic);
PrepareForBailout(expr, TOS_REG);
context()->Plug(v0);
} else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
@@ -4237,7 +4406,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
- __ Call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
@@ -4276,8 +4445,7 @@ void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
__ Branch(if_true, eq, a0, Operand(a1));
__ LoadRoot(a1, other_nil_value);
__ Branch(if_true, eq, a0, Operand(a1));
- __ And(at, a0, Operand(kSmiTagMask));
- __ Branch(if_false, eq, at, Operand(zero_reg));
+ __ JumpIfSmi(a0, if_false);
// It can be an undetectable object.
__ lw(a1, FieldMemOperand(a0, HeapObject::kMapOffset));
__ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
diff --git a/src/3rdparty/v8/src/mips/ic-mips.cc b/src/3rdparty/v8/src/mips/ic-mips.cc
index ca6383c..2c4da1a 100644
--- a/src/3rdparty/v8/src/mips/ic-mips.cc
+++ b/src/3rdparty/v8/src/mips/ic-mips.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -401,7 +401,7 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
NORMAL,
argc);
Isolate::Current()->stub_cache()->GenerateProbe(
- masm, flags, a1, a2, a3, t0, t1);
+ masm, flags, a1, a2, a3, t0, t1, t2);
// If the stub cache probing failed, the receiver might be a value.
// For value objects, we use the map of the prototype objects for
@@ -437,7 +437,7 @@ void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
// Probe the stub cache for the value object.
__ bind(&probe);
Isolate::Current()->stub_cache()->GenerateProbe(
- masm, flags, a1, a2, a3, t0, t1);
+ masm, flags, a1, a2, a3, t0, t1, t2);
__ bind(&miss);
}
@@ -512,8 +512,8 @@ void CallICBase::GenerateMiss(MacroAssembler* masm,
__ Push(a3, a2);
// Call the entry.
- __ li(a0, Operand(2));
- __ li(a1, Operand(ExternalReference(IC_Utility(id), isolate)));
+ __ PrepareCEntryArgs(2);
+ __ PrepareCEntryFunction(ExternalReference(IC_Utility(id), isolate));
CEntryStub stub(1);
__ CallStub(&stub);
@@ -527,8 +527,7 @@ void CallICBase::GenerateMiss(MacroAssembler* masm,
if (id == IC::kCallIC_Miss) {
Label invoke, global;
__ lw(a2, MemOperand(sp, argc * kPointerSize));
- __ andi(t0, a2, kSmiTagMask);
- __ Branch(&invoke, eq, t0, Operand(zero_reg));
+ __ JumpIfSmi(a2, &invoke);
__ GetObjectType(a2, a3, a3);
__ Branch(&global, eq, a3, Operand(JS_GLOBAL_OBJECT_TYPE));
__ Branch(&invoke, ne, a3, Operand(JS_BUILTINS_OBJECT_TYPE));
@@ -703,7 +702,7 @@ void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
// Probe the stub cache.
Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
Isolate::Current()->stub_cache()->GenerateProbe(
- masm, flags, a0, a2, a3, t0, t1);
+ masm, flags, a0, a2, a3, t0, t1, t2);
// Cache miss: Jump to runtime.
GenerateMiss(masm);
@@ -759,8 +758,6 @@ static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
Register scratch3,
Label* unmapped_case,
Label* slow_case) {
- Heap* heap = masm->isolate()->heap();
-
// Check that the receiver is a JSObject. Because of the map check
// later, we do not need to check for interceptors or whether it
// requires access checks.
@@ -774,10 +771,12 @@ static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
__ Branch(slow_case, ne, scratch1, Operand(zero_reg));
// Load the elements into scratch1 and check its map.
- Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
__ lw(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));
- __ CheckMap(scratch1, scratch2, arguments_map, slow_case, DONT_DO_SMI_CHECK);
-
+ __ CheckMap(scratch1,
+ scratch2,
+ Heap::kNonStrictArgumentsElementsMapRootIndex,
+ slow_case,
+ DONT_DO_SMI_CHECK);
// Check if element is in the range of mapped arguments. If not, jump
// to the unmapped lookup with the parameter map in scratch1.
__ lw(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
@@ -789,7 +788,7 @@ static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;
__ li(scratch3, Operand(kPointerSize >> 1));
- __ mul(scratch3, key, scratch3);
+ __ Mul(scratch3, key, scratch3);
__ Addu(scratch3, scratch3, Operand(kOffset));
__ Addu(scratch2, scratch1, scratch3);
@@ -802,7 +801,7 @@ static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
// map in scratch1).
__ lw(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
__ li(scratch3, Operand(kPointerSize >> 1));
- __ mul(scratch3, scratch2, scratch3);
+ __ Mul(scratch3, scratch2, scratch3);
__ Addu(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag));
__ Addu(scratch2, scratch1, scratch3);
return MemOperand(scratch2);
@@ -821,13 +820,15 @@ static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
Register backing_store = parameter_map;
__ lw(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
- Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
- __ CheckMap(backing_store, scratch, fixed_array_map, slow_case,
+ __ CheckMap(backing_store,
+ scratch,
+ Heap::kFixedArrayMapRootIndex,
+ slow_case,
DONT_DO_SMI_CHECK);
__ lw(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
__ Branch(slow_case, Ugreater_equal, key, Operand(scratch));
__ li(scratch, Operand(kPointerSize >> 1));
- __ mul(scratch, key, scratch);
+ __ Mul(scratch, key, scratch);
__ Addu(scratch,
scratch,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
@@ -845,8 +846,8 @@ void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
Label slow, notin;
MemOperand mapped_location =
GenerateMappedArgumentsLookup(masm, a1, a0, a2, a3, t0, &notin, &slow);
+ __ Ret(USE_DELAY_SLOT);
__ lw(v0, mapped_location);
- __ Ret();
__ bind(&notin);
// The unmapped lookup expects that the parameter map is in a2.
MemOperand unmapped_location =
@@ -854,8 +855,8 @@ void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
__ lw(a2, unmapped_location);
__ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
__ Branch(&slow, eq, a2, Operand(a3));
+ __ Ret(USE_DELAY_SLOT);
__ mov(v0, a2);
- __ Ret();
__ bind(&slow);
GenerateMiss(masm, false);
}
@@ -869,22 +870,26 @@ void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
// -- lr : return address
// -----------------------------------
Label slow, notin;
+ // Store address is returned in register (of MemOperand) mapped_location.
MemOperand mapped_location =
GenerateMappedArgumentsLookup(masm, a2, a1, a3, t0, t1, &notin, &slow);
__ sw(a0, mapped_location);
- __ Addu(t2, a3, t1);
__ mov(t5, a0);
- __ RecordWrite(a3, t2, t5, kRAHasNotBeenSaved, kDontSaveFPRegs);
+ ASSERT_EQ(mapped_location.offset(), 0);
+ __ RecordWrite(a3, mapped_location.rm(), t5,
+ kRAHasNotBeenSaved, kDontSaveFPRegs);
__ Ret(USE_DELAY_SLOT);
__ mov(v0, a0); // (In delay slot) return the value stored in v0.
__ bind(&notin);
// The unmapped lookup expects that the parameter map is in a3.
+ // Store address is returned in register (of MemOperand) unmapped_location.
MemOperand unmapped_location =
GenerateUnmappedArgumentsLookup(masm, a1, a3, t0, &slow);
__ sw(a0, unmapped_location);
- __ Addu(t2, a3, t0);
__ mov(t5, a0);
- __ RecordWrite(a3, t2, t5, kRAHasNotBeenSaved, kDontSaveFPRegs);
+ ASSERT_EQ(unmapped_location.offset(), 0);
+ __ RecordWrite(a3, unmapped_location.rm(), t5,
+ kRAHasNotBeenSaved, kDontSaveFPRegs);
__ Ret(USE_DELAY_SLOT);
__ mov(v0, a0); // (In delay slot) return the value stored in v0.
__ bind(&slow);
@@ -1030,19 +1035,32 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ lw(t0, FieldMemOperand(a0, String::kHashFieldOffset));
__ sra(at, t0, String::kHashShift);
__ xor_(a3, a3, at);
- __ And(a3, a3, Operand(KeyedLookupCache::kCapacityMask));
+ int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
+ __ And(a3, a3, Operand(mask));
// Load the key (consisting of map and symbol) from the cache and
// check for match.
+ Label load_in_object_property;
+ static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
+ Label hit_on_nth_entry[kEntriesPerBucket];
ExternalReference cache_keys =
ExternalReference::keyed_lookup_cache_keys(isolate);
__ li(t0, Operand(cache_keys));
__ sll(at, a3, kPointerSizeLog2 + 1);
__ addu(t0, t0, at);
- __ lw(t1, MemOperand(t0)); // Move t0 to symbol.
- __ Addu(t0, t0, Operand(kPointerSize));
+
+ for (int i = 0; i < kEntriesPerBucket - 1; i++) {
+ Label try_next_entry;
+ __ lw(t1, MemOperand(t0, kPointerSize * i * 2));
+ __ Branch(&try_next_entry, ne, a2, Operand(t1));
+ __ lw(t1, MemOperand(t0, kPointerSize * (i * 2 + 1)));
+ __ Branch(&hit_on_nth_entry[i], eq, a0, Operand(t1));
+ __ bind(&try_next_entry);
+ }
+
+ __ lw(t1, MemOperand(t0, kPointerSize * (kEntriesPerBucket - 1) * 2));
__ Branch(&slow, ne, a2, Operand(t1));
- __ lw(t1, MemOperand(t0));
+ __ lw(t1, MemOperand(t0, kPointerSize * ((kEntriesPerBucket - 1) * 2 + 1)));
__ Branch(&slow, ne, a0, Operand(t1));
// Get field offset.
@@ -1052,15 +1070,24 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// a3 : lookup cache index
ExternalReference cache_field_offsets =
ExternalReference::keyed_lookup_cache_field_offsets(isolate);
- __ li(t0, Operand(cache_field_offsets));
- __ sll(at, a3, kPointerSizeLog2);
- __ addu(at, t0, at);
- __ lw(t1, MemOperand(at));
- __ lbu(t2, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
- __ Subu(t1, t1, t2);
- __ Branch(&property_array_property, ge, t1, Operand(zero_reg));
+
+ // Hit on nth entry.
+ for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
+ __ bind(&hit_on_nth_entry[i]);
+ __ li(t0, Operand(cache_field_offsets));
+ __ sll(at, a3, kPointerSizeLog2);
+ __ addu(at, t0, at);
+ __ lw(t1, MemOperand(at, kPointerSize * i));
+ __ lbu(t2, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
+ __ Subu(t1, t1, t2);
+ __ Branch(&property_array_property, ge, t1, Operand(zero_reg));
+ if (i != 0) {
+ __ Branch(&load_in_object_property);
+ }
+ }
// Load in-object property.
+ __ bind(&load_in_object_property);
__ lbu(t2, FieldMemOperand(a2, Map::kInstanceSizeOffset));
__ addu(t2, t2, t1); // Index from start of object.
__ Subu(a1, a1, Operand(kHeapObjectTag)); // Remove the heap tag.
@@ -1121,14 +1148,12 @@ void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
Register receiver = a1;
Register index = a0;
- Register scratch1 = a2;
- Register scratch2 = a3;
+ Register scratch = a3;
Register result = v0;
StringCharAtGenerator char_at_generator(receiver,
index,
- scratch1,
- scratch2,
+ scratch,
result,
&miss, // When not a string.
&miss, // When not a number.
@@ -1175,14 +1200,16 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
Label slow, array, extra, check_if_double_array;
Label fast_object_with_map_check, fast_object_without_map_check;
Label fast_double_with_map_check, fast_double_without_map_check;
+ Label transition_smi_elements, finish_object_store, non_double_value;
+ Label transition_double_elements;
// Register usage.
Register value = a0;
Register key = a1;
Register receiver = a2;
- Register elements = a3; // Elements array of the receiver.
+ Register receiver_map = a3;
Register elements_map = t2;
- Register receiver_map = t3;
+ Register elements = t3; // Elements array of the receiver.
// t0 and t1 are used as general scratch registers.
// Check that the key is a smi.
@@ -1228,8 +1255,9 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
__ Branch(&slow, hs, key, Operand(t0));
__ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
- __ Branch(&check_if_double_array, ne, elements_map,
- Operand(masm->isolate()->factory()->fixed_array_map()));
+ __ Branch(
+ &check_if_double_array, ne, elements_map, Heap::kFixedArrayMapRootIndex);
+
// Calculate key + 1 as smi.
STATIC_ASSERT(kSmiTag == 0);
__ Addu(t0, key, Operand(Smi::FromInt(1)));
@@ -1237,8 +1265,7 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ Branch(&fast_object_without_map_check);
__ bind(&check_if_double_array);
- __ Branch(&slow, ne, elements_map,
- Operand(masm->isolate()->factory()->fixed_double_array_map()));
+ __ Branch(&slow, ne, elements_map, Heap::kFixedDoubleArrayMapRootIndex);
// Add 1 to key, and go to common element store code for doubles.
STATIC_ASSERT(kSmiTag == 0);
__ Addu(t0, key, Operand(Smi::FromInt(1)));
@@ -1260,8 +1287,10 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
Register scratch_value = t0;
Register address = t1;
__ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
- __ Branch(&fast_double_with_map_check, ne, elements_map,
- Operand(masm->isolate()->factory()->fixed_array_map()));
+ __ Branch(&fast_double_with_map_check,
+ ne,
+ elements_map,
+ Heap::kFixedArrayMapRootIndex);
__ bind(&fast_object_without_map_check);
// Smi stores don't require further checks.
Label non_smi_value;
@@ -1275,9 +1304,11 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ mov(v0, value);
__ bind(&non_smi_value);
- // Escape to slow case when writing non-smi into smi-only array.
- __ CheckFastObjectElements(receiver_map, scratch_value, &slow);
+ // Escape to elements kind transition case.
+ __ CheckFastObjectElements(receiver_map, scratch_value,
+ &transition_smi_elements);
// Fast elements array, store the value to the elements backing store.
+ __ bind(&finish_object_store);
__ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize);
__ Addu(address, address, scratch_value);
@@ -1296,20 +1327,63 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ bind(&fast_double_with_map_check);
// Check for fast double array case. If this fails, call through to the
// runtime.
- __ Branch(&slow, ne, elements_map,
- Operand(masm->isolate()->factory()->fixed_double_array_map()));
+ __ Branch(&slow, ne, elements_map, Heap::kFixedDoubleArrayMapRootIndex);
__ bind(&fast_double_without_map_check);
__ StoreNumberToDoubleElements(value,
key,
receiver,
elements,
+ a3,
t0,
t1,
t2,
- t3,
- &slow);
+ &transition_double_elements);
__ Ret(USE_DELAY_SLOT);
__ mov(v0, value);
+
+ __ bind(&transition_smi_elements);
+ // Transition the array appropriately depending on the value type.
+ __ lw(t0, FieldMemOperand(value, HeapObject::kMapOffset));
+ __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
+ __ Branch(&non_double_value, ne, t0, Operand(at));
+
+ // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
+ // FAST_DOUBLE_ELEMENTS and complete the store.
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_DOUBLE_ELEMENTS,
+ receiver_map,
+ t0,
+ &slow);
+ ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
+ ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
+ __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ jmp(&fast_double_without_map_check);
+
+ __ bind(&non_double_value);
+ // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ receiver_map,
+ t0,
+ &slow);
+ ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
+ ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
+ __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ jmp(&finish_object_store);
+
+ __ bind(&transition_double_elements);
+ // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
+ // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
+ // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
+ __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
+ FAST_ELEMENTS,
+ receiver_map,
+ t0,
+ &slow);
+ ASSERT(receiver_map.is(a3)); // Transition code expects map in a3
+ ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
+ __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
+ __ jmp(&finish_object_store);
}
@@ -1442,7 +1516,7 @@ void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
Code::Flags flags =
Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC, strict_mode);
Isolate::Current()->stub_cache()->GenerateProbe(
- masm, flags, a1, a2, a3, t0, t1);
+ masm, flags, a1, a2, a3, t0, t1, t2);
// Cache miss: Jump to runtime.
GenerateMiss(masm);
@@ -1473,11 +1547,10 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
// -- ra : return address
// -----------------------------------
//
- // This accepts as a receiver anything JSObject::SetElementsLength accepts
- // (currently anything except for external and pixel arrays which means
- // anything with elements of FixedArray type.), but currently is restricted
- // to JSArray.
- // Value must be a number, but only smis are accepted as the most common case.
+ // This accepts as a receiver anything JSArray::SetElementsLength accepts
+ // (currently anything except for external arrays which means anything with
+ // elements of FixedArray type). Value must be a number, but only smis are
+ // accepted as the most common case.
Label miss;
@@ -1499,6 +1572,13 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
__ GetObjectType(scratch, scratch, scratch);
__ Branch(&miss, ne, scratch, Operand(FIXED_ARRAY_TYPE));
+ // Check that the array has fast properties, otherwise the length
+ // property might have been redefined.
+ __ lw(scratch, FieldMemOperand(receiver, JSArray::kPropertiesOffset));
+ __ lw(scratch, FieldMemOperand(scratch, FixedArray::kMapOffset));
+ __ LoadRoot(at, Heap::kHashTableMapRootIndex);
+ __ Branch(&miss, eq, scratch, Operand(at));
+
// Check that value is a smi.
__ JumpIfNotSmi(value, &miss);
@@ -1590,6 +1670,9 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
rewritten = stub.GetCode();
} else {
ICCompareStub stub(op_, state);
+ if (state == KNOWN_OBJECTS) {
+ stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
+ }
rewritten = stub.GetCode();
}
set_target(*rewritten);
diff --git a/src/3rdparty/v8/src/mips/lithium-codegen-mips.cc b/src/3rdparty/v8/src/mips/lithium-codegen-mips.cc
index e640b53..e4de40f 100644
--- a/src/3rdparty/v8/src/mips/lithium-codegen-mips.cc
+++ b/src/3rdparty/v8/src/mips/lithium-codegen-mips.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -40,44 +40,29 @@ class SafepointGenerator : public CallWrapper {
public:
SafepointGenerator(LCodeGen* codegen,
LPointerMap* pointers,
- int deoptimization_index)
+ Safepoint::DeoptMode mode)
: codegen_(codegen),
pointers_(pointers),
- deoptimization_index_(deoptimization_index) { }
+ deopt_mode_(mode) { }
virtual ~SafepointGenerator() { }
- virtual void BeforeCall(int call_size) const {
- ASSERT(call_size >= 0);
- // Ensure that we have enough space after the previous safepoint position
- // for the generated code there.
- int call_end = codegen_->masm()->pc_offset() + call_size;
- int prev_jump_end =
- codegen_->LastSafepointEnd() + Deoptimizer::patch_size();
- if (call_end < prev_jump_end) {
- int padding_size = prev_jump_end - call_end;
- ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
- while (padding_size > 0) {
- codegen_->masm()->nop();
- padding_size -= Assembler::kInstrSize;
- }
- }
- }
+ virtual void BeforeCall(int call_size) const { }
virtual void AfterCall() const {
- codegen_->RecordSafepoint(pointers_, deoptimization_index_);
+ codegen_->RecordSafepoint(pointers_, deopt_mode_);
}
private:
LCodeGen* codegen_;
LPointerMap* pointers_;
- int deoptimization_index_;
+ Safepoint::DeoptMode deopt_mode_;
};
#define __ masm()->
bool LCodeGen::GenerateCode() {
- HPhase phase("Code generation", chunk());
+ HPhase phase("Z_Code generation", chunk());
ASSERT(is_unused());
status_ = GENERATING;
CpuFeatures::Scope scope(FPU);
@@ -101,7 +86,6 @@ void LCodeGen::FinishCode(Handle<Code> code) {
code->set_stack_slots(GetStackSlotCount());
code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
PopulateDeoptimizationData(code);
- Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
}
@@ -157,7 +141,7 @@ bool LCodeGen::GeneratePrologue() {
// with undefined when called as functions (without an explicit
// receiver object). r5 is zero for method calls and non-zero for
// function calls.
- if (info_->is_strict_mode() || info_->is_native()) {
+ if (!info_->is_classic_mode() || info_->is_native()) {
Label ok;
__ Branch(&ok, eq, t1, Operand(zero_reg));
@@ -198,7 +182,7 @@ bool LCodeGen::GeneratePrologue() {
} else {
__ CallRuntime(Runtime::kNewFunctionContext, 1);
}
- RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
+ RecordSafepoint(Safepoint::kNoLazyDeopt);
// Context is returned in both v0 and cp. It replaces the context
// passed to us. It's saved in the stack and kept live in cp.
__ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -226,6 +210,7 @@ bool LCodeGen::GeneratePrologue() {
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceEnter, 0);
}
+ EnsureSpaceForLazyDeopt();
return !is_aborted();
}
@@ -251,15 +236,6 @@ bool LCodeGen::GenerateBody() {
}
-LInstruction* LCodeGen::GetNextInstruction() {
- if (current_instruction_ < instructions_->length() - 1) {
- return instructions_->at(current_instruction_ + 1);
- } else {
- return NULL;
- }
-}
-
-
bool LCodeGen::GenerateDeferredCode() {
ASSERT(is_generating());
if (deferred_.length() > 0) {
@@ -272,13 +248,6 @@ bool LCodeGen::GenerateDeferredCode() {
code->Generate();
__ jmp(code->exit());
}
-
- // Pad code to ensure that the last piece of deferred code have
- // room for lazy bailout.
- while ((masm()->pc_offset() - LastSafepointEnd())
- < Deoptimizer::patch_size()) {
- __ nop();
- }
}
// Deferred code is the last part of the instruction sequence. Mark
// the generated code as done unless we bailed out.
@@ -322,7 +291,22 @@ Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
if (op->IsRegister()) {
return ToRegister(op->index());
} else if (op->IsConstantOperand()) {
- __ li(scratch, ToOperand(op));
+ LConstantOperand* const_op = LConstantOperand::cast(op);
+ Handle<Object> literal = chunk_->LookupLiteral(const_op);
+ Representation r = chunk_->LookupLiteralRepresentation(const_op);
+ if (r.IsInteger32()) {
+ ASSERT(literal->IsNumber());
+ __ li(scratch, Operand(static_cast<int32_t>(literal->Number())));
+ } else if (r.IsDouble()) {
+ Abort("EmitLoadRegister: Unsupported double immediate.");
+ } else {
+ ASSERT(r.IsTagged());
+ if (literal->IsSmi()) {
+ __ li(scratch, Operand(literal));
+ } else {
+ __ LoadHeapObject(scratch, Handle<HeapObject>::cast(literal));
+ }
+ }
return scratch;
} else if (op->IsStackSlot() || op->IsArgument()) {
__ lw(scratch, ToMemOperand(op));
@@ -369,6 +353,18 @@ DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
}
+Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
+ Handle<Object> literal = chunk_->LookupLiteral(op);
+ ASSERT(chunk_->LookupLiteralRepresentation(op).IsTagged());
+ return literal;
+}
+
+
+bool LCodeGen::IsInteger32(LConstantOperand* op) const {
+ return chunk_->LookupLiteralRepresentation(op).IsInteger32();
+}
+
+
int LCodeGen::ToInteger32(LConstantOperand* op) const {
Handle<Object> value = chunk_->LookupLiteral(op);
ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
@@ -451,7 +447,19 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
WriteTranslation(environment->outer(), translation);
int closure_id = DefineDeoptimizationLiteral(environment->closure());
- translation->BeginFrame(environment->ast_id(), closure_id, height);
+ switch (environment->frame_type()) {
+ case JS_FUNCTION:
+ translation->BeginJSFrame(environment->ast_id(), closure_id, height);
+ break;
+ case JS_CONSTRUCT:
+ translation->BeginConstructStubFrame(closure_id, translation_size);
+ break;
+ case ARGUMENTS_ADAPTOR:
+ translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
+ break;
+ default:
+ UNREACHABLE();
+ }
for (int i = 0; i < translation_size; ++i) {
LOperand* value = environment->values()->at(i);
// spilled_registers_ and spilled_double_registers_ are either
@@ -534,7 +542,7 @@ void LCodeGen::CallCodeGeneric(Handle<Code> code,
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
__ Call(code, mode);
- RegisterLazyDeoptimization(instr, safepoint_mode);
+ RecordSafepointWithLazyDeopt(instr, safepoint_mode);
}
@@ -547,7 +555,7 @@ void LCodeGen::CallRuntime(const Runtime::Function* function,
RecordPosition(pointers->position());
__ CallRuntime(function, num_arguments);
- RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
+ RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
}
@@ -556,37 +564,12 @@ void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
LInstruction* instr) {
__ CallRuntimeSaveDoubles(id);
RecordSafepointWithRegisters(
- instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
+ instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
}
-void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
- SafepointMode safepoint_mode) {
- // Create the environment to bailout to. If the call has side effects
- // execution has to continue after the call otherwise execution can continue
- // from a previous bailout point repeating the call.
- LEnvironment* deoptimization_environment;
- if (instr->HasDeoptimizationEnvironment()) {
- deoptimization_environment = instr->deoptimization_environment();
- } else {
- deoptimization_environment = instr->environment();
- }
-
- RegisterEnvironmentForDeoptimization(deoptimization_environment);
- if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
- RecordSafepoint(instr->pointer_map(),
- deoptimization_environment->deoptimization_index());
- } else {
- ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
- RecordSafepointWithRegisters(
- instr->pointer_map(),
- 0,
- deoptimization_environment->deoptimization_index());
- }
-}
-
-
-void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
+void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
+ Safepoint::DeoptMode mode) {
if (!environment->HasBeenRegistered()) {
// Physical stack frame layout:
// -x ............. -4 0 ..................................... y
@@ -602,13 +585,20 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
// |>------------ translation_size ------------<|
int frame_count = 0;
+ int jsframe_count = 0;
for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
++frame_count;
+ if (e->frame_type() == JS_FUNCTION) {
+ ++jsframe_count;
+ }
}
- Translation translation(&translations_, frame_count);
+ Translation translation(&translations_, frame_count, jsframe_count);
WriteTranslation(environment, &translation);
int deoptimization_index = deoptimizations_.length();
- environment->Register(deoptimization_index, translation.index());
+ int pc_offset = masm()->pc_offset();
+ environment->Register(deoptimization_index,
+ translation.index(),
+ (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
deoptimizations_.Add(environment);
}
}
@@ -618,11 +608,10 @@ void LCodeGen::DeoptimizeIf(Condition cc,
LEnvironment* environment,
Register src1,
const Operand& src2) {
- RegisterEnvironmentForDeoptimization(environment);
+ RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
ASSERT(environment->HasBeenRegistered());
int id = environment->deoptimization_index();
Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
- ASSERT(entry != NULL);
if (entry == NULL) {
Abort("bailout was not prepared");
return;
@@ -645,20 +634,15 @@ void LCodeGen::DeoptimizeIf(Condition cc,
__ bind(&skip);
}
- if (cc == al) {
- __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
- } else {
- // TODO(plind): The Arm port is a little different here, due to their
- // DeOpt jump table, which is not used for Mips yet.
- __ Jump(entry, RelocInfo::RUNTIME_ENTRY, cc, src1, src2);
- }
+ // TODO(plind): The Arm port is a little different here, due to their
+ // DeOpt jump table, which is not used for Mips yet.
+ __ Jump(entry, RelocInfo::RUNTIME_ENTRY, cc, src1, src2);
}
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
int length = deoptimizations_.length();
if (length == 0) return;
- ASSERT(FLAG_deopt);
Handle<DeoptimizationInputData> data =
factory()->NewDeoptimizationInputData(length, TENURED);
@@ -683,6 +667,7 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
data->SetArgumentsStackHeight(i,
Smi::FromInt(env->arguments_stack_height()));
+ data->SetPc(i, Smi::FromInt(env->pc_offset()));
}
code->set_deoptimization_data(*data);
}
@@ -714,16 +699,28 @@ void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
}
+void LCodeGen::RecordSafepointWithLazyDeopt(
+ LInstruction* instr, SafepointMode safepoint_mode) {
+ if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
+ RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
+ } else {
+ ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
+ RecordSafepointWithRegisters(
+ instr->pointer_map(), 0, Safepoint::kLazyDeopt);
+ }
+}
+
+
void LCodeGen::RecordSafepoint(
LPointerMap* pointers,
Safepoint::Kind kind,
int arguments,
- int deoptimization_index) {
+ Safepoint::DeoptMode deopt_mode) {
ASSERT(expected_safepoint_kind_ == kind);
const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
- kind, arguments, deoptimization_index);
+ kind, arguments, deopt_mode);
for (int i = 0; i < operands->length(); i++) {
LOperand* pointer = operands->at(i);
if (pointer->IsStackSlot()) {
@@ -740,31 +737,31 @@ void LCodeGen::RecordSafepoint(
void LCodeGen::RecordSafepoint(LPointerMap* pointers,
- int deoptimization_index) {
- RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
+ Safepoint::DeoptMode deopt_mode) {
+ RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);
}
-void LCodeGen::RecordSafepoint(int deoptimization_index) {
+void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
LPointerMap empty_pointers(RelocInfo::kNoPosition);
- RecordSafepoint(&empty_pointers, deoptimization_index);
+ RecordSafepoint(&empty_pointers, deopt_mode);
}
void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
int arguments,
- int deoptimization_index) {
- RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
- deoptimization_index);
+ Safepoint::DeoptMode deopt_mode) {
+ RecordSafepoint(
+ pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
}
void LCodeGen::RecordSafepointWithRegistersAndDoubles(
LPointerMap* pointers,
int arguments,
- int deoptimization_index) {
- RecordSafepoint(pointers, Safepoint::kWithRegistersAndDoubles, arguments,
- deoptimization_index);
+ Safepoint::DeoptMode deopt_mode) {
+ RecordSafepoint(
+ pointers, Safepoint::kWithRegistersAndDoubles, arguments, deopt_mode);
}
@@ -799,12 +796,6 @@ void LCodeGen::DoGap(LGap* gap) {
LParallelMove* move = gap->GetParallelMove(inner_pos);
if (move != NULL) DoParallelMove(move);
}
-
- LInstruction* next = GetNextInstruction();
- if (next != NULL && next->IsLazyBailout()) {
- int pc = masm()->pc_offset();
- safepoints_.SetPcAfterGap(pc);
- }
}
@@ -874,52 +865,47 @@ void LCodeGen::DoModI(LModI* instr) {
const Register left = ToRegister(instr->InputAt(0));
const Register result = ToRegister(instr->result());
- // p2constant holds the right side value if it's a power of 2 constant.
- // In other cases it is 0.
- int32_t p2constant = 0;
-
- if (instr->InputAt(1)->IsConstantOperand()) {
- p2constant = ToInteger32(LConstantOperand::cast(instr->InputAt(1)));
- if (p2constant % 2 != 0) {
- p2constant = 0;
- }
- // Result always takes the sign of the dividend (left).
- p2constant = abs(p2constant);
- }
-
- // div runs in the background while we check for special cases.
- Register right = EmitLoadRegister(instr->InputAt(1), scratch);
- __ div(left, right);
+ Label done;
- // Check for x % 0.
- if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
- DeoptimizeIf(eq, instr->environment(), right, Operand(zero_reg));
- }
+ if (instr->hydrogen()->HasPowerOf2Divisor()) {
+ Register scratch = scratch0();
+ ASSERT(!left.is(scratch));
+ __ mov(scratch, left);
+ int32_t p2constant = HConstant::cast(
+ instr->hydrogen()->right())->Integer32Value();
+ ASSERT(p2constant != 0);
+ // Result always takes the sign of the dividend (left).
+ p2constant = abs(p2constant);
- Label skip_div, do_div;
- if (p2constant != 0) {
- // Fall back to the result of the div instruction if we could have sign
- // problems.
- __ Branch(&do_div, lt, left, Operand(zero_reg));
- // Modulo by masking.
- __ And(scratch, left, p2constant - 1);
- __ Branch(&skip_div);
- }
+ Label positive_dividend;
+ __ Branch(USE_DELAY_SLOT, &positive_dividend, ge, left, Operand(zero_reg));
+ __ subu(result, zero_reg, left);
+ __ And(result, result, p2constant - 1);
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ DeoptimizeIf(eq, instr->environment(), result, Operand(zero_reg));
+ }
+ __ Branch(USE_DELAY_SLOT, &done);
+ __ subu(result, zero_reg, result);
+ __ bind(&positive_dividend);
+ __ And(result, scratch, p2constant - 1);
+ } else {
+ // div runs in the background while we check for special cases.
+ Register right = EmitLoadRegister(instr->InputAt(1), scratch);
+ __ div(left, right);
- __ bind(&do_div);
- __ mfhi(scratch);
- __ bind(&skip_div);
+ // Check for x % 0.
+ if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
+ DeoptimizeIf(eq, instr->environment(), right, Operand(zero_reg));
+ }
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
- // Result always takes the sign of the dividend (left).
- Label done;
__ Branch(USE_DELAY_SLOT, &done, ge, left, Operand(zero_reg));
- __ mov(result, scratch);
- DeoptimizeIf(eq, instr->environment(), result, Operand(zero_reg));
- __ bind(&done);
- } else {
- __ Move(result, scratch);
+ __ mfhi(result);
+
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ DeoptimizeIf(eq, instr->environment(), result, Operand(zero_reg));
+ }
}
+ __ bind(&done);
}
@@ -1027,7 +1013,7 @@ void LCodeGen::DoMulI(LMulI* instr) {
} else {
// Generate standard code.
__ li(at, constant);
- __ mul(result, left, at);
+ __ Mul(result, left, at);
}
}
@@ -1045,7 +1031,7 @@ void LCodeGen::DoMulI(LMulI* instr) {
__ sra(at, result, 31);
DeoptimizeIf(ne, instr->environment(), scratch, Operand(at));
} else {
- __ mul(result, left, right);
+ __ Mul(result, left, right);
}
if (bailout_on_minus_zero) {
@@ -1213,8 +1199,13 @@ void LCodeGen::DoConstantD(LConstantD* instr) {
void LCodeGen::DoConstantT(LConstantT* instr) {
- ASSERT(instr->result()->IsRegister());
- __ li(ToRegister(instr->result()), Operand(instr->value()));
+ Handle<Object> value = instr->value();
+ if (value->IsSmi()) {
+ __ li(ToRegister(instr->result()), Operand(value));
+ } else {
+ __ LoadHeapObject(ToRegister(instr->result()),
+ Handle<HeapObject>::cast(value));
+ }
}
@@ -1265,6 +1256,46 @@ void LCodeGen::DoValueOf(LValueOf* instr) {
}
+void LCodeGen::DoDateField(LDateField* instr) {
+ Register object = ToRegister(instr->InputAt(0));
+ Register result = ToRegister(instr->result());
+ Register scratch = ToRegister(instr->TempAt(0));
+ Smi* index = instr->index();
+ Label runtime, done;
+ ASSERT(object.is(a0));
+ ASSERT(result.is(v0));
+ ASSERT(!scratch.is(scratch0()));
+ ASSERT(!scratch.is(object));
+
+#ifdef DEBUG
+ __ AbortIfSmi(object);
+ __ GetObjectType(object, scratch, scratch);
+ __ Assert(eq, "Trying to get date field from non-date.",
+ scratch, Operand(JS_DATE_TYPE));
+#endif
+
+ if (index->value() == 0) {
+ __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
+ } else {
+ if (index->value() < JSDate::kFirstUncachedField) {
+ ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
+ __ li(scratch, Operand(stamp));
+ __ lw(scratch, MemOperand(scratch));
+ __ lw(scratch0(), FieldMemOperand(object, JSDate::kCacheStampOffset));
+ __ Branch(&runtime, ne, scratch, Operand(scratch0()));
+ __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
+ kPointerSize * index->value()));
+ __ jmp(&done);
+ }
+ __ bind(&runtime);
+ __ PrepareCallCFunction(2, scratch);
+ __ li(a1, Operand(index));
+ __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
+ __ bind(&done);
+ }
+}
+
+
void LCodeGen::DoBitNotI(LBitNotI* instr) {
Register input = ToRegister(instr->InputAt(0));
Register result = ToRegister(instr->result());
@@ -1686,9 +1717,9 @@ void LCodeGen::DoIsNilAndBranch(LIsNilAndBranch* instr) {
Condition LCodeGen::EmitIsObject(Register input,
Register temp1,
+ Register temp2,
Label* is_not_object,
Label* is_object) {
- Register temp2 = scratch0();
__ JumpIfSmi(input, is_not_object);
__ LoadRoot(temp2, Heap::kNullValueRootIndex);
@@ -1721,13 +1752,39 @@ void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
Label* false_label = chunk_->GetAssemblyLabel(false_block);
Condition true_cond =
- EmitIsObject(reg, temp1, false_label, true_label);
+ EmitIsObject(reg, temp1, temp2, false_label, true_label);
EmitBranch(true_block, false_block, true_cond, temp2,
Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
}
+Condition LCodeGen::EmitIsString(Register input,
+ Register temp1,
+ Label* is_not_string) {
+ __ JumpIfSmi(input, is_not_string);
+ __ GetObjectType(input, temp1, temp1);
+
+ return lt;
+}
+
+
+void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
+ Register reg = ToRegister(instr->InputAt(0));
+ Register temp1 = ToRegister(instr->TempAt(0));
+
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
+ Label* false_label = chunk_->GetAssemblyLabel(false_block);
+
+ Condition true_cond =
+ EmitIsString(reg, temp1, false_label);
+
+ EmitBranch(true_block, false_block, true_cond, temp1,
+ Operand(FIRST_NONSTRING_TYPE));
+}
+
+
void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1753,6 +1810,40 @@ void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
}
+static Condition ComputeCompareCondition(Token::Value op) {
+ switch (op) {
+ case Token::EQ_STRICT:
+ case Token::EQ:
+ return eq;
+ case Token::LT:
+ return lt;
+ case Token::GT:
+ return gt;
+ case Token::LTE:
+ return le;
+ case Token::GTE:
+ return ge;
+ default:
+ UNREACHABLE();
+ return kNoCondition;
+ }
+}
+
+
+void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
+ Token::Value op = instr->op();
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
+
+ Handle<Code> ic = CompareIC::GetUninitialized(op);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
+
+ Condition condition = ComputeCompareCondition(op);
+
+ EmitBranch(true_block, false_block, condition, v0, Operand(zero_reg));
+}
+
+
static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
InstanceType from = instr->from();
InstanceType to = instr->to();
@@ -1821,9 +1912,8 @@ void LCodeGen::DoHasCachedArrayIndexAndBranch(
}
-// Branches to a label or falls through with this instance class-name adr
-// returned in temp reg, available for comparison by the caller. Trashes the
-// temp registers, but not the input. Only input and temp2 may alias.
+// Branches to a label or falls through with the answer in flags. Trashes
+// the temp registers, but not the input.
void LCodeGen::EmitClassOfTest(Label* is_true,
Label* is_false,
Handle<String>class_name,
@@ -1831,7 +1921,9 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
Register temp,
Register temp2) {
ASSERT(!input.is(temp));
- ASSERT(!temp.is(temp2)); // But input and temp2 may be the same register.
+ ASSERT(!input.is(temp2));
+ ASSERT(!temp.is(temp2));
+
__ JumpIfSmi(input, is_false);
if (class_name->IsEqualTo(CStrVector("Function"))) {
@@ -1941,7 +2033,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* instr)
: LDeferredCode(codegen), instr_(instr) { }
virtual void Generate() {
- codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
+ codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
}
virtual LInstruction* instr() { return instr_; }
Label* map_check() { return &map_check_; }
@@ -1977,12 +2069,15 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
// We use Factory::the_hole_value() on purpose instead of loading from the
// root array to force relocation to be able to later patch with
// the cached map.
- __ li(at, Operand(factory()->the_hole_value()), true);
+ Handle<JSGlobalPropertyCell> cell =
+ factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+ __ li(at, Operand(Handle<Object>(cell)));
+ __ lw(at, FieldMemOperand(at, JSGlobalPropertyCell::kValueOffset));
__ Branch(&cache_miss, ne, map, Operand(at));
// We use Factory::the_hole_value() on purpose instead of loading from the
// root array to force relocation to be able to later patch
// with true or false.
- __ li(result, Operand(factory()->the_hole_value()), true);
+ __ li(result, Operand(factory()->the_hole_value()), CONSTANT_SIZE);
__ Branch(&done);
// The inlined call site cache did not match. Check null and string before
@@ -2009,8 +2104,8 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
}
-void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
- Label* map_check) {
+void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
+ Label* map_check) {
Register result = ToRegister(instr->result());
ASSERT(result.is(v0));
@@ -2030,46 +2125,29 @@ void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
// offset to the location of the map check.
Register temp = ToRegister(instr->TempAt(0));
ASSERT(temp.is(t0));
- __ li(InstanceofStub::right(), Operand(instr->function()));
+ __ LoadHeapObject(InstanceofStub::right(), instr->function());
static const int kAdditionalDelta = 7;
int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
Label before_push_delta;
__ bind(&before_push_delta);
{
Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
- __ li(temp, Operand(delta * kPointerSize), true);
+ __ li(temp, Operand(delta * kPointerSize), CONSTANT_SIZE);
__ StoreToSafepointRegisterSlot(temp, temp);
}
CallCodeGeneric(stub.GetCode(),
RelocInfo::CODE_TARGET,
instr,
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
+ ASSERT(instr->HasDeoptimizationEnvironment());
+ LEnvironment* env = instr->deoptimization_environment();
+ safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
// Put the result value into the result register slot and
// restore all registers.
__ StoreToSafepointRegisterSlot(result, result);
}
-static Condition ComputeCompareCondition(Token::Value op) {
- switch (op) {
- case Token::EQ_STRICT:
- case Token::EQ:
- return eq;
- case Token::LT:
- return lt;
- case Token::GT:
- return gt;
- case Token::LTE:
- return le;
- case Token::GTE:
- return ge;
- default:
- UNREACHABLE();
- return kNoCondition;
- }
-}
-
-
void LCodeGen::DoCmpT(LCmpT* instr) {
Token::Value op = instr->op();
@@ -2129,41 +2207,27 @@ void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
- Register value = ToRegister(instr->InputAt(0));
- Register scratch = scratch0();
- Register scratch2 = ToRegister(instr->TempAt(0));
+ Register value = ToRegister(instr->value());
+ Register cell = scratch0();
// Load the cell.
- __ li(scratch, Operand(Handle<Object>(instr->hydrogen()->cell())));
+ __ li(cell, Operand(instr->hydrogen()->cell()));
// If the cell we are storing to contains the hole it could have
// been deleted from the property dictionary. In that case, we need
// to update the property details in the property dictionary to mark
// it as no longer deleted.
if (instr->hydrogen()->RequiresHoleCheck()) {
- __ lw(scratch2,
- FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
+ // We use a temp to check the payload.
+ Register payload = ToRegister(instr->TempAt(0));
+ __ lw(payload, FieldMemOperand(cell, JSGlobalPropertyCell::kValueOffset));
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
- DeoptimizeIf(eq, instr->environment(), scratch2, Operand(at));
+ DeoptimizeIf(eq, instr->environment(), payload, Operand(at));
}
// Store the value.
- __ sw(value, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
-
- // Cells are always in the remembered set.
- if (instr->hydrogen()->NeedsWriteBarrier()) {
- HType type = instr->hydrogen()->value()->type();
- SmiCheck check_needed =
- type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
- __ RecordWriteField(scratch,
- JSGlobalPropertyCell::kValueOffset,
- value,
- scratch2,
- kRAHasBeenSaved,
- kSaveFPRegs,
- OMIT_REMEMBERED_SET,
- check_needed);
- }
+ __ sw(value, FieldMemOperand(cell, JSGlobalPropertyCell::kValueOffset));
+ // Cells are always rescanned, so no write barrier here.
}
@@ -2172,7 +2236,7 @@ void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
ASSERT(ToRegister(instr->value()).is(a0));
__ li(a2, Operand(instr->name()));
- Handle<Code> ic = instr->strict_mode()
+ Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
@@ -2182,14 +2246,42 @@ void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Register context = ToRegister(instr->context());
Register result = ToRegister(instr->result());
+
__ lw(result, ContextOperand(context, instr->slot_index()));
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
+
+ if (instr->hydrogen()->DeoptimizesOnHole()) {
+ DeoptimizeIf(eq, instr->environment(), result, Operand(at));
+ } else {
+ Label is_not_hole;
+ __ Branch(&is_not_hole, ne, result, Operand(at));
+ __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
+ __ bind(&is_not_hole);
+ }
+ }
}
void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
Register context = ToRegister(instr->context());
Register value = ToRegister(instr->value());
+ Register scratch = scratch0();
MemOperand target = ContextOperand(context, instr->slot_index());
+
+ Label skip_assignment;
+
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ __ lw(scratch, target);
+ __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
+
+ if (instr->hydrogen()->DeoptimizesOnHole()) {
+ DeoptimizeIf(eq, instr->environment(), scratch, Operand(at));
+ } else {
+ __ Branch(&skip_assignment, ne, scratch, Operand(at));
+ }
+ }
+
__ sw(value, target);
if (instr->hydrogen()->NeedsWriteBarrier()) {
HType type = instr->hydrogen()->value()->type();
@@ -2204,6 +2296,8 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
EMIT_REMEMBERED_SET,
check_needed);
}
+
+ __ bind(&skip_assignment);
}
@@ -2225,7 +2319,7 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
Handle<String> name) {
LookupResult lookup(isolate());
type->LookupInDescriptors(NULL, *name, &lookup);
- ASSERT(lookup.IsProperty() &&
+ ASSERT(lookup.IsFound() &&
(lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
if (lookup.type() == FIELD) {
int index = lookup.GetLocalFieldIndexFromMap(*type);
@@ -2241,7 +2335,7 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
}
} else {
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
- LoadHeapObject(result, Handle<HeapObject>::cast(function));
+ __ LoadHeapObject(result, function);
}
}
@@ -2565,8 +2659,8 @@ void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
// Result is the frame pointer for the frame if not adapted and for the real
// frame below the adaptor frame if adapted.
- __ movn(result, fp, temp); // move only if temp is not equal to zero (ne)
- __ movz(result, scratch, temp); // move only if temp is equal to zero (eq)
+ __ Movn(result, fp, temp); // Move only if temp is not equal to zero (ne).
+ __ Movz(result, scratch, temp); // Move only if temp is equal to zero (eq).
}
@@ -2591,15 +2685,10 @@ void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
}
-void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
+void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
Register receiver = ToRegister(instr->receiver());
Register function = ToRegister(instr->function());
- Register length = ToRegister(instr->length());
- Register elements = ToRegister(instr->elements());
Register scratch = scratch0();
- ASSERT(receiver.is(a0)); // Used for parameter count.
- ASSERT(function.is(a1)); // Required by InvokeFunction.
- ASSERT(ToRegister(instr->result()).is(v0));
// If the receiver is null or undefined, we have to pass the global
// object as a receiver to normal functions. Values have to be
@@ -2640,6 +2729,17 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
__ lw(receiver,
FieldMemOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
__ bind(&receiver_ok);
+}
+
+void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
+ Register receiver = ToRegister(instr->receiver());
+ Register function = ToRegister(instr->function());
+ Register length = ToRegister(instr->length());
+ Register elements = ToRegister(instr->elements());
+ Register scratch = scratch0();
+ ASSERT(receiver.is(a0)); // Used for parameter count.
+ ASSERT(function.is(a1)); // Required by InvokeFunction.
+ ASSERT(ToRegister(instr->result()).is(v0));
// Copy the arguments to this function possibly from the
// adaptor frame below it.
@@ -2670,15 +2770,12 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
__ bind(&invoke);
ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
- SafepointGenerator safepoint_generator(this,
- pointers,
- env->deoptimization_index());
+ SafepointGenerator safepoint_generator(
+ this, pointers, Safepoint::kLazyDeopt);
// The number of arguments is stored in receiver which is a0, as expected
// by InvokeFunction.
- v8::internal::ParameterCount actual(receiver);
+ ParameterCount actual(receiver);
__ InvokeFunction(function, actual, CALL_FUNCTION,
safepoint_generator, CALL_AS_METHOD);
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2698,7 +2795,7 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) {
void LCodeGen::DoThisFunction(LThisFunction* instr) {
Register result = ToRegister(instr->result());
- LoadHeapObject(result, instr->hydrogen()->closure());
+ __ LoadHeapObject(result, instr->hydrogen()->closure());
}
@@ -2716,6 +2813,15 @@ void LCodeGen::DoOuterContext(LOuterContext* instr) {
}
+void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
+ __ LoadHeapObject(scratch0(), instr->hydrogen()->pairs());
+ __ li(scratch1(), Operand(Smi::FromInt(instr->hydrogen()->flags())));
+ // The context is the first argument.
+ __ Push(cp, scratch0(), scratch1());
+ CallRuntime(Runtime::kDeclareGlobals, 3, instr);
+}
+
+
void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
Register context = ToRegister(instr->context());
Register result = ToRegister(instr->result());
@@ -2734,31 +2840,41 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
int arity,
LInstruction* instr,
CallKind call_kind) {
- // Change context if needed.
- bool change_context =
- (info()->closure()->context() != function->context()) ||
- scope()->contains_with() ||
- (scope()->num_heap_slots() > 0);
- if (change_context) {
- __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
- }
-
- // Set a0 to arguments count if adaption is not needed. Assumes that a0
- // is available to write to at this point.
- if (!function->NeedsArgumentsAdaption()) {
- __ li(a0, Operand(arity));
- }
+ bool can_invoke_directly = !function->NeedsArgumentsAdaption() ||
+ function->shared()->formal_parameter_count() == arity;
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
- // Invoke function.
- __ SetCallKind(t1, call_kind);
- __ lw(at, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
- __ Call(at);
+ if (can_invoke_directly) {
+ __ LoadHeapObject(a1, function);
+ // Change context if needed.
+ bool change_context =
+ (info()->closure()->context() != function->context()) ||
+ scope()->contains_with() ||
+ (scope()->num_heap_slots() > 0);
+ if (change_context) {
+ __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
+ }
- // Setup deoptimization.
- RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT);
+ // Set a0 to arguments count if adaption is not needed. Assumes that a0
+ // is available to write to at this point.
+ if (!function->NeedsArgumentsAdaption()) {
+ __ li(a0, Operand(arity));
+ }
+
+ // Invoke function.
+ __ SetCallKind(t1, call_kind);
+ __ lw(at, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
+ __ Call(at);
+
+ // Set up deoptimization.
+ RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT);
+ } else {
+ SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
+ ParameterCount count(arity);
+ __ InvokeFunction(function, count, CALL_FUNCTION, generator, call_kind);
+ }
// Restore context.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2768,7 +2884,6 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
ASSERT(ToRegister(instr->result()).is(v0));
__ mov(a0, v0);
- __ li(a1, Operand(instr->function()));
CallKnownFunction(instr->function(), instr->arity(), instr, CALL_AS_METHOD);
}
@@ -2847,7 +2962,7 @@ void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
__ mov(result, input);
ASSERT_EQ(2, masm()->InstructionsGeneratedSince(&done));
__ subu(result, zero_reg, input);
- // Overflow if result is still negative, ie 0x80000000.
+ // Overflow if result is still negative, i.e. 0x80000000.
DeoptimizeIf(lt, instr->environment(), result, Operand(zero_reg));
__ bind(&done);
}
@@ -2953,11 +3068,11 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
__ And(scratch, result, Operand(HeapNumber::kSignMask));
__ Move(double_scratch0(), 0.5);
- __ add_d(input, input, double_scratch0());
+ __ add_d(double_scratch0(), input, double_scratch0());
// Check sign of the result: if the sign changed, the input
// value was in ]0.5, 0[ and the result should be -0.
- __ mfc1(result, input.high());
+ __ mfc1(result, double_scratch0().high());
__ Xor(result, result, Operand(scratch));
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
// ARM uses 'mi' here, which is 'lt'
@@ -2977,7 +3092,7 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
__ EmitFPUTruncate(kRoundToMinusInf,
double_scratch0().low(),
- input,
+ double_scratch0(),
result,
except_flag);
@@ -3007,69 +3122,130 @@ void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
DoubleRegister result = ToDoubleRegister(instr->result());
- DoubleRegister double_scratch = double_scratch0();
+ DoubleRegister temp = ToDoubleRegister(instr->TempAt(0));
+
+ ASSERT(!input.is(result));
+
+ // Note that according to ECMA-262 15.8.2.13:
+ // Math.pow(-Infinity, 0.5) == Infinity
+ // Math.sqrt(-Infinity) == NaN
+ Label done;
+ __ Move(temp, -V8_INFINITY);
+ __ BranchF(USE_DELAY_SLOT, &done, NULL, eq, temp, input);
+ // Set up Infinity in the delay slot.
+ // result is overwritten if the branch is not taken.
+ __ neg_d(result, temp);
// Add +0 to convert -0 to +0.
- __ mtc1(zero_reg, double_scratch.low());
- __ mtc1(zero_reg, double_scratch.high());
- __ add_d(result, input, double_scratch);
+ __ add_d(result, input, kDoubleRegZero);
__ sqrt_d(result, result);
+ __ bind(&done);
}
void LCodeGen::DoPower(LPower* instr) {
- LOperand* left = instr->InputAt(0);
- LOperand* right = instr->InputAt(1);
- Register scratch = scratch0();
- DoubleRegister result_reg = ToDoubleRegister(instr->result());
Representation exponent_type = instr->hydrogen()->right()->representation();
- if (exponent_type.IsDouble()) {
- // Prepare arguments and call C function.
- __ PrepareCallCFunction(0, 2, scratch);
- __ SetCallCDoubleArguments(ToDoubleRegister(left),
- ToDoubleRegister(right));
- __ CallCFunction(
- ExternalReference::power_double_double_function(isolate()), 0, 2);
+ // Having marked this as a call, we can use any registers.
+ // Just make sure that the input/output registers are the expected ones.
+ ASSERT(!instr->InputAt(1)->IsDoubleRegister() ||
+ ToDoubleRegister(instr->InputAt(1)).is(f4));
+ ASSERT(!instr->InputAt(1)->IsRegister() ||
+ ToRegister(instr->InputAt(1)).is(a2));
+ ASSERT(ToDoubleRegister(instr->InputAt(0)).is(f2));
+ ASSERT(ToDoubleRegister(instr->result()).is(f0));
+
+ if (exponent_type.IsTagged()) {
+ Label no_deopt;
+ __ JumpIfSmi(a2, &no_deopt);
+ __ lw(t3, FieldMemOperand(a2, HeapObject::kMapOffset));
+ __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
+ DeoptimizeIf(ne, instr->environment(), t3, Operand(at));
+ __ bind(&no_deopt);
+ MathPowStub stub(MathPowStub::TAGGED);
+ __ CallStub(&stub);
} else if (exponent_type.IsInteger32()) {
- ASSERT(ToRegister(right).is(a0));
- // Prepare arguments and call C function.
- __ PrepareCallCFunction(1, 1, scratch);
- __ SetCallCDoubleArguments(ToDoubleRegister(left), ToRegister(right));
- __ CallCFunction(
- ExternalReference::power_double_int_function(isolate()), 1, 1);
+ MathPowStub stub(MathPowStub::INTEGER);
+ __ CallStub(&stub);
} else {
- ASSERT(exponent_type.IsTagged());
- ASSERT(instr->hydrogen()->left()->representation().IsDouble());
+ ASSERT(exponent_type.IsDouble());
+ MathPowStub stub(MathPowStub::DOUBLE);
+ __ CallStub(&stub);
+ }
+}
- Register right_reg = ToRegister(right);
- // Check for smi on the right hand side.
- Label non_smi, call;
- __ JumpIfNotSmi(right_reg, &non_smi);
+void LCodeGen::DoRandom(LRandom* instr) {
+ class DeferredDoRandom: public LDeferredCode {
+ public:
+ DeferredDoRandom(LCodeGen* codegen, LRandom* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() { codegen()->DoDeferredRandom(instr_); }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LRandom* instr_;
+ };
- // Untag smi and convert it to a double.
- __ SmiUntag(right_reg);
- FPURegister single_scratch = double_scratch0();
- __ mtc1(right_reg, single_scratch);
- __ cvt_d_w(result_reg, single_scratch);
- __ Branch(&call);
+ DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
+ // Having marked this instruction as a call we can use any
+ // registers.
+ ASSERT(ToDoubleRegister(instr->result()).is(f0));
+ ASSERT(ToRegister(instr->InputAt(0)).is(a0));
- // Heap number map check.
- __ bind(&non_smi);
- __ lw(scratch, FieldMemOperand(right_reg, HeapObject::kMapOffset));
- __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
- DeoptimizeIf(ne, instr->environment(), scratch, Operand(at));
- __ ldc1(result_reg, FieldMemOperand(right_reg, HeapNumber::kValueOffset));
-
- // Prepare arguments and call C function.
- __ bind(&call);
- __ PrepareCallCFunction(0, 2, scratch);
- __ SetCallCDoubleArguments(ToDoubleRegister(left), result_reg);
- __ CallCFunction(
- ExternalReference::power_double_double_function(isolate()), 0, 2);
- }
- // Store the result in the result register.
- __ GetCFunctionDoubleResult(result_reg);
+ static const int kSeedSize = sizeof(uint32_t);
+ STATIC_ASSERT(kPointerSize == kSeedSize);
+
+ __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset));
+ static const int kRandomSeedOffset =
+ FixedArray::kHeaderSize + Context::RANDOM_SEED_INDEX * kPointerSize;
+ __ lw(a2, FieldMemOperand(a0, kRandomSeedOffset));
+ // a2: FixedArray of the global context's random seeds
+
+ // Load state[0].
+ __ lw(a1, FieldMemOperand(a2, ByteArray::kHeaderSize));
+ __ Branch(deferred->entry(), eq, a1, Operand(zero_reg));
+ // Load state[1].
+ __ lw(a0, FieldMemOperand(a2, ByteArray::kHeaderSize + kSeedSize));
+ // a1: state[0].
+ // a0: state[1].
+
+ // state[0] = 18273 * (state[0] & 0xFFFF) + (state[0] >> 16)
+ __ And(a3, a1, Operand(0xFFFF));
+ __ li(t0, Operand(18273));
+ __ mul(a3, a3, t0);
+ __ srl(a1, a1, 16);
+ __ Addu(a1, a3, a1);
+ // Save state[0].
+ __ sw(a1, FieldMemOperand(a2, ByteArray::kHeaderSize));
+
+ // state[1] = 36969 * (state[1] & 0xFFFF) + (state[1] >> 16)
+ __ And(a3, a0, Operand(0xFFFF));
+ __ li(t0, Operand(36969));
+ __ mul(a3, a3, t0);
+ __ srl(a0, a0, 16);
+ __ Addu(a0, a3, a0);
+ // Save state[1].
+ __ sw(a0, FieldMemOperand(a2, ByteArray::kHeaderSize + kSeedSize));
+
+ // Random bit pattern = (state[0] << 14) + (state[1] & 0x3FFFF)
+ __ And(a0, a0, Operand(0x3FFFF));
+ __ sll(a1, a1, 14);
+ __ Addu(v0, a0, a1);
+
+ __ bind(deferred->exit());
+
+ // 0x41300000 is the top half of 1.0 x 2^20 as a double.
+ __ li(a2, Operand(0x41300000));
+ // Move 0x41300000xxxxxxxx (x = random bits in v0) to FPU.
+ __ Move(f12, v0, a2);
+ // Move 0x4130000000000000 to FPU.
+ __ Move(f14, zero_reg, a2);
+ // Subtract to get the result.
+ __ sub_d(f0, f12, f14);
+}
+
+void LCodeGen::DoDeferredRandom(LRandom* instr) {
+ __ PrepareCallCFunction(1, scratch0());
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
+ // Return value is in v0.
}
@@ -3081,6 +3257,14 @@ void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
}
+void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
+ ASSERT(ToDoubleRegister(instr->result()).is(f4));
+ TranscendentalCacheStub stub(TranscendentalCache::TAN,
+ TranscendentalCacheStub::UNTAGGED);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+}
+
+
void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(f4));
TranscendentalCacheStub stub(TranscendentalCache::COS,
@@ -3120,6 +3304,9 @@ void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
case kMathSin:
DoMathSin(instr);
break;
+ case kMathTan:
+ DoMathTan(instr);
+ break;
case kMathLog:
DoMathLog(instr);
break;
@@ -3135,10 +3322,8 @@ void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
ASSERT(instr->HasPointerMap());
ASSERT(instr->HasDeoptimizationEnvironment());
LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
- SafepointGenerator generator(this, pointers, env->deoptimization_index());
+ SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
ParameterCount count(instr->arity());
__ InvokeFunction(a1, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -3171,12 +3356,12 @@ void LCodeGen::DoCallNamed(LCallNamed* instr) {
void LCodeGen::DoCallFunction(LCallFunction* instr) {
+ ASSERT(ToRegister(instr->function()).is(a1));
ASSERT(ToRegister(instr->result()).is(v0));
int arity = instr->arity();
CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
- __ Drop(1);
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}
@@ -3196,7 +3381,6 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
ASSERT(ToRegister(instr->result()).is(v0));
- __ li(a1, Operand(instr->target()));
CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
}
@@ -3205,9 +3389,9 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
ASSERT(ToRegister(instr->InputAt(0)).is(a1));
ASSERT(ToRegister(instr->result()).is(v0));
- Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
+ CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
__ li(a0, Operand(instr->arity()));
- CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
+ CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
}
@@ -3271,7 +3455,7 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
// Name is always in a2.
__ li(a2, Operand(instr->name()));
- Handle<Code> ic = instr->strict_mode()
+ Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
CallCode(ic, RelocInfo::CODE_TARGET, instr);
@@ -3292,13 +3476,6 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
Register scratch = scratch0();
- // This instruction cannot handle the FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
- // conversion, so it deopts in that case.
- if (instr->hydrogen()->ValueNeedsSmiCheck()) {
- __ And(at, value, Operand(kSmiTagMask));
- DeoptimizeIf(ne, instr->environment(), at, Operand(zero_reg));
- }
-
// Do the store.
if (instr->key()->IsConstantOperand()) {
ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
@@ -3452,7 +3629,7 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
ASSERT(ToRegister(instr->key()).is(a1));
ASSERT(ToRegister(instr->value()).is(a0));
- Handle<Code> ic = instr->strict_mode()
+ Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
CallCode(ic, RelocInfo::CODE_TARGET, instr);
@@ -3522,86 +3699,13 @@ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
LStringCharCodeAt* instr_;
};
- Register temp = scratch1();
- Register string = ToRegister(instr->string());
- Register index = ToRegister(instr->index());
- Register result = ToRegister(instr->result());
DeferredStringCharCodeAt* deferred =
new DeferredStringCharCodeAt(this, instr);
-
- // Fetch the instance type of the receiver into result register.
- __ lw(result, FieldMemOperand(string, HeapObject::kMapOffset));
- __ lbu(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
-
- // We need special handling for indirect strings.
- Label check_sequential;
- __ And(temp, result, kIsIndirectStringMask);
- __ Branch(&check_sequential, eq, temp, Operand(zero_reg));
-
- // Dispatch on the indirect string shape: slice or cons.
- Label cons_string;
- __ And(temp, result, kSlicedNotConsMask);
- __ Branch(&cons_string, eq, temp, Operand(zero_reg));
-
- // Handle slices.
- Label indirect_string_loaded;
- __ lw(result, FieldMemOperand(string, SlicedString::kOffsetOffset));
- __ sra(temp, result, kSmiTagSize);
- __ addu(index, index, temp);
- __ lw(string, FieldMemOperand(string, SlicedString::kParentOffset));
- __ jmp(&indirect_string_loaded);
-
- // Handle conses.
- // Check whether the right hand side is the empty string (i.e. if
- // this is really a flat string in a cons string). If that is not
- // the case we would rather go to the runtime system now to flatten
- // the string.
- __ bind(&cons_string);
- __ lw(result, FieldMemOperand(string, ConsString::kSecondOffset));
- __ LoadRoot(temp, Heap::kEmptyStringRootIndex);
- __ Branch(deferred->entry(), ne, result, Operand(temp));
- // Get the first of the two strings and load its instance type.
- __ lw(string, FieldMemOperand(string, ConsString::kFirstOffset));
-
- __ bind(&indirect_string_loaded);
- __ lw(result, FieldMemOperand(string, HeapObject::kMapOffset));
- __ lbu(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
-
- // Check whether the string is sequential. The only non-sequential
- // shapes we support have just been unwrapped above.
- __ bind(&check_sequential);
- STATIC_ASSERT(kSeqStringTag == 0);
- __ And(temp, result, Operand(kStringRepresentationMask));
- __ Branch(deferred->entry(), ne, temp, Operand(zero_reg));
-
- // Dispatch on the encoding: ASCII or two-byte.
- Label ascii_string;
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
- STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
- __ And(temp, result, Operand(kStringEncodingMask));
- __ Branch(&ascii_string, ne, temp, Operand(zero_reg));
-
- // Two-byte string.
- // Load the two-byte character code into the result register.
- Label done;
- __ Addu(result,
- string,
- Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
- __ sll(temp, index, 1);
- __ Addu(result, result, temp);
- __ lhu(result, MemOperand(result, 0));
- __ Branch(&done);
-
- // ASCII string.
- // Load the byte into the result register.
- __ bind(&ascii_string);
- __ Addu(result,
- string,
- Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- __ Addu(result, result, index);
- __ lbu(result, MemOperand(result, 0));
-
- __ bind(&done);
+ StringCharLoadGenerator::Generate(masm(),
+ ToRegister(instr->string()),
+ ToRegister(instr->index()),
+ ToRegister(instr->result()),
+ deferred->entry());
__ bind(deferred->exit());
}
@@ -3722,13 +3826,12 @@ void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
LNumberTagI* instr_;
};
- LOperand* input = instr->InputAt(0);
- ASSERT(input->IsRegister() && input->Equals(instr->result()));
- Register reg = ToRegister(input);
+ Register src = ToRegister(instr->InputAt(0));
+ Register dst = ToRegister(instr->result());
Register overflow = scratch0();
DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
- __ SmiTagCheckOverflow(reg, overflow);
+ __ SmiTagCheckOverflow(dst, src, overflow);
__ BranchOnOverflow(deferred->entry(), overflow);
__ bind(deferred->exit());
}
@@ -3736,7 +3839,8 @@ void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
Label slow;
- Register reg = ToRegister(instr->InputAt(0));
+ Register src = ToRegister(instr->InputAt(0));
+ Register dst = ToRegister(instr->result());
FPURegister dbl_scratch = double_scratch0();
// Preserve the value of all registers.
@@ -3746,14 +3850,16 @@ void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
// disagree. Try to allocate a heap number in new space and store
// the value in there. If that fails, call the runtime system.
Label done;
- __ SmiUntag(reg);
- __ Xor(reg, reg, Operand(0x80000000));
- __ mtc1(reg, dbl_scratch);
+ if (dst.is(src)) {
+ __ SmiUntag(src, dst);
+ __ Xor(src, src, Operand(0x80000000));
+ }
+ __ mtc1(src, dbl_scratch);
__ cvt_d_w(dbl_scratch, dbl_scratch);
if (FLAG_inline_new) {
__ LoadRoot(t2, Heap::kHeapNumberMapRootIndex);
__ AllocateHeapNumber(t1, a3, t0, t2, &slow);
- if (!reg.is(t1)) __ mov(reg, t1);
+ __ Move(dst, t1);
__ Branch(&done);
}
@@ -3763,15 +3869,15 @@ void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
// TODO(3095996): Put a valid pointer value in the stack slot where the result
// register is stored, as this register is in the pointer map, but contains an
// integer value.
- __ StoreToSafepointRegisterSlot(zero_reg, reg);
+ __ StoreToSafepointRegisterSlot(zero_reg, dst);
CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr);
- if (!reg.is(v0)) __ mov(reg, v0);
+ __ Move(dst, v0);
// Done. Put the value in dbl_scratch into the value of the allocated heap
// number.
__ bind(&done);
- __ sdc1(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
- __ StoreToSafepointRegisterSlot(reg, reg);
+ __ sdc1(dbl_scratch, FieldMemOperand(dst, HeapNumber::kValueOffset));
+ __ StoreToSafepointRegisterSlot(dst, dst);
}
@@ -3818,25 +3924,23 @@ void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
void LCodeGen::DoSmiTag(LSmiTag* instr) {
- LOperand* input = instr->InputAt(0);
- ASSERT(input->IsRegister() && input->Equals(instr->result()));
ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
- __ SmiTag(ToRegister(input));
+ __ SmiTag(ToRegister(instr->result()), ToRegister(instr->InputAt(0)));
}
void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
Register scratch = scratch0();
- LOperand* input = instr->InputAt(0);
- ASSERT(input->IsRegister() && input->Equals(instr->result()));
+ Register input = ToRegister(instr->InputAt(0));
+ Register result = ToRegister(instr->result());
if (instr->needs_check()) {
STATIC_ASSERT(kHeapObjectTag == 1);
// If the input is a HeapObject, value of scratch won't be zero.
- __ And(scratch, ToRegister(input), Operand(kHeapObjectTag));
- __ SmiUntag(ToRegister(input));
+ __ And(scratch, input, Operand(kHeapObjectTag));
+ __ SmiUntag(result, input);
DeoptimizeIf(ne, instr->environment(), scratch, Operand(zero_reg));
} else {
- __ SmiUntag(ToRegister(input));
+ __ SmiUntag(result, input);
}
}
@@ -3844,13 +3948,14 @@ void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
void LCodeGen::EmitNumberUntagD(Register input_reg,
DoubleRegister result_reg,
bool deoptimize_on_undefined,
+ bool deoptimize_on_minus_zero,
LEnvironment* env) {
Register scratch = scratch0();
Label load_smi, heap_number, done;
// Smi check.
- __ JumpIfSmi(input_reg, &load_smi);
+ __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi);
// Heap number map check.
__ lw(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
@@ -3873,14 +3978,19 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
}
// Heap number to double register conversion.
__ ldc1(result_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset));
+ if (deoptimize_on_minus_zero) {
+ __ mfc1(at, result_reg.low());
+ __ Branch(&done, ne, at, Operand(zero_reg));
+ __ mfc1(scratch, result_reg.high());
+ DeoptimizeIf(eq, env, scratch, Operand(HeapNumber::kSignMask));
+ }
__ Branch(&done);
// Smi to double register conversion
__ bind(&load_smi);
- __ SmiUntag(input_reg); // Untag smi before converting to float.
- __ mtc1(input_reg, result_reg);
+ // scratch: untagged value of input_reg
+ __ mtc1(scratch, result_reg);
__ cvt_d_w(result_reg, result_reg);
- __ SmiTag(input_reg); // Retag smi.
__ bind(&done);
}
@@ -4004,6 +4114,7 @@ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
EmitNumberUntagD(input_reg, result_reg,
instr->hydrogen()->deoptimize_on_undefined(),
+ instr->hydrogen()->deoptimize_on_minus_zero(),
instr->environment());
}
@@ -4097,23 +4208,50 @@ void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
- ASSERT(instr->InputAt(0)->IsRegister());
- Register reg = ToRegister(instr->InputAt(0));
- DeoptimizeIf(ne, instr->environment(), reg,
- Operand(instr->hydrogen()->target()));
+ Register reg = ToRegister(instr->value());
+ Handle<JSFunction> target = instr->hydrogen()->target();
+ if (isolate()->heap()->InNewSpace(*target)) {
+ Register reg = ToRegister(instr->value());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(target);
+ __ li(at, Operand(Handle<Object>(cell)));
+ __ lw(at, FieldMemOperand(at, JSGlobalPropertyCell::kValueOffset));
+ DeoptimizeIf(ne, instr->environment(), reg,
+ Operand(at));
+ } else {
+ DeoptimizeIf(ne, instr->environment(), reg,
+ Operand(target));
+ }
}
-void LCodeGen::DoCheckMap(LCheckMap* instr) {
+void LCodeGen::DoCheckMapCommon(Register reg,
+ Register scratch,
+ Handle<Map> map,
+ CompareMapMode mode,
+ LEnvironment* env) {
+ Label success;
+ __ CompareMapAndBranch(reg, scratch, map, &success, eq, &success, mode);
+ DeoptimizeIf(al, env);
+ __ bind(&success);
+}
+
+
+void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
Register scratch = scratch0();
LOperand* input = instr->InputAt(0);
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- __ lw(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
- DeoptimizeIf(ne,
- instr->environment(),
- scratch,
- Operand(instr->hydrogen()->map()));
+ Label success;
+ SmallMapList* map_set = instr->hydrogen()->map_set();
+ for (int i = 0; i < map_set->length() - 1; i++) {
+ Handle<Map> map = map_set->at(i);
+ __ CompareMapAndBranch(
+ reg, scratch, map, &success, eq, &success, REQUIRE_EXACT_MAP);
+ }
+ Handle<Map> map = map_set->last();
+ DoCheckMapCommon(reg, scratch, map, REQUIRE_EXACT_MAP, instr->environment());
+ __ bind(&success);
}
@@ -4140,7 +4278,7 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
Label is_smi, done, heap_number;
// Both smi and heap number cases are handled.
- __ JumpIfSmi(input_reg, &is_smi);
+ __ UntagAndJumpIfSmi(scratch, input_reg, &is_smi);
// Check for heap number
__ lw(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
@@ -4160,28 +4298,13 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
__ ClampDoubleToUint8(result_reg, double_scratch0(), temp_reg);
__ jmp(&done);
- // smi
__ bind(&is_smi);
- __ SmiUntag(scratch, input_reg);
__ ClampUint8(result_reg, scratch);
__ bind(&done);
}
-void LCodeGen::LoadHeapObject(Register result,
- Handle<HeapObject> object) {
- if (heap()->InNewSpace(*object)) {
- Handle<JSGlobalPropertyCell> cell =
- factory()->NewJSGlobalPropertyCell(object);
- __ li(result, Operand(cell));
- __ lw(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
- } else {
- __ li(result, Operand(object));
- }
-}
-
-
void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
Register temp1 = ToRegister(instr->TempAt(0));
Register temp2 = ToRegister(instr->TempAt(1));
@@ -4190,40 +4313,135 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
Handle<JSObject> current_prototype = instr->prototype();
// Load prototype object.
- LoadHeapObject(temp1, current_prototype);
+ __ LoadHeapObject(temp1, current_prototype);
// Check prototype maps up to the holder.
while (!current_prototype.is_identical_to(holder)) {
- __ lw(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
- DeoptimizeIf(ne,
- instr->environment(),
- temp2,
- Operand(Handle<Map>(current_prototype->map())));
+ DoCheckMapCommon(temp1, temp2,
+ Handle<Map>(current_prototype->map()),
+ ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
current_prototype =
Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
// Load next prototype object.
- LoadHeapObject(temp1, current_prototype);
+ __ LoadHeapObject(temp1, current_prototype);
}
// Check the holder map.
- __ lw(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
- DeoptimizeIf(ne,
- instr->environment(),
- temp2,
- Operand(Handle<Map>(current_prototype->map())));
+ DoCheckMapCommon(temp1, temp2,
+ Handle<Map>(current_prototype->map()),
+ ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
}
-void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
- Handle<FixedArray> constant_elements = instr->hydrogen()->constant_elements();
- ASSERT_EQ(2, constant_elements->length());
- ElementsKind constant_elements_kind =
- static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
+void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
+ class DeferredAllocateObject: public LDeferredCode {
+ public:
+ DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LAllocateObject* instr_;
+ };
+
+ DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+ Register result = ToRegister(instr->result());
+ Register scratch = ToRegister(instr->TempAt(0));
+ Register scratch2 = ToRegister(instr->TempAt(1));
+ Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+ Handle<Map> initial_map(constructor->initial_map());
+ int instance_size = initial_map->instance_size();
+ ASSERT(initial_map->pre_allocated_property_fields() +
+ initial_map->unused_property_fields() -
+ initial_map->inobject_properties() == 0);
+
+ // Allocate memory for the object. The initial map might change when
+ // the constructor's prototype changes, but instance size and property
+ // counts remain unchanged (if slack tracking finished).
+ ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
+ __ AllocateInNewSpace(instance_size,
+ result,
+ scratch,
+ scratch2,
+ deferred->entry(),
+ TAG_OBJECT);
+
+ __ bind(deferred->exit());
+ if (FLAG_debug_code) {
+ Label is_in_new_space;
+ __ JumpIfInNewSpace(result, scratch, &is_in_new_space);
+ __ Abort("Allocated object is not in new-space");
+ __ bind(&is_in_new_space);
+ }
+
+ // Load the initial map.
+ Register map = scratch;
+ __ LoadHeapObject(map, constructor);
+ __ lw(map, FieldMemOperand(map, JSFunction::kPrototypeOrInitialMapOffset));
+
+ // Initialize map and fields of the newly allocated object.
+ ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
+ __ sw(map, FieldMemOperand(result, JSObject::kMapOffset));
+ __ LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
+ __ sw(scratch, FieldMemOperand(result, JSObject::kElementsOffset));
+ __ sw(scratch, FieldMemOperand(result, JSObject::kPropertiesOffset));
+ if (initial_map->inobject_properties() != 0) {
+ __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
+ for (int i = 0; i < initial_map->inobject_properties(); i++) {
+ int property_offset = JSObject::kHeaderSize + i * kPointerSize;
+ __ sw(scratch, FieldMemOperand(result, property_offset));
+ }
+ }
+}
+
+
+void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
+ Register result = ToRegister(instr->result());
+ Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+ Handle<Map> initial_map(constructor->initial_map());
+ int instance_size = initial_map->instance_size();
+
+ // TODO(3095996): Get rid of this. For now, we need to make the
+ // result register contain a valid pointer because it is already
+ // contained in the register pointer map.
+ __ mov(result, zero_reg);
+
+ PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
+ __ li(a0, Operand(Smi::FromInt(instance_size)));
+ __ push(a0);
+ CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
+ __ StoreToSafepointRegisterSlot(v0, result);
+}
+
+
+void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
+ Heap* heap = isolate()->heap();
+ ElementsKind boilerplate_elements_kind =
+ instr->hydrogen()->boilerplate_elements_kind();
+
+ // Deopt if the array literal boilerplate ElementsKind is of a type different
+ // than the expected one. The check isn't necessary if the boilerplate has
+ // already been converted to FAST_ELEMENTS.
+ if (boilerplate_elements_kind != FAST_ELEMENTS) {
+ __ LoadHeapObject(a1, instr->hydrogen()->boilerplate_object());
+ // Load map into a2.
+ __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
+ // Load the map's "bit field 2".
+ __ lbu(a2, FieldMemOperand(a2, Map::kBitField2Offset));
+ // Retrieve elements_kind from bit field 2.
+ __ Ext(a2, a2, Map::kElementsKindShift, Map::kElementsKindBitCount);
+ DeoptimizeIf(ne,
+ instr->environment(),
+ a2,
+ Operand(boilerplate_elements_kind));
+ }
__ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
__ li(a2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
- __ li(a1, Operand(constant_elements));
+ // Boilerplate already exists, constant elements are never accessed.
+ // Pass an empty fixed array.
+ __ li(a1, Operand(Handle<FixedArray>(heap->empty_fixed_array())));
__ Push(a3, a2, a1);
// Pick the right runtime function or stub to call.
@@ -4240,29 +4458,166 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
} else {
FastCloneShallowArrayStub::Mode mode =
- constant_elements_kind == FAST_DOUBLE_ELEMENTS
- ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
- : FastCloneShallowArrayStub::CLONE_ELEMENTS;
+ boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
+ ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+ : FastCloneShallowArrayStub::CLONE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, length);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
}
+void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
+ Register result,
+ Register source,
+ int* offset) {
+ ASSERT(!source.is(a2));
+ ASSERT(!result.is(a2));
+
+ // Only elements backing stores for non-COW arrays need to be copied.
+ Handle<FixedArrayBase> elements(object->elements());
+ bool has_elements = elements->length() > 0 &&
+ elements->map() != isolate()->heap()->fixed_cow_array_map();
+
+ // Increase the offset so that subsequent objects end up right after
+ // this object and its backing store.
+ int object_offset = *offset;
+ int object_size = object->map()->instance_size();
+ int elements_offset = *offset + object_size;
+ int elements_size = has_elements ? elements->Size() : 0;
+ *offset += object_size + elements_size;
+
+ // Copy object header.
+ ASSERT(object->properties()->length() == 0);
+ int inobject_properties = object->map()->inobject_properties();
+ int header_size = object_size - inobject_properties * kPointerSize;
+ for (int i = 0; i < header_size; i += kPointerSize) {
+ if (has_elements && i == JSObject::kElementsOffset) {
+ __ Addu(a2, result, Operand(elements_offset));
+ } else {
+ __ lw(a2, FieldMemOperand(source, i));
+ }
+ __ sw(a2, FieldMemOperand(result, object_offset + i));
+ }
+
+ // Copy in-object properties.
+ for (int i = 0; i < inobject_properties; i++) {
+ int total_offset = object_offset + object->GetInObjectPropertyOffset(i);
+ Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i));
+ if (value->IsJSObject()) {
+ Handle<JSObject> value_object = Handle<JSObject>::cast(value);
+ __ Addu(a2, result, Operand(*offset));
+ __ sw(a2, FieldMemOperand(result, total_offset));
+ __ LoadHeapObject(source, value_object);
+ EmitDeepCopy(value_object, result, source, offset);
+ } else if (value->IsHeapObject()) {
+ __ LoadHeapObject(a2, Handle<HeapObject>::cast(value));
+ __ sw(a2, FieldMemOperand(result, total_offset));
+ } else {
+ __ li(a2, Operand(value));
+ __ sw(a2, FieldMemOperand(result, total_offset));
+ }
+ }
+
+
+ if (has_elements) {
+ // Copy elements backing store header.
+ __ LoadHeapObject(source, elements);
+ for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) {
+ __ lw(a2, FieldMemOperand(source, i));
+ __ sw(a2, FieldMemOperand(result, elements_offset + i));
+ }
+
+ // Copy elements backing store content.
+ int elements_length = has_elements ? elements->length() : 0;
+ if (elements->IsFixedDoubleArray()) {
+ Handle<FixedDoubleArray> double_array =
+ Handle<FixedDoubleArray>::cast(elements);
+ for (int i = 0; i < elements_length; i++) {
+ int64_t value = double_array->get_representation(i);
+ // We only support little endian mode...
+ int32_t value_low = value & 0xFFFFFFFF;
+ int32_t value_high = value >> 32;
+ int total_offset =
+ elements_offset + FixedDoubleArray::OffsetOfElementAt(i);
+ __ li(a2, Operand(value_low));
+ __ sw(a2, FieldMemOperand(result, total_offset));
+ __ li(a2, Operand(value_high));
+ __ sw(a2, FieldMemOperand(result, total_offset + 4));
+ }
+ } else if (elements->IsFixedArray()) {
+ for (int i = 0; i < elements_length; i++) {
+ int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
+ Handle<Object> value = JSObject::GetElement(object, i);
+ if (value->IsJSObject()) {
+ Handle<JSObject> value_object = Handle<JSObject>::cast(value);
+ __ Addu(a2, result, Operand(*offset));
+ __ sw(a2, FieldMemOperand(result, total_offset));
+ __ LoadHeapObject(source, value_object);
+ EmitDeepCopy(value_object, result, source, offset);
+ } else if (value->IsHeapObject()) {
+ __ LoadHeapObject(a2, Handle<HeapObject>::cast(value));
+ __ sw(a2, FieldMemOperand(result, total_offset));
+ } else {
+ __ li(a2, Operand(value));
+ __ sw(a2, FieldMemOperand(result, total_offset));
+ }
+ }
+ } else {
+ UNREACHABLE();
+ }
+ }
+}
+
+
+void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
+ int size = instr->hydrogen()->total_size();
+
+ // Allocate all objects that are part of the literal in one big
+ // allocation. This avoids multiple limit checks.
+ Label allocated, runtime_allocate;
+ __ AllocateInNewSpace(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
+ __ jmp(&allocated);
+
+ __ bind(&runtime_allocate);
+ __ li(a0, Operand(Smi::FromInt(size)));
+ __ push(a0);
+ CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
+
+ __ bind(&allocated);
+ int offset = 0;
+ __ LoadHeapObject(a1, instr->hydrogen()->boilerplate());
+ EmitDeepCopy(instr->hydrogen()->boilerplate(), v0, a1, &offset);
+ ASSERT_EQ(size, offset);
+}
+
+
void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
ASSERT(ToRegister(instr->result()).is(v0));
- __ lw(t0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
- __ lw(t0, FieldMemOperand(t0, JSFunction::kLiteralsOffset));
+ Handle<FixedArray> literals(instr->environment()->closure()->literals());
+ Handle<FixedArray> constant_properties =
+ instr->hydrogen()->constant_properties();
+
+ // Set up the parameters to the stub/runtime call.
+ __ LoadHeapObject(t0, literals);
__ li(a3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
- __ li(a2, Operand(instr->hydrogen()->constant_properties()));
- __ li(a1, Operand(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
+ __ li(a2, Operand(constant_properties));
+ int flags = instr->hydrogen()->fast_elements()
+ ? ObjectLiteral::kFastElements
+ : ObjectLiteral::kNoFlags;
+ __ li(a1, Operand(Smi::FromInt(flags)));
__ Push(t0, a3, a2, a1);
- // Pick the right runtime function to call.
+ // Pick the right runtime function or stub to call.
+ int properties_count = constant_properties->length() / 2;
if (instr->hydrogen()->depth() > 1) {
CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
- } else {
+ } else if (flags != ObjectLiteral::kFastElements ||
+ properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
+ } else {
+ FastCloneShallowObjectStub stub(properties_count);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
}
@@ -4335,7 +4690,7 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
Handle<SharedFunctionInfo> shared_info = instr->shared_info();
bool pretenure = instr->hydrogen()->pretenure();
if (!pretenure && shared_info->num_literals() == 0) {
- FastNewClosureStub stub(shared_info->strict_mode_flag());
+ FastNewClosureStub stub(shared_info->language_mode());
__ li(a1, Operand(shared_info));
__ push(a1);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
@@ -4515,9 +4870,29 @@ void LCodeGen::EmitIsConstructCall(Register temp1, Register temp2) {
}
+void LCodeGen::EnsureSpaceForLazyDeopt() {
+ // Ensure that we have enough space after the previous lazy-bailout
+ // instruction for patching the code here.
+ int current_pc = masm()->pc_offset();
+ int patch_size = Deoptimizer::patch_size();
+ if (current_pc < last_lazy_deopt_pc_ + patch_size) {
+ int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
+ ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
+ while (padding_size > 0) {
+ __ nop();
+ padding_size -= Assembler::kInstrSize;
+ }
+ }
+ last_lazy_deopt_pc_ = masm()->pc_offset();
+}
+
+
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
- // No code for lazy bailout instruction. Used to capture environment after a
- // call for populating the safepoint data with deoptimization data.
+ EnsureSpaceForLazyDeopt();
+ ASSERT(instr->HasEnvironment());
+ LEnvironment* env = instr->environment();
+ RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
+ safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
@@ -4534,12 +4909,9 @@ void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
__ Push(object, key, strict);
ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
- SafepointGenerator safepoint_generator(this,
- pointers,
- env->deoptimization_index());
+ SafepointGenerator safepoint_generator(
+ this, pointers, Safepoint::kLazyDeopt);
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
}
@@ -4550,27 +4922,20 @@ void LCodeGen::DoIn(LIn* instr) {
__ Push(key, obj);
ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
- SafepointGenerator safepoint_generator(this,
- pointers,
- env->deoptimization_index());
+ SafepointGenerator safepoint_generator(this, pointers, Safepoint::kLazyDeopt);
__ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
}
void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
- {
- PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
- __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
- RegisterLazyDeoptimization(
- instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
- }
-
- // The gap code includes the restoring of the safepoint registers.
- int pc = masm()->pc_offset();
- safepoints_.SetPcAfterGap(pc);
+ PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
+ __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
+ RecordSafepointWithLazyDeopt(
+ instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
+ ASSERT(instr->HasEnvironment());
+ LEnvironment* env = instr->environment();
+ safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
@@ -4585,6 +4950,10 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
LStackCheck* instr_;
};
+ ASSERT(instr->HasEnvironment());
+ LEnvironment* env = instr->environment();
+ // There is no LLazyBailout instruction for stack-checks. We have to
+ // prepare for lazy deoptimization explicitly here.
if (instr->hydrogen()->is_function_entry()) {
// Perform stack overflow check.
Label done;
@@ -4592,7 +4961,10 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
__ Branch(&done, hs, sp, Operand(at));
StackCheckStub stub;
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ EnsureSpaceForLazyDeopt();
__ bind(&done);
+ RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
+ safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
} else {
ASSERT(instr->hydrogen()->is_backwards_branch());
// Perform stack overflow check if this goto needs it before jumping.
@@ -4600,8 +4972,13 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
new DeferredStackCheck(this, instr);
__ LoadRoot(at, Heap::kStackLimitRootIndex);
__ Branch(deferred_stack_check->entry(), lo, sp, Operand(at));
+ EnsureSpaceForLazyDeopt();
__ bind(instr->done_label());
deferred_stack_check->SetExit(instr->done_label());
+ RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
+ // Don't record a deoptimization index for the safepoint here.
+ // This will be done explicitly when emitting call and the safepoint in
+ // the deferred code.
}
}
@@ -4617,12 +4994,95 @@ void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
// If the environment were already registered, we would have no way of
// backpatching it with the spill slot operands.
ASSERT(!environment->HasBeenRegistered());
- RegisterEnvironmentForDeoptimization(environment);
+ RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
ASSERT(osr_pc_offset_ == -1);
osr_pc_offset_ = masm()->pc_offset();
}
+void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
+ Register result = ToRegister(instr->result());
+ Register object = ToRegister(instr->object());
+ __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
+ DeoptimizeIf(eq, instr->environment(), object, Operand(at));
+
+ Register null_value = t1;
+ __ LoadRoot(null_value, Heap::kNullValueRootIndex);
+ DeoptimizeIf(eq, instr->environment(), object, Operand(null_value));
+
+ __ And(at, object, kSmiTagMask);
+ DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
+
+ STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
+ __ GetObjectType(object, a1, a1);
+ DeoptimizeIf(le, instr->environment(), a1, Operand(LAST_JS_PROXY_TYPE));
+
+ Label use_cache, call_runtime;
+ ASSERT(object.is(a0));
+ __ CheckEnumCache(null_value, &call_runtime);
+
+ __ lw(result, FieldMemOperand(object, HeapObject::kMapOffset));
+ __ Branch(&use_cache);
+
+ // Get the set of properties to enumerate.
+ __ bind(&call_runtime);
+ __ push(object);
+ CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
+
+ __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
+ ASSERT(result.is(v0));
+ __ LoadRoot(at, Heap::kMetaMapRootIndex);
+ DeoptimizeIf(ne, instr->environment(), a1, Operand(at));
+ __ bind(&use_cache);
+}
+
+
+void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
+ Register map = ToRegister(instr->map());
+ Register result = ToRegister(instr->result());
+ __ LoadInstanceDescriptors(map, result);
+ __ lw(result,
+ FieldMemOperand(result, DescriptorArray::kEnumerationIndexOffset));
+ __ lw(result,
+ FieldMemOperand(result, FixedArray::SizeFor(instr->idx())));
+ DeoptimizeIf(eq, instr->environment(), result, Operand(zero_reg));
+}
+
+
+void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
+ Register object = ToRegister(instr->value());
+ Register map = ToRegister(instr->map());
+ __ lw(scratch0(), FieldMemOperand(object, HeapObject::kMapOffset));
+ DeoptimizeIf(ne, instr->environment(), map, Operand(scratch0()));
+}
+
+
+void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
+ Register object = ToRegister(instr->object());
+ Register index = ToRegister(instr->index());
+ Register result = ToRegister(instr->result());
+ Register scratch = scratch0();
+
+ Label out_of_object, done;
+ __ Branch(USE_DELAY_SLOT, &out_of_object, lt, index, Operand(zero_reg));
+ __ sll(scratch, index, kPointerSizeLog2 - kSmiTagSize); // In delay slot.
+
+ STATIC_ASSERT(kPointerSizeLog2 > kSmiTagSize);
+ __ Addu(scratch, object, scratch);
+ __ lw(result, FieldMemOperand(scratch, JSObject::kHeaderSize));
+
+ __ Branch(&done);
+
+ __ bind(&out_of_object);
+ __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
+ // Index is equal to negated out of object property index plus 1.
+ __ Subu(scratch, result, scratch);
+ __ lw(result, FieldMemOperand(scratch,
+ FixedArray::kHeaderSize - kPointerSize));
+ __ bind(&done);
+}
+
+
#undef __
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/mips/lithium-codegen-mips.h b/src/3rdparty/v8/src/mips/lithium-codegen-mips.h
index f9c013c..b508256 100644
--- a/src/3rdparty/v8/src/mips/lithium-codegen-mips.h
+++ b/src/3rdparty/v8/src/mips/lithium-codegen-mips.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -58,6 +58,7 @@ class LCodeGen BASE_EMBEDDED {
status_(UNUSED),
deferred_(8),
osr_pc_offset_(-1),
+ last_lazy_deopt_pc_(0),
resolver_(this),
expected_safepoint_kind_(Safepoint::kSimple) {
PopulateDeoptimizationLiteralsWithInlinedFunctions();
@@ -92,6 +93,9 @@ class LCodeGen BASE_EMBEDDED {
// Returns a MemOperand pointing to the high word of a DoubleStackSlot.
MemOperand ToHighMemOperand(LOperand* op) const;
+ bool IsInteger32(LConstantOperand* op) const;
+ Handle<Object> ToHandle(LConstantOperand* op) const;
+
// Try to generate code for the entire chunk, but it may fail if the
// chunk contains constructs we cannot handle. Returns true if the
// code generation attempt succeeded.
@@ -101,19 +105,20 @@ class LCodeGen BASE_EMBEDDED {
// information on it.
void FinishCode(Handle<Code> code);
- // Deferred code support.
- template<int T>
- void DoDeferredBinaryOpStub(LTemplateInstruction<1, 2, T>* instr,
- Token::Value op);
void DoDeferredNumberTagD(LNumberTagD* instr);
void DoDeferredNumberTagI(LNumberTagI* instr);
void DoDeferredTaggedToI(LTaggedToI* instr);
void DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr);
void DoDeferredStackCheck(LStackCheck* instr);
+ void DoDeferredRandom(LRandom* instr);
void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
- void DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
- Label* map_check);
+ void DoDeferredAllocateObject(LAllocateObject* instr);
+ void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
+ Label* map_check);
+
+ void DoCheckMapCommon(Register reg, Register scratch, Handle<Map> map,
+ CompareMapMode mode, LEnvironment* env);
// Parallel move support.
void DoParallelMove(LParallelMove* move);
@@ -141,16 +146,16 @@ class LCodeGen BASE_EMBEDDED {
bool is_aborted() const { return status_ == ABORTED; }
StrictModeFlag strict_mode_flag() const {
- return info()->strict_mode_flag();
+ return info()->is_classic_mode() ? kNonStrictMode : kStrictMode;
}
LChunk* chunk() const { return chunk_; }
Scope* scope() const { return scope_; }
HGraph* graph() const { return chunk_->graph(); }
- Register scratch0() { return lithiumScratchReg; }
- Register scratch1() { return lithiumScratchReg2; }
- DoubleRegister double_scratch0() { return lithiumScratchDouble; }
+ Register scratch0() { return kLithiumScratchReg; }
+ Register scratch1() { return kLithiumScratchReg2; }
+ DoubleRegister double_scratch0() { return kLithiumScratchDouble; }
int GetNextEmittedBlock(int block);
LInstruction* GetNextInstruction();
@@ -216,14 +221,15 @@ class LCodeGen BASE_EMBEDDED {
void LoadHeapObject(Register result, Handle<HeapObject> object);
- void RegisterLazyDeoptimization(LInstruction* instr,
- SafepointMode safepoint_mode);
+ void RecordSafepointWithLazyDeopt(LInstruction* instr,
+ SafepointMode safepoint_mode);
- void RegisterEnvironmentForDeoptimization(LEnvironment* environment);
+ void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
+ Safepoint::DeoptMode mode);
void DeoptimizeIf(Condition cc,
LEnvironment* environment,
- Register src1,
- const Operand& src2);
+ Register src1 = zero_reg,
+ const Operand& src2 = Operand(zero_reg));
void AddToTranslation(Translation* translation,
LOperand* op,
@@ -244,6 +250,7 @@ class LCodeGen BASE_EMBEDDED {
void DoMathSqrt(LUnaryMathOperation* instr);
void DoMathPowHalf(LUnaryMathOperation* instr);
void DoMathLog(LUnaryMathOperation* instr);
+ void DoMathTan(LUnaryMathOperation* instr);
void DoMathCos(LUnaryMathOperation* instr);
void DoMathSin(LUnaryMathOperation* instr);
@@ -251,19 +258,16 @@ class LCodeGen BASE_EMBEDDED {
void RecordSafepoint(LPointerMap* pointers,
Safepoint::Kind kind,
int arguments,
- int deoptimization_index);
- void RecordSafepoint(LPointerMap* pointers, int deoptimization_index);
- void RecordSafepoint(int deoptimization_index);
+ Safepoint::DeoptMode mode);
+ void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
+ void RecordSafepoint(Safepoint::DeoptMode mode);
void RecordSafepointWithRegisters(LPointerMap* pointers,
int arguments,
- int deoptimization_index);
+ Safepoint::DeoptMode mode);
void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers,
int arguments,
- int deoptimization_index);
+ Safepoint::DeoptMode mode);
void RecordPosition(int position);
- int LastSafepointEnd() {
- return static_cast<int>(safepoints_.GetPcAfterGap());
- }
static Condition TokenToCondition(Token::Value op, bool is_unsigned);
void EmitGoto(int block);
@@ -281,6 +285,7 @@ class LCodeGen BASE_EMBEDDED {
void EmitNumberUntagD(Register input,
DoubleRegister result,
bool deoptimize_on_undefined,
+ bool deoptimize_on_minus_zero,
LEnvironment* env);
// Emits optimized code for typeof x == "y". Modifies input register.
@@ -300,9 +305,17 @@ class LCodeGen BASE_EMBEDDED {
// true and false label should be made, to optimize fallthrough.
Condition EmitIsObject(Register input,
Register temp1,
+ Register temp2,
Label* is_not_object,
Label* is_object);
+ // Emits optimized code for %_IsString(x). Preserves input register.
+ // Returns the condition on which a final split to
+ // true and false label should be made, to optimize fallthrough.
+ Condition EmitIsString(Register input,
+ Register temp1,
+ Label* is_not_string);
+
// Emits optimized code for %_IsConstructCall().
// Caller should branch on equal condition.
void EmitIsConstructCall(Register temp1, Register temp2);
@@ -312,6 +325,13 @@ class LCodeGen BASE_EMBEDDED {
Handle<Map> type,
Handle<String> name);
+ // Emits optimized code to deep-copy the contents of statically known
+ // object graphs (e.g. object literal boilerplate).
+ void EmitDeepCopy(Handle<JSObject> object,
+ Register result,
+ Register source,
+ int* offset);
+
struct JumpTableEntry {
explicit inline JumpTableEntry(Address entry)
: label(),
@@ -320,6 +340,8 @@ class LCodeGen BASE_EMBEDDED {
Address address;
};
+ void EnsureSpaceForLazyDeopt();
+
LChunk* const chunk_;
MacroAssembler* const masm_;
CompilationInfo* const info_;
@@ -336,6 +358,7 @@ class LCodeGen BASE_EMBEDDED {
TranslationBuffer translations_;
ZoneList<LDeferredCode*> deferred_;
int osr_pc_offset_;
+ int last_lazy_deopt_pc_;
// Builder that keeps track of safepoints in the code. The table
// itself is emitted at the end of the generated code.
@@ -406,7 +429,7 @@ class LDeferredCode: public ZoneObject {
virtual void Generate() = 0;
virtual LInstruction* instr() = 0;
- void SetExit(Label *exit) { external_exit_ = exit; }
+ void SetExit(Label* exit) { external_exit_ = exit; }
Label* entry() { return &entry_; }
Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
int instruction_index() const { return instruction_index_; }
diff --git a/src/3rdparty/v8/src/mips/lithium-gap-resolver-mips.cc b/src/3rdparty/v8/src/mips/lithium-gap-resolver-mips.cc
index 8f7f89c..4a5fbe3 100644
--- a/src/3rdparty/v8/src/mips/lithium-gap-resolver-mips.cc
+++ b/src/3rdparty/v8/src/mips/lithium-gap-resolver-mips.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -33,9 +33,6 @@
namespace v8 {
namespace internal {
-static const Register kSavedValueRegister = lithiumScratchReg;
-static const DoubleRegister kSavedDoubleValueRegister = lithiumScratchDouble;
-
LGapResolver::LGapResolver(LCodeGen* owner)
: cgen_(owner),
moves_(32),
@@ -171,13 +168,13 @@ void LGapResolver::BreakCycle(int index) {
LOperand* source = moves_[index].source();
saved_destination_ = moves_[index].destination();
if (source->IsRegister()) {
- __ mov(kSavedValueRegister, cgen_->ToRegister(source));
+ __ mov(kLithiumScratchReg, cgen_->ToRegister(source));
} else if (source->IsStackSlot()) {
- __ lw(kSavedValueRegister, cgen_->ToMemOperand(source));
+ __ lw(kLithiumScratchReg, cgen_->ToMemOperand(source));
} else if (source->IsDoubleRegister()) {
- __ mov_d(kSavedDoubleValueRegister, cgen_->ToDoubleRegister(source));
+ __ mov_d(kLithiumScratchDouble, cgen_->ToDoubleRegister(source));
} else if (source->IsDoubleStackSlot()) {
- __ ldc1(kSavedDoubleValueRegister, cgen_->ToMemOperand(source));
+ __ ldc1(kLithiumScratchDouble, cgen_->ToMemOperand(source));
} else {
UNREACHABLE();
}
@@ -190,16 +187,16 @@ void LGapResolver::RestoreValue() {
ASSERT(in_cycle_);
ASSERT(saved_destination_ != NULL);
- // Spilled value is in kSavedValueRegister or kSavedDoubleValueRegister.
+ // Spilled value is in kLithiumScratchReg or kLithiumScratchDouble.
if (saved_destination_->IsRegister()) {
- __ mov(cgen_->ToRegister(saved_destination_), kSavedValueRegister);
+ __ mov(cgen_->ToRegister(saved_destination_), kLithiumScratchReg);
} else if (saved_destination_->IsStackSlot()) {
- __ sw(kSavedValueRegister, cgen_->ToMemOperand(saved_destination_));
+ __ sw(kLithiumScratchReg, cgen_->ToMemOperand(saved_destination_));
} else if (saved_destination_->IsDoubleRegister()) {
__ mov_d(cgen_->ToDoubleRegister(saved_destination_),
- kSavedDoubleValueRegister);
+ kLithiumScratchDouble);
} else if (saved_destination_->IsDoubleStackSlot()) {
- __ sdc1(kSavedDoubleValueRegister,
+ __ sdc1(kLithiumScratchDouble,
cgen_->ToMemOperand(saved_destination_));
} else {
UNREACHABLE();
@@ -239,28 +236,38 @@ void LGapResolver::EmitMove(int index) {
// Therefore we can't use 'at'. It is OK if the read from the source
// destroys 'at', since that happens before the value is read.
// This uses only a single reg of the double reg-pair.
- __ lwc1(kSavedDoubleValueRegister, source_operand);
- __ swc1(kSavedDoubleValueRegister, destination_operand);
+ __ lwc1(kLithiumScratchDouble, source_operand);
+ __ swc1(kLithiumScratchDouble, destination_operand);
} else {
__ lw(at, source_operand);
__ sw(at, destination_operand);
}
} else {
- __ lw(kSavedValueRegister, source_operand);
- __ sw(kSavedValueRegister, destination_operand);
+ __ lw(kLithiumScratchReg, source_operand);
+ __ sw(kLithiumScratchReg, destination_operand);
}
}
} else if (source->IsConstantOperand()) {
- Operand source_operand = cgen_->ToOperand(source);
+ LConstantOperand* constant_source = LConstantOperand::cast(source);
if (destination->IsRegister()) {
- __ li(cgen_->ToRegister(destination), source_operand);
+ Register dst = cgen_->ToRegister(destination);
+ if (cgen_->IsInteger32(constant_source)) {
+ __ li(dst, Operand(cgen_->ToInteger32(constant_source)));
+ } else {
+ __ LoadObject(dst, cgen_->ToHandle(constant_source));
+ }
} else {
ASSERT(destination->IsStackSlot());
ASSERT(!in_cycle_); // Constant moves happen after all cycles are gone.
- MemOperand destination_operand = cgen_->ToMemOperand(destination);
- __ li(kSavedValueRegister, source_operand);
- __ sw(kSavedValueRegister, cgen_->ToMemOperand(destination));
+ if (cgen_->IsInteger32(constant_source)) {
+ __ li(kLithiumScratchReg,
+ Operand(cgen_->ToInteger32(constant_source)));
+ } else {
+ __ LoadObject(kLithiumScratchReg,
+ cgen_->ToHandle(constant_source));
+ }
+ __ sw(kLithiumScratchReg, cgen_->ToMemOperand(destination));
}
} else if (source->IsDoubleRegister()) {
@@ -281,19 +288,19 @@ void LGapResolver::EmitMove(int index) {
ASSERT(destination->IsDoubleStackSlot());
MemOperand destination_operand = cgen_->ToMemOperand(destination);
if (in_cycle_) {
- // kSavedDoubleValueRegister was used to break the cycle,
- // but kSavedValueRegister is free.
+ // kLithiumScratchDouble was used to break the cycle,
+ // but kLithiumScratchReg is free.
MemOperand source_high_operand =
cgen_->ToHighMemOperand(source);
MemOperand destination_high_operand =
cgen_->ToHighMemOperand(destination);
- __ lw(kSavedValueRegister, source_operand);
- __ sw(kSavedValueRegister, destination_operand);
- __ lw(kSavedValueRegister, source_high_operand);
- __ sw(kSavedValueRegister, destination_high_operand);
+ __ lw(kLithiumScratchReg, source_operand);
+ __ sw(kLithiumScratchReg, destination_operand);
+ __ lw(kLithiumScratchReg, source_high_operand);
+ __ sw(kLithiumScratchReg, destination_high_operand);
} else {
- __ ldc1(kSavedDoubleValueRegister, source_operand);
- __ sdc1(kSavedDoubleValueRegister, destination_operand);
+ __ ldc1(kLithiumScratchDouble, source_operand);
+ __ sdc1(kLithiumScratchDouble, destination_operand);
}
}
} else {
diff --git a/src/3rdparty/v8/src/mips/lithium-mips.cc b/src/3rdparty/v8/src/mips/lithium-mips.cc
index a9a302c..e0dd801 100644
--- a/src/3rdparty/v8/src/mips/lithium-mips.cc
+++ b/src/3rdparty/v8/src/mips/lithium-mips.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -228,6 +228,13 @@ void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
}
+void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
+ stream->Add("if is_string(");
+ InputAt(0)->PrintTo(stream);
+ stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
+}
+
+
void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_smi(");
InputAt(0)->PrintTo(stream);
@@ -242,6 +249,14 @@ void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
}
+void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
+ stream->Add("if string_compare(");
+ InputAt(0)->PrintTo(stream);
+ InputAt(1)->PrintTo(stream);
+ stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
+}
+
+
void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if has_instance_type(");
InputAt(0)->PrintTo(stream);
@@ -425,7 +440,7 @@ LOperand* LChunk::GetNextSpillSlot(bool is_double) {
void LChunk::MarkEmptyBlocks() {
- HPhase phase("Mark empty blocks", this);
+ HPhase phase("L_Mark empty blocks", this);
for (int i = 0; i < graph()->blocks()->length(); ++i) {
HBasicBlock* block = graph()->blocks()->at(i);
int first = block->first_instruction_index();
@@ -461,7 +476,7 @@ void LChunk::MarkEmptyBlocks() {
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
- LInstructionGap* gap = new LInstructionGap(block);
+ LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
int index = -1;
if (instr->IsControl()) {
instructions_.Add(gap);
@@ -536,8 +551,8 @@ Representation LChunk::LookupLiteralRepresentation(
LChunk* LChunkBuilder::Build() {
ASSERT(is_unused());
- chunk_ = new LChunk(info(), graph());
- HPhase phase("Building chunk", chunk_);
+ chunk_ = new(zone()) LChunk(info(), graph());
+ HPhase phase("L_Building chunk", chunk_);
status_ = BUILDING;
const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
for (int i = 0; i < blocks->length(); i++) {
@@ -566,20 +581,15 @@ void LChunkBuilder::Abort(const char* format, ...) {
}
-LRegister* LChunkBuilder::ToOperand(Register reg) {
- return LRegister::Create(Register::ToAllocationIndex(reg));
-}
-
-
LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
- return new LUnallocated(LUnallocated::FIXED_REGISTER,
- Register::ToAllocationIndex(reg));
+ return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
+ Register::ToAllocationIndex(reg));
}
LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
- return new LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
- DoubleRegister::ToAllocationIndex(reg));
+ return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
+ DoubleRegister::ToAllocationIndex(reg));
}
@@ -594,29 +604,29 @@ LOperand* LChunkBuilder::UseFixedDouble(HValue* value, DoubleRegister reg) {
LOperand* LChunkBuilder::UseRegister(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}
LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
return Use(value,
- new LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
- LUnallocated::USED_AT_START));
+ new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
+ LUnallocated::USED_AT_START));
}
LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::WRITABLE_REGISTER));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
}
LOperand* LChunkBuilder::Use(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::NONE));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
}
LOperand* LChunkBuilder::UseAtStart(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::NONE,
+ return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
LUnallocated::USED_AT_START));
}
@@ -652,7 +662,7 @@ LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
LOperand* LChunkBuilder::UseAny(HValue* value) {
return value->IsConstant()
? chunk_->DefineConstantOperand(HConstant::cast(value))
- : Use(value, new LUnallocated(LUnallocated::ANY));
+ : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
}
@@ -661,7 +671,7 @@ LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
HInstruction* instr = HInstruction::cast(value);
VisitInstruction(instr);
}
- allocator_->RecordUse(value, operand);
+ operand->set_virtual_register(value->id());
return operand;
}
@@ -669,36 +679,33 @@ LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
template<int I, int T>
LInstruction* LChunkBuilder::Define(LTemplateInstruction<1, I, T>* instr,
LUnallocated* result) {
- allocator_->RecordDefinition(current_instruction_, result);
+ result->set_virtual_register(current_instruction_->id());
instr->set_result(result);
return instr;
}
template<int I, int T>
-LInstruction* LChunkBuilder::Define(LTemplateInstruction<1, I, T>* instr) {
- return Define(instr, new LUnallocated(LUnallocated::NONE));
-}
-
-
-template<int I, int T>
LInstruction* LChunkBuilder::DefineAsRegister(
LTemplateInstruction<1, I, T>* instr) {
- return Define(instr, new LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
+ return Define(instr,
+ new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}
template<int I, int T>
LInstruction* LChunkBuilder::DefineAsSpilled(
LTemplateInstruction<1, I, T>* instr, int index) {
- return Define(instr, new LUnallocated(LUnallocated::FIXED_SLOT, index));
+ return Define(instr,
+ new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
}
template<int I, int T>
LInstruction* LChunkBuilder::DefineSameAsFirst(
LTemplateInstruction<1, I, T>* instr) {
- return Define(instr, new LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
+ return Define(instr,
+ new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
}
@@ -780,44 +787,46 @@ LInstruction* LChunkBuilder::MarkAsSaveDoubles(LInstruction* instr) {
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
ASSERT(!instr->HasPointerMap());
- instr->set_pointer_map(new LPointerMap(position_));
+ instr->set_pointer_map(new(zone()) LPointerMap(position_));
return instr;
}
LUnallocated* LChunkBuilder::TempRegister() {
- LUnallocated* operand = new LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
- allocator_->RecordTemporary(operand);
+ LUnallocated* operand =
+ new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
+ operand->set_virtual_register(allocator_->GetVirtualRegister());
+ if (!allocator_->AllocationOk()) Abort("Not enough virtual registers.");
return operand;
}
LOperand* LChunkBuilder::FixedTemp(Register reg) {
LUnallocated* operand = ToUnallocated(reg);
- allocator_->RecordTemporary(operand);
+ ASSERT(operand->HasFixedPolicy());
return operand;
}
LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
LUnallocated* operand = ToUnallocated(reg);
- allocator_->RecordTemporary(operand);
+ ASSERT(operand->HasFixedPolicy());
return operand;
}
LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
- return new LLabel(instr->block());
+ return new(zone()) LLabel(instr->block());
}
LInstruction* LChunkBuilder::DoSoftDeoptimize(HSoftDeoptimize* instr) {
- return AssignEnvironment(new LDeoptimize);
+ return AssignEnvironment(new(zone()) LDeoptimize);
}
LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
- return AssignEnvironment(new LDeoptimize);
+ return AssignEnvironment(new(zone()) LDeoptimize);
}
@@ -829,7 +838,7 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op,
LOperand* left = UseFixed(instr->left(), a1);
LOperand* right = UseFixed(instr->right(), a0);
- LArithmeticT* result = new LArithmeticT(op, left, right);
+ LArithmeticT* result = new(zone()) LArithmeticT(op, left, right);
return MarkAsCall(DefineFixed(result, v0), instr);
}
@@ -863,7 +872,7 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op,
}
LInstruction* result =
- DefineAsRegister(new LShiftI(op, left, right, does_deopt));
+ DefineAsRegister(new(zone()) LShiftI(op, left, right, does_deopt));
return does_deopt ? AssignEnvironment(result) : result;
}
@@ -876,7 +885,7 @@ LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
ASSERT(op != Token::MOD);
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- LArithmeticD* result = new LArithmeticD(op, left, right);
+ LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
return DefineAsRegister(result);
}
@@ -894,7 +903,8 @@ LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
ASSERT(right->representation().IsTagged());
LOperand* left_operand = UseFixed(left, a1);
LOperand* right_operand = UseFixed(right, a0);
- LArithmeticT* result = new LArithmeticT(op, left_operand, right_operand);
+ LArithmeticT* result =
+ new(zone()) LArithmeticT(op, left_operand, right_operand);
return MarkAsCall(DefineFixed(result, v0), instr);
}
@@ -990,14 +1000,18 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
LEnvironment* outer =
CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
int ast_id = hydrogen_env->ast_id();
- ASSERT(ast_id != AstNode::kNoNumber);
+ ASSERT(ast_id != AstNode::kNoNumber ||
+ hydrogen_env->frame_type() != JS_FUNCTION);
int value_count = hydrogen_env->length();
- LEnvironment* result = new LEnvironment(hydrogen_env->closure(),
- ast_id,
- hydrogen_env->parameter_count(),
- argument_count_,
- value_count,
- outer);
+ LEnvironment* result = new(zone()) LEnvironment(
+ hydrogen_env->closure(),
+ hydrogen_env->frame_type(),
+ ast_id,
+ hydrogen_env->parameter_count(),
+ argument_count_,
+ value_count,
+ outer);
+ int argument_index = *argument_index_accumulator;
for (int i = 0; i < value_count; ++i) {
if (hydrogen_env->is_special_index(i)) continue;
@@ -1006,31 +1020,44 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
if (value->IsArgumentsObject()) {
op = NULL;
} else if (value->IsPushArgument()) {
- op = new LArgument((*argument_index_accumulator)++);
+ op = new(zone()) LArgument(argument_index++);
} else {
op = UseAny(value);
}
result->AddValue(op, value->representation());
}
+ if (hydrogen_env->frame_type() == JS_FUNCTION) {
+ *argument_index_accumulator = argument_index;
+ }
+
return result;
}
LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
- return new LGoto(instr->FirstSuccessor()->block_id());
+ return new(zone()) LGoto(instr->FirstSuccessor()->block_id());
}
LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
- HValue* v = instr->value();
- if (v->EmitAtUses()) {
- HBasicBlock* successor = HConstant::cast(v)->ToBoolean()
+ HValue* value = instr->value();
+ if (value->EmitAtUses()) {
+ HBasicBlock* successor = HConstant::cast(value)->ToBoolean()
? instr->FirstSuccessor()
: instr->SecondSuccessor();
- return new LGoto(successor->block_id());
+ return new(zone()) LGoto(successor->block_id());
+ }
+
+ LBranch* result = new(zone()) LBranch(UseRegister(value));
+ // Tagged values that are not known smis or booleans require a
+ // deoptimization environment.
+ Representation rep = value->representation();
+ HType type = value->type();
+ if (rep.IsTagged() && !type.IsSmi() && !type.IsBoolean()) {
+ return AssignEnvironment(result);
}
- return AssignEnvironment(new LBranch(UseRegister(v)));
+ return result;
}
@@ -1038,24 +1065,25 @@ LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
ASSERT(instr->value()->representation().IsTagged());
LOperand* value = UseRegisterAtStart(instr->value());
LOperand* temp = TempRegister();
- return new LCmpMapAndBranch(value, temp);
+ return new(zone()) LCmpMapAndBranch(value, temp);
}
LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
- return DefineAsRegister(new LArgumentsLength(UseRegister(length->value())));
+ return DefineAsRegister(
+ new(zone()) LArgumentsLength(UseRegister(length->value())));
}
LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
- return DefineAsRegister(new LArgumentsElements);
+ return DefineAsRegister(new(zone()) LArgumentsElements);
}
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LInstanceOf* result =
- new LInstanceOf(UseFixed(instr->left(), a0),
- UseFixed(instr->right(), a1));
+ new(zone()) LInstanceOf(UseFixed(instr->left(), a0),
+ UseFixed(instr->right(), a1));
return MarkAsCall(DefineFixed(result, v0), instr);
}
@@ -1063,20 +1091,29 @@ LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
HInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* result =
- new LInstanceOfKnownGlobal(UseFixed(instr->left(), a0), FixedTemp(t0));
+ new(zone()) LInstanceOfKnownGlobal(UseFixed(instr->left(), a0),
+ FixedTemp(t0));
return MarkAsCall(DefineFixed(result, v0), instr);
}
+LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
+ LOperand* receiver = UseRegisterAtStart(instr->receiver());
+ LOperand* function = UseRegisterAtStart(instr->function());
+ LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
+ return AssignEnvironment(DefineSameAsFirst(result));
+}
+
+
LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
LOperand* function = UseFixed(instr->function(), a1);
LOperand* receiver = UseFixed(instr->receiver(), a0);
LOperand* length = UseFixed(instr->length(), a2);
LOperand* elements = UseFixed(instr->elements(), a3);
- LApplyArguments* result = new LApplyArguments(function,
- receiver,
- length,
- elements);
+ LApplyArguments* result = new(zone()) LApplyArguments(function,
+ receiver,
+ length,
+ elements);
return MarkAsCall(DefineFixed(result, v0), instr, CAN_DEOPTIMIZE_EAGERLY);
}
@@ -1084,63 +1121,77 @@ LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
++argument_count_;
LOperand* argument = Use(instr->argument());
- return new LPushArgument(argument);
+ return new(zone()) LPushArgument(argument);
}
LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
- return instr->HasNoUses() ? NULL : DefineAsRegister(new LThisFunction);
+ return instr->HasNoUses()
+ ? NULL
+ : DefineAsRegister(new(zone()) LThisFunction);
}
LInstruction* LChunkBuilder::DoContext(HContext* instr) {
- return instr->HasNoUses() ? NULL : DefineAsRegister(new LContext);
+ return instr->HasNoUses() ? NULL : DefineAsRegister(new(zone()) LContext);
}
LInstruction* LChunkBuilder::DoOuterContext(HOuterContext* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LOuterContext(context));
+ return DefineAsRegister(new(zone()) LOuterContext(context));
+}
+
+
+LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
+ return MarkAsCall(new(zone()) LDeclareGlobals, instr);
}
LInstruction* LChunkBuilder::DoGlobalObject(HGlobalObject* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LGlobalObject(context));
+ return DefineAsRegister(new(zone()) LGlobalObject(context));
}
LInstruction* LChunkBuilder::DoGlobalReceiver(HGlobalReceiver* instr) {
LOperand* global_object = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LGlobalReceiver(global_object));
+ return DefineAsRegister(new(zone()) LGlobalReceiver(global_object));
}
LInstruction* LChunkBuilder::DoCallConstantFunction(
HCallConstantFunction* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallConstantFunction, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallConstantFunction, v0), instr);
}
LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
LOperand* function = UseFixed(instr->function(), a1);
argument_count_ -= instr->argument_count();
- LInvokeFunction* result = new LInvokeFunction(function);
+ LInvokeFunction* result = new(zone()) LInvokeFunction(function);
return MarkAsCall(DefineFixed(result, v0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
}
LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
BuiltinFunctionId op = instr->op();
- if (op == kMathLog || op == kMathSin || op == kMathCos) {
+ if (op == kMathLog || op == kMathSin || op == kMathCos || op == kMathTan) {
LOperand* input = UseFixedDouble(instr->value(), f4);
- LUnaryMathOperation* result = new LUnaryMathOperation(input, NULL);
+ LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, NULL);
return MarkAsCall(DefineFixedDouble(result, f4), instr);
+ } else if (op == kMathPowHalf) {
+ // Input cannot be the same as the result.
+ // See lithium-codegen-mips.cc::DoMathPowHalf.
+ LOperand* input = UseFixedDouble(instr->value(), f8);
+ LOperand* temp = FixedTemp(f6);
+ LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, temp);
+ return DefineFixedDouble(result, f4);
} else {
LOperand* input = UseRegisterAtStart(instr->value());
LOperand* temp = (op == kMathFloor) ? TempRegister() : NULL;
- LUnaryMathOperation* result = new LUnaryMathOperation(input, temp);
+ LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input, temp);
switch (op) {
case kMathAbs:
return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
@@ -1150,8 +1201,6 @@ LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
return DefineAsRegister(result);
case kMathRound:
return AssignEnvironment(DefineAsRegister(result));
- case kMathPowHalf:
- return DefineAsRegister(result);
default:
UNREACHABLE();
return NULL;
@@ -1164,45 +1213,47 @@ LInstruction* LChunkBuilder::DoCallKeyed(HCallKeyed* instr) {
ASSERT(instr->key()->representation().IsTagged());
argument_count_ -= instr->argument_count();
LOperand* key = UseFixed(instr->key(), a2);
- return MarkAsCall(DefineFixed(new LCallKeyed(key), v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallKeyed(key), v0), instr);
}
LInstruction* LChunkBuilder::DoCallNamed(HCallNamed* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallNamed, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallNamed, v0), instr);
}
LInstruction* LChunkBuilder::DoCallGlobal(HCallGlobal* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallGlobal, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallGlobal, v0), instr);
}
LInstruction* LChunkBuilder::DoCallKnownGlobal(HCallKnownGlobal* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallKnownGlobal, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallKnownGlobal, v0), instr);
}
LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
LOperand* constructor = UseFixed(instr->constructor(), a1);
argument_count_ -= instr->argument_count();
- LCallNew* result = new LCallNew(constructor);
+ LCallNew* result = new(zone()) LCallNew(constructor);
return MarkAsCall(DefineFixed(result, v0), instr);
}
LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
+ LOperand* function = UseFixed(instr->function(), a1);
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallFunction, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallFunction(function), v0),
+ instr);
}
LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallRuntime, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallRuntime, v0), instr);
}
@@ -1228,7 +1279,7 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
- return DefineAsRegister(new LBitI(left, right));
+ return DefineAsRegister(new(zone()) LBitI(left, right));
} else {
ASSERT(instr->representation().IsTagged());
ASSERT(instr->left()->representation().IsTagged());
@@ -1236,7 +1287,7 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LOperand* left = UseFixed(instr->left(), a1);
LOperand* right = UseFixed(instr->right(), a0);
- LArithmeticT* result = new LArithmeticT(instr->op(), left, right);
+ LArithmeticT* result = new(zone()) LArithmeticT(instr->op(), left, right);
return MarkAsCall(DefineFixed(result, v0), instr);
}
}
@@ -1245,7 +1296,8 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
ASSERT(instr->value()->representation().IsInteger32());
ASSERT(instr->representation().IsInteger32());
- return DefineAsRegister(new LBitNotI(UseRegisterAtStart(instr->value())));
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return DefineAsRegister(new(zone()) LBitNotI(value));
}
@@ -1261,7 +1313,7 @@ LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
LOperand* dividend = UseFixed(instr->left(), a0);
LOperand* divisor = UseFixed(instr->right(), a1);
return AssignEnvironment(AssignPointerMap(
- DefineFixed(new LDivI(dividend, divisor), v0)));
+ DefineFixed(new(zone()) LDivI(dividend, divisor), v0)));
} else {
return DoArithmeticT(Token::DIV, instr);
}
@@ -1277,15 +1329,15 @@ LInstruction* LChunkBuilder::DoMod(HMod* instr) {
if (instr->HasPowerOf2Divisor()) {
ASSERT(!instr->CheckFlag(HValue::kCanBeDivByZero));
LOperand* value = UseRegisterAtStart(instr->left());
- mod = new LModI(value, UseOrConstant(instr->right()));
+ mod = new(zone()) LModI(value, UseOrConstant(instr->right()));
} else {
LOperand* dividend = UseRegister(instr->left());
LOperand* divisor = UseRegister(instr->right());
- mod = new LModI(dividend,
- divisor,
- TempRegister(),
- FixedTemp(f20),
- FixedTemp(f22));
+ mod = new(zone()) LModI(dividend,
+ divisor,
+ TempRegister(),
+ FixedTemp(f20),
+ FixedTemp(f22));
}
if (instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
@@ -1303,7 +1355,7 @@ LInstruction* LChunkBuilder::DoMod(HMod* instr) {
// TODO(fschneider): Allow any register as input registers.
LOperand* left = UseFixedDouble(instr->left(), f2);
LOperand* right = UseFixedDouble(instr->right(), f4);
- LArithmeticD* result = new LArithmeticD(Token::MOD, left, right);
+ LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right);
return MarkAsCall(DefineFixedDouble(result, f2), instr);
}
}
@@ -1324,7 +1376,12 @@ LInstruction* LChunkBuilder::DoMul(HMul* instr) {
} else {
left = UseRegisterAtStart(instr->LeastConstantOperand());
}
- return AssignEnvironment(DefineAsRegister(new LMulI(left, right, temp)));
+ LMulI* mul = new(zone()) LMulI(left, right, temp);
+ if (instr->CheckFlag(HValue::kCanOverflow) ||
+ instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ AssignEnvironment(mul);
+ }
+ return DefineAsRegister(mul);
} else if (instr->representation().IsDouble()) {
return DoArithmeticD(Token::MUL, instr);
@@ -1341,7 +1398,7 @@ LInstruction* LChunkBuilder::DoSub(HSub* instr) {
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseOrConstantAtStart(instr->right());
- LSubI* sub = new LSubI(left, right);
+ LSubI* sub = new(zone()) LSubI(left, right);
LInstruction* result = DefineAsRegister(sub);
if (instr->CheckFlag(HValue::kCanOverflow)) {
result = AssignEnvironment(result);
@@ -1361,7 +1418,7 @@ LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
- LAddI* add = new LAddI(left, right);
+ LAddI* add = new(zone()) LAddI(left, right);
LInstruction* result = DefineAsRegister(add);
if (instr->CheckFlag(HValue::kCanOverflow)) {
result = AssignEnvironment(result);
@@ -1385,21 +1442,30 @@ LInstruction* LChunkBuilder::DoPower(HPower* instr) {
LOperand* left = UseFixedDouble(instr->left(), f2);
LOperand* right = exponent_type.IsDouble() ?
UseFixedDouble(instr->right(), f4) :
- UseFixed(instr->right(), a0);
- LPower* result = new LPower(left, right);
- return MarkAsCall(DefineFixedDouble(result, f6),
+ UseFixed(instr->right(), a2);
+ LPower* result = new(zone()) LPower(left, right);
+ return MarkAsCall(DefineFixedDouble(result, f0),
instr,
CAN_DEOPTIMIZE_EAGERLY);
}
+LInstruction* LChunkBuilder::DoRandom(HRandom* instr) {
+ ASSERT(instr->representation().IsDouble());
+ ASSERT(instr->global_object()->representation().IsTagged());
+ LOperand* global_object = UseFixed(instr->global_object(), a0);
+ LRandom* result = new(zone()) LRandom(global_object);
+ return MarkAsCall(DefineFixedDouble(result, f0), instr);
+}
+
+
LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
Representation r = instr->GetInputRepresentation();
ASSERT(instr->left()->representation().IsTagged());
ASSERT(instr->right()->representation().IsTagged());
LOperand* left = UseFixed(instr->left(), a1);
LOperand* right = UseFixed(instr->right(), a0);
- LCmpT* result = new LCmpT(left, right);
+ LCmpT* result = new(zone()) LCmpT(left, right);
return MarkAsCall(DefineFixed(result, v0), instr);
}
@@ -1412,14 +1478,14 @@ LInstruction* LChunkBuilder::DoCompareIDAndBranch(
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterOrConstantAtStart(instr->left());
LOperand* right = UseRegisterOrConstantAtStart(instr->right());
- return new LCmpIDAndBranch(left, right);
+ return new(zone()) LCmpIDAndBranch(left, right);
} else {
ASSERT(r.IsDouble());
ASSERT(instr->left()->representation().IsDouble());
ASSERT(instr->right()->representation().IsDouble());
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- return new LCmpIDAndBranch(left, right);
+ return new(zone()) LCmpIDAndBranch(left, right);
}
}
@@ -1428,47 +1494,70 @@ LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
HCompareObjectEqAndBranch* instr) {
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- return new LCmpObjectEqAndBranch(left, right);
+ return new(zone()) LCmpObjectEqAndBranch(left, right);
}
LInstruction* LChunkBuilder::DoCompareConstantEqAndBranch(
HCompareConstantEqAndBranch* instr) {
- return new LCmpConstantEqAndBranch(UseRegisterAtStart(instr->value()));
+ return new(zone()) LCmpConstantEqAndBranch(
+ UseRegisterAtStart(instr->value()));
}
LInstruction* LChunkBuilder::DoIsNilAndBranch(HIsNilAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LIsNilAndBranch(UseRegisterAtStart(instr->value()));
+ return new(zone()) LIsNilAndBranch(UseRegisterAtStart(instr->value()));
}
LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
LOperand* temp = TempRegister();
- return new LIsObjectAndBranch(UseRegisterAtStart(instr->value()), temp);
+ return new(zone()) LIsObjectAndBranch(UseRegisterAtStart(instr->value()),
+ temp);
+}
+
+
+LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
+ ASSERT(instr->value()->representation().IsTagged());
+ LOperand* temp = TempRegister();
+ return new(zone()) LIsStringAndBranch(UseRegisterAtStart(instr->value()),
+ temp);
}
LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LIsSmiAndBranch(Use(instr->value()));
+ return new(zone()) LIsSmiAndBranch(Use(instr->value()));
}
LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
HIsUndetectableAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LIsUndetectableAndBranch(UseRegisterAtStart(instr->value()),
- TempRegister());
+ return new(zone()) LIsUndetectableAndBranch(
+ UseRegisterAtStart(instr->value()), TempRegister());
+}
+
+
+LInstruction* LChunkBuilder::DoStringCompareAndBranch(
+ HStringCompareAndBranch* instr) {
+ ASSERT(instr->left()->representation().IsTagged());
+ ASSERT(instr->right()->representation().IsTagged());
+ LOperand* left = UseFixed(instr->left(), a1);
+ LOperand* right = UseFixed(instr->right(), a0);
+ LStringCompareAndBranch* result =
+ new(zone()) LStringCompareAndBranch(left, right);
+ return MarkAsCall(result, instr);
}
LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
HHasInstanceTypeAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LHasInstanceTypeAndBranch(UseRegisterAtStart(instr->value()));
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return new(zone()) LHasInstanceTypeAndBranch(value);
}
@@ -1477,14 +1566,14 @@ LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
ASSERT(instr->value()->representation().IsTagged());
LOperand* value = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LGetCachedArrayIndex(value));
+ return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
}
LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
HHasCachedArrayIndexAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LHasCachedArrayIndexAndBranch(
+ return new(zone()) LHasCachedArrayIndexAndBranch(
UseRegisterAtStart(instr->value()));
}
@@ -1492,40 +1581,48 @@ LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
HClassOfTestAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LClassOfTestAndBranch(UseTempRegister(instr->value()),
- TempRegister());
+ return new(zone()) LClassOfTestAndBranch(UseRegister(instr->value()),
+ TempRegister());
}
LInstruction* LChunkBuilder::DoJSArrayLength(HJSArrayLength* instr) {
LOperand* array = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LJSArrayLength(array));
+ return DefineAsRegister(new(zone()) LJSArrayLength(array));
}
LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
HFixedArrayBaseLength* instr) {
LOperand* array = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LFixedArrayBaseLength(array));
+ return DefineAsRegister(new(zone()) LFixedArrayBaseLength(array));
}
LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
LOperand* object = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LElementsKind(object));
+ return DefineAsRegister(new(zone()) LElementsKind(object));
}
LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
LOperand* object = UseRegister(instr->value());
- LValueOf* result = new LValueOf(object, TempRegister());
- return AssignEnvironment(DefineAsRegister(result));
+ LValueOf* result = new(zone()) LValueOf(object, TempRegister());
+ return DefineAsRegister(result);
+}
+
+
+LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
+ LOperand* object = UseFixed(instr->value(), a0);
+ LDateField* result = new LDateField(object, FixedTemp(a1), instr->index());
+ return MarkAsCall(DefineFixed(result, v0), instr);
}
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
- return AssignEnvironment(new LBoundsCheck(UseRegisterAtStart(instr->index()),
- UseRegister(instr->length())));
+ LOperand* value = UseRegisterAtStart(instr->index());
+ LOperand* length = UseRegister(instr->length());
+ return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
}
@@ -1538,7 +1635,7 @@ LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
LOperand* value = UseFixed(instr->value(), a0);
- return MarkAsCall(new LThrow(value), instr);
+ return MarkAsCall(new(zone()) LThrow(value), instr);
}
@@ -1561,22 +1658,25 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
if (from.IsTagged()) {
if (to.IsDouble()) {
LOperand* value = UseRegister(instr->value());
- LNumberUntagD* res = new LNumberUntagD(value);
+ LNumberUntagD* res = new(zone()) LNumberUntagD(value);
return AssignEnvironment(DefineAsRegister(res));
} else {
ASSERT(to.IsInteger32());
- LOperand* value = UseRegister(instr->value());
+ LOperand* value = UseRegisterAtStart(instr->value());
bool needs_check = !instr->value()->type().IsSmi();
LInstruction* res = NULL;
if (!needs_check) {
- res = DefineSameAsFirst(new LSmiUntag(value, needs_check));
+ res = DefineAsRegister(new(zone()) LSmiUntag(value, needs_check));
} else {
LOperand* temp1 = TempRegister();
LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister()
: NULL;
LOperand* temp3 = instr->CanTruncateToInt32() ? FixedTemp(f22)
: NULL;
- res = DefineSameAsFirst(new LTaggedToI(value, temp1, temp2, temp3));
+ res = DefineSameAsFirst(new(zone()) LTaggedToI(value,
+ temp1,
+ temp2,
+ temp3));
res = AssignEnvironment(res);
}
return res;
@@ -1590,32 +1690,31 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
// Make sure that the temp and result_temp registers are
// different.
LUnallocated* result_temp = TempRegister();
- LNumberTagD* result = new LNumberTagD(value, temp1, temp2);
+ LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
Define(result, result_temp);
return AssignPointerMap(result);
} else {
ASSERT(to.IsInteger32());
LOperand* value = UseRegister(instr->value());
- LDoubleToI* res =
- new LDoubleToI(value,
- TempRegister(),
- instr->CanTruncateToInt32() ? TempRegister() : NULL);
+ LOperand* temp1 = TempRegister();
+ LOperand* temp2 = instr->CanTruncateToInt32() ? TempRegister() : NULL;
+ LDoubleToI* res = new(zone()) LDoubleToI(value, temp1, temp2);
return AssignEnvironment(DefineAsRegister(res));
}
} else if (from.IsInteger32()) {
if (to.IsTagged()) {
HValue* val = instr->value();
- LOperand* value = UseRegister(val);
+ LOperand* value = UseRegisterAtStart(val);
if (val->HasRange() && val->range()->IsInSmiRange()) {
- return DefineSameAsFirst(new LSmiTag(value));
+ return DefineAsRegister(new(zone()) LSmiTag(value));
} else {
- LNumberTagI* result = new LNumberTagI(value);
- return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
+ LNumberTagI* result = new(zone()) LNumberTagI(value);
+ return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}
} else {
ASSERT(to.IsDouble());
LOperand* value = Use(instr->value());
- return DefineAsRegister(new LInteger32ToDouble(value));
+ return DefineAsRegister(new(zone()) LInteger32ToDouble(value));
}
}
UNREACHABLE();
@@ -1625,13 +1724,13 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
LInstruction* LChunkBuilder::DoCheckNonSmi(HCheckNonSmi* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- return AssignEnvironment(new LCheckNonSmi(value));
+ return AssignEnvironment(new(zone()) LCheckNonSmi(value));
}
LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- LInstruction* result = new LCheckInstanceType(value);
+ LInstruction* result = new(zone()) LCheckInstanceType(value);
return AssignEnvironment(result);
}
@@ -1639,26 +1738,26 @@ LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
LOperand* temp1 = TempRegister();
LOperand* temp2 = TempRegister();
- LInstruction* result = new LCheckPrototypeMaps(temp1, temp2);
+ LInstruction* result = new(zone()) LCheckPrototypeMaps(temp1, temp2);
return AssignEnvironment(result);
}
LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- return AssignEnvironment(new LCheckSmi(value));
+ return AssignEnvironment(new(zone()) LCheckSmi(value));
}
LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- return AssignEnvironment(new LCheckFunction(value));
+ return AssignEnvironment(new(zone()) LCheckFunction(value));
}
-LInstruction* LChunkBuilder::DoCheckMap(HCheckMap* instr) {
+LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- LInstruction* result = new LCheckMap(value);
+ LInstruction* result = new(zone()) LCheckMaps(value);
return AssignEnvironment(result);
}
@@ -1669,57 +1768,32 @@ LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
LOperand* reg = UseRegister(value);
if (input_rep.IsDouble()) {
// Revisit this decision, here and 8 lines below.
- return DefineAsRegister(new LClampDToUint8(reg, FixedTemp(f22)));
+ return DefineAsRegister(new(zone()) LClampDToUint8(reg, FixedTemp(f22)));
} else if (input_rep.IsInteger32()) {
- return DefineAsRegister(new LClampIToUint8(reg));
+ return DefineAsRegister(new(zone()) LClampIToUint8(reg));
} else {
ASSERT(input_rep.IsTagged());
// Register allocator doesn't (yet) support allocation of double
// temps. Reserve f22 explicitly.
- LClampTToUint8* result = new LClampTToUint8(reg, FixedTemp(f22));
+ LClampTToUint8* result = new(zone()) LClampTToUint8(reg, FixedTemp(f22));
return AssignEnvironment(DefineAsRegister(result));
}
}
-LInstruction* LChunkBuilder::DoToInt32(HToInt32* instr) {
- HValue* value = instr->value();
- Representation input_rep = value->representation();
- LOperand* reg = UseRegister(value);
- if (input_rep.IsDouble()) {
- LOperand* temp1 = TempRegister();
- LOperand* temp2 = TempRegister();
- LDoubleToI* res = new LDoubleToI(reg, temp1, temp2);
- return AssignEnvironment(DefineAsRegister(res));
- } else if (input_rep.IsInteger32()) {
- // Canonicalization should already have removed the hydrogen instruction in
- // this case, since it is a noop.
- UNREACHABLE();
- return NULL;
- } else {
- ASSERT(input_rep.IsTagged());
- LOperand* temp1 = TempRegister();
- LOperand* temp2 = TempRegister();
- LOperand* temp3 = FixedTemp(f22);
- LTaggedToI* res = new LTaggedToI(reg, temp1, temp2, temp3);
- return AssignEnvironment(DefineSameAsFirst(res));
- }
-}
-
-
LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
- return new LReturn(UseFixed(instr->value(), v0));
+ return new(zone()) LReturn(UseFixed(instr->value(), v0));
}
LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
Representation r = instr->representation();
if (r.IsInteger32()) {
- return DefineAsRegister(new LConstantI);
+ return DefineAsRegister(new(zone()) LConstantI);
} else if (r.IsDouble()) {
- return DefineAsRegister(new LConstantD);
+ return DefineAsRegister(new(zone()) LConstantD);
} else if (r.IsTagged()) {
- return DefineAsRegister(new LConstantT);
+ return DefineAsRegister(new(zone()) LConstantT);
} else {
UNREACHABLE();
return NULL;
@@ -1728,7 +1802,7 @@ LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
- LLoadGlobalCell* result = new LLoadGlobalCell;
+ LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
return instr->RequiresHoleCheck()
? AssignEnvironment(DefineAsRegister(result))
: DefineAsRegister(result);
@@ -1737,17 +1811,18 @@ LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
LOperand* global_object = UseFixed(instr->global_object(), a0);
- LLoadGlobalGeneric* result = new LLoadGlobalGeneric(global_object);
+ LLoadGlobalGeneric* result = new(zone()) LLoadGlobalGeneric(global_object);
return MarkAsCall(DefineFixed(result, v0), instr);
}
LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
- LOperand* temp = TempRegister();
- LOperand* value = UseTempRegister(instr->value());
- LInstruction* result = new LStoreGlobalCell(value, temp);
- if (instr->RequiresHoleCheck()) result = AssignEnvironment(result);
- return result;
+ LOperand* value = UseRegister(instr->value());
+ // Use a temp to check the value in the cell in the case where we perform
+ // a hole check.
+ return instr->RequiresHoleCheck()
+ ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
+ : new(zone()) LStoreGlobalCell(value, NULL);
}
@@ -1755,14 +1830,16 @@ LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
LOperand* global_object = UseFixed(instr->global_object(), a1);
LOperand* value = UseFixed(instr->value(), a0);
LStoreGlobalGeneric* result =
- new LStoreGlobalGeneric(global_object, value);
+ new(zone()) LStoreGlobalGeneric(global_object, value);
return MarkAsCall(result, instr);
}
LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LLoadContextSlot(context));
+ LInstruction* result =
+ DefineAsRegister(new(zone()) LLoadContextSlot(context));
+ return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
@@ -1776,13 +1853,14 @@ LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
context = UseRegister(instr->context());
value = UseRegister(instr->value());
}
- return new LStoreContextSlot(context, value);
+ LInstruction* result = new(zone()) LStoreContextSlot(context, value);
+ return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
return DefineAsRegister(
- new LLoadNamedField(UseRegisterAtStart(instr->object())));
+ new(zone()) LLoadNamedField(UseRegisterAtStart(instr->object())));
}
@@ -1791,11 +1869,13 @@ LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
ASSERT(instr->representation().IsTagged());
if (instr->need_generic()) {
LOperand* obj = UseFixed(instr->object(), a0);
- LLoadNamedFieldPolymorphic* result = new LLoadNamedFieldPolymorphic(obj);
+ LLoadNamedFieldPolymorphic* result =
+ new(zone()) LLoadNamedFieldPolymorphic(obj);
return MarkAsCall(DefineFixed(result, v0), instr);
} else {
LOperand* obj = UseRegisterAtStart(instr->object());
- LLoadNamedFieldPolymorphic* result = new LLoadNamedFieldPolymorphic(obj);
+ LLoadNamedFieldPolymorphic* result =
+ new(zone()) LLoadNamedFieldPolymorphic(obj);
return AssignEnvironment(DefineAsRegister(result));
}
}
@@ -1803,7 +1883,7 @@ LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
LOperand* object = UseFixed(instr->object(), a0);
- LInstruction* result = DefineFixed(new LLoadNamedGeneric(object), v0);
+ LInstruction* result = DefineFixed(new(zone()) LLoadNamedGeneric(object), v0);
return MarkAsCall(result, instr);
}
@@ -1811,20 +1891,20 @@ LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
HLoadFunctionPrototype* instr) {
return AssignEnvironment(DefineAsRegister(
- new LLoadFunctionPrototype(UseRegister(instr->function()))));
+ new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
}
LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
LOperand* input = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LLoadElements(input));
+ return DefineAsRegister(new(zone()) LLoadElements(input));
}
LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
HLoadExternalArrayPointer* instr) {
LOperand* input = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LLoadExternalArrayPointer(input));
+ return DefineAsRegister(new(zone()) LLoadExternalArrayPointer(input));
}
@@ -1834,8 +1914,9 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
ASSERT(instr->key()->representation().IsInteger32());
LOperand* obj = UseRegisterAtStart(instr->object());
LOperand* key = UseRegisterAtStart(instr->key());
- LLoadKeyedFastElement* result = new LLoadKeyedFastElement(obj, key);
- return AssignEnvironment(DefineAsRegister(result));
+ LLoadKeyedFastElement* result = new(zone()) LLoadKeyedFastElement(obj, key);
+ if (instr->RequiresHoleCheck()) AssignEnvironment(result);
+ return DefineAsRegister(result);
}
@@ -1846,7 +1927,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
LOperand* elements = UseTempRegister(instr->elements());
LOperand* key = UseRegisterOrConstantAtStart(instr->key());
LLoadKeyedFastDoubleElement* result =
- new LLoadKeyedFastDoubleElement(elements, key);
+ new(zone()) LLoadKeyedFastDoubleElement(elements, key);
return AssignEnvironment(DefineAsRegister(result));
}
@@ -1866,7 +1947,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
LOperand* external_pointer = UseRegister(instr->external_pointer());
LOperand* key = UseRegisterOrConstant(instr->key());
LLoadKeyedSpecializedArrayElement* result =
- new LLoadKeyedSpecializedArrayElement(external_pointer, key);
+ new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
LInstruction* load_instr = DefineAsRegister(result);
// An unsigned int array load might overflow and cause a deopt, make sure it
// has an environment.
@@ -1880,7 +1961,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
LOperand* key = UseFixed(instr->key(), a0);
LInstruction* result =
- DefineFixed(new LLoadKeyedGeneric(object, key), v0);
+ DefineFixed(new(zone()) LLoadKeyedGeneric(object, key), v0);
return MarkAsCall(result, instr);
}
@@ -1899,8 +1980,7 @@ LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
LOperand* key = needs_write_barrier
? UseTempRegister(instr->key())
: UseRegisterOrConstantAtStart(instr->key());
-
- return AssignEnvironment(new LStoreKeyedFastElement(obj, key, val));
+ return new(zone()) LStoreKeyedFastElement(obj, key, val);
}
@@ -1914,7 +1994,7 @@ LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
LOperand* val = UseTempRegister(instr->value());
LOperand* key = UseRegisterOrConstantAtStart(instr->key());
- return new LStoreKeyedFastDoubleElement(elements, key, val);
+ return new(zone()) LStoreKeyedFastDoubleElement(elements, key, val);
}
@@ -1941,9 +2021,9 @@ LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
: UseRegister(instr->value());
LOperand* key = UseRegisterOrConstant(instr->key());
- return new LStoreKeyedSpecializedArrayElement(external_pointer,
- key,
- val);
+ return new(zone()) LStoreKeyedSpecializedArrayElement(external_pointer,
+ key,
+ val);
}
@@ -1956,7 +2036,7 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
ASSERT(instr->key()->representation().IsTagged());
ASSERT(instr->value()->representation().IsTagged());
- return MarkAsCall(new LStoreKeyedGeneric(obj, key, val), instr);
+ return MarkAsCall(new(zone()) LStoreKeyedGeneric(obj, key, val), instr);
}
@@ -1967,14 +2047,16 @@ LInstruction* LChunkBuilder::DoTransitionElementsKind(
LOperand* object = UseRegister(instr->object());
LOperand* new_map_reg = TempRegister();
LTransitionElementsKind* result =
- new LTransitionElementsKind(object, new_map_reg, NULL);
+ new(zone()) LTransitionElementsKind(object, new_map_reg, NULL);
return DefineSameAsFirst(result);
} else {
LOperand* object = UseFixed(instr->object(), a0);
LOperand* fixed_object_reg = FixedTemp(a2);
LOperand* new_map_reg = FixedTemp(a3);
LTransitionElementsKind* result =
- new LTransitionElementsKind(object, new_map_reg, fixed_object_reg);
+ new(zone()) LTransitionElementsKind(object,
+ new_map_reg,
+ fixed_object_reg);
return MarkAsCall(DefineFixed(result, v0), instr);
}
}
@@ -1991,7 +2073,7 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
? UseTempRegister(instr->value())
: UseRegister(instr->value());
- return new LStoreNamedField(obj, val);
+ return new(zone()) LStoreNamedField(obj, val);
}
@@ -1999,7 +2081,7 @@ LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
LOperand* obj = UseFixed(instr->object(), a1);
LOperand* val = UseFixed(instr->value(), a0);
- LInstruction* result = new LStoreNamedGeneric(obj, val);
+ LInstruction* result = new(zone()) LStoreNamedGeneric(obj, val);
return MarkAsCall(result, instr);
}
@@ -2007,55 +2089,68 @@ LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- return MarkAsCall(DefineFixed(new LStringAdd(left, right), v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LStringAdd(left, right), v0),
+ instr);
}
LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
LOperand* string = UseTempRegister(instr->string());
LOperand* index = UseTempRegister(instr->index());
- LStringCharCodeAt* result = new LStringCharCodeAt(string, index);
+ LStringCharCodeAt* result = new(zone()) LStringCharCodeAt(string, index);
return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}
LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
LOperand* char_code = UseRegister(instr->value());
- LStringCharFromCode* result = new LStringCharFromCode(char_code);
+ LStringCharFromCode* result = new(zone()) LStringCharFromCode(char_code);
return AssignPointerMap(DefineAsRegister(result));
}
LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
LOperand* string = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LStringLength(string));
+ return DefineAsRegister(new(zone()) LStringLength(string));
+}
+
+
+LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
+ LAllocateObject* result = new(zone()) LAllocateObject(
+ TempRegister(), TempRegister());
+ return AssignPointerMap(DefineAsRegister(result));
+}
+
+
+LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
+ return MarkAsCall(DefineFixed(new(zone()) LFastLiteral, v0), instr);
}
LInstruction* LChunkBuilder::DoArrayLiteral(HArrayLiteral* instr) {
- return MarkAsCall(DefineFixed(new LArrayLiteral, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LArrayLiteral, v0), instr);
}
LInstruction* LChunkBuilder::DoObjectLiteral(HObjectLiteral* instr) {
- return MarkAsCall(DefineFixed(new LObjectLiteral, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LObjectLiteral, v0), instr);
}
LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
- return MarkAsCall(DefineFixed(new LRegExpLiteral, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LRegExpLiteral, v0), instr);
}
LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
- return MarkAsCall(DefineFixed(new LFunctionLiteral, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LFunctionLiteral, v0), instr);
}
LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
LOperand* object = UseFixed(instr->object(), a0);
LOperand* key = UseFixed(instr->key(), a1);
- LDeleteProperty* result = new LDeleteProperty(object, key);
+ LDeleteProperty* result = new(zone()) LDeleteProperty(object, key);
return MarkAsCall(DefineFixed(result, v0), instr);
}
@@ -2063,13 +2158,13 @@ LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
allocator_->MarkAsOsrEntry();
current_block_->last_environment()->set_ast_id(instr->ast_id());
- return AssignEnvironment(new LOsrEntry);
+ return AssignEnvironment(new(zone()) LOsrEntry);
}
LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
int spill_index = chunk()->GetParameterStackSlot(instr->index());
- return DefineAsSpilled(new LParameter, spill_index);
+ return DefineAsSpilled(new(zone()) LParameter, spill_index);
}
@@ -2079,13 +2174,13 @@ LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
Abort("Too many spill slots needed for OSR");
spill_index = 0;
}
- return DefineAsSpilled(new LUnknownOSRValue, spill_index);
+ return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
}
LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallStub, v0), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallStub, v0), instr);
}
@@ -2102,32 +2197,33 @@ LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
LOperand* arguments = UseRegister(instr->arguments());
LOperand* length = UseTempRegister(instr->length());
LOperand* index = UseRegister(instr->index());
- LAccessArgumentsAt* result = new LAccessArgumentsAt(arguments, length, index);
+ LAccessArgumentsAt* result =
+ new(zone()) LAccessArgumentsAt(arguments, length, index);
return AssignEnvironment(DefineAsRegister(result));
}
LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
LOperand* object = UseFixed(instr->value(), a0);
- LToFastProperties* result = new LToFastProperties(object);
+ LToFastProperties* result = new(zone()) LToFastProperties(object);
return MarkAsCall(DefineFixed(result, v0), instr);
}
LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
- LTypeof* result = new LTypeof(UseFixed(instr->value(), a0));
+ LTypeof* result = new(zone()) LTypeof(UseFixed(instr->value(), a0));
return MarkAsCall(DefineFixed(result, v0), instr);
}
LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
- return new LTypeofIsAndBranch(UseTempRegister(instr->value()));
+ return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
}
LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
HIsConstructCallAndBranch* instr) {
- return new LIsConstructCallAndBranch(TempRegister());
+ return new(zone()) LIsConstructCallAndBranch(TempRegister());
}
@@ -2150,7 +2246,7 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
// If there is an instruction pending deoptimization environment create a
// lazy bailout instruction to capture the environment.
if (pending_deoptimization_ast_id_ == instr->ast_id()) {
- LInstruction* result = new LLazyBailout;
+ LInstruction* result = new(zone()) LLazyBailout;
result = AssignEnvironment(result);
instruction_pending_deoptimization_environment_->
set_deoptimization_environment(result->environment());
@@ -2164,10 +2260,10 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
if (instr->is_function_entry()) {
- return MarkAsCall(new LStackCheck, instr);
+ return MarkAsCall(new(zone()) LStackCheck, instr);
} else {
ASSERT(instr->is_backwards_branch());
- return AssignEnvironment(AssignPointerMap(new LStackCheck));
+ return AssignEnvironment(AssignPointerMap(new(zone()) LStackCheck));
}
}
@@ -2176,9 +2272,14 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
HEnvironment* outer = current_block_->last_environment();
HConstant* undefined = graph()->GetConstantUndefined();
HEnvironment* inner = outer->CopyForInlining(instr->closure(),
+ instr->arguments_count(),
instr->function(),
undefined,
- instr->call_kind());
+ instr->call_kind(),
+ instr->is_construct());
+ if (instr->arguments() != NULL) {
+ inner->Bind(instr->arguments(), graph()->GetArgumentsObject());
+ }
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
return NULL;
@@ -2186,7 +2287,8 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
- HEnvironment* outer = current_block_->last_environment()->outer();
+ HEnvironment* outer = current_block_->last_environment()->
+ DiscardInlined(false);
current_block_->UpdateEnvironment(outer);
return NULL;
}
@@ -2195,9 +2297,37 @@ LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
LInstruction* LChunkBuilder::DoIn(HIn* instr) {
LOperand* key = UseRegisterAtStart(instr->key());
LOperand* object = UseRegisterAtStart(instr->object());
- LIn* result = new LIn(key, object);
+ LIn* result = new(zone()) LIn(key, object);
return MarkAsCall(DefineFixed(result, v0), instr);
}
+LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
+ LOperand* object = UseFixed(instr->enumerable(), a0);
+ LForInPrepareMap* result = new(zone()) LForInPrepareMap(object);
+ return MarkAsCall(DefineFixed(result, v0), instr, CAN_DEOPTIMIZE_EAGERLY);
+}
+
+
+LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
+ LOperand* map = UseRegister(instr->map());
+ return AssignEnvironment(DefineAsRegister(
+ new(zone()) LForInCacheArray(map)));
+}
+
+
+LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
+ LOperand* value = UseRegisterAtStart(instr->value());
+ LOperand* map = UseRegisterAtStart(instr->map());
+ return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
+}
+
+
+LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
+ LOperand* object = UseRegister(instr->object());
+ LOperand* index = UseRegister(instr->index());
+ return DefineAsRegister(new(zone()) LLoadFieldByIndex(object, index));
+}
+
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/mips/lithium-mips.h b/src/3rdparty/v8/src/mips/lithium-mips.h
index 71f0bb2..952df99 100644
--- a/src/3rdparty/v8/src/mips/lithium-mips.h
+++ b/src/3rdparty/v8/src/mips/lithium-mips.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -49,6 +49,7 @@ class LCodeGen;
#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V) \
V(AccessArgumentsAt) \
V(AddI) \
+ V(AllocateObject) \
V(ApplyArguments) \
V(ArgumentsElements) \
V(ArgumentsLength) \
@@ -70,7 +71,7 @@ class LCodeGen;
V(CallStub) \
V(CheckFunction) \
V(CheckInstanceType) \
- V(CheckMap) \
+ V(CheckMaps) \
V(CheckNonSmi) \
V(CheckPrototypeMaps) \
V(CheckSmi) \
@@ -87,11 +88,13 @@ class LCodeGen;
V(ConstantI) \
V(ConstantT) \
V(Context) \
+ V(DeclareGlobals) \
V(DeleteProperty) \
V(Deoptimize) \
V(DivI) \
V(DoubleToI) \
V(ElementsKind) \
+ V(FastLiteral) \
V(FixedArrayBaseLength) \
V(FunctionLiteral) \
V(GetCachedArrayIndex) \
@@ -109,8 +112,10 @@ class LCodeGen;
V(IsConstructCallAndBranch) \
V(IsNilAndBranch) \
V(IsObjectAndBranch) \
+ V(IsStringAndBranch) \
V(IsSmiAndBranch) \
V(IsUndetectableAndBranch) \
+ V(StringCompareAndBranch) \
V(JSArrayLength) \
V(Label) \
V(LazyBailout) \
@@ -138,6 +143,7 @@ class LCodeGen;
V(Parameter) \
V(Power) \
V(PushArgument) \
+ V(Random) \
V(RegExpLiteral) \
V(Return) \
V(ShiftI) \
@@ -167,8 +173,13 @@ class LCodeGen;
V(TypeofIsAndBranch) \
V(UnaryMathOperation) \
V(UnknownOSRValue) \
- V(ValueOf)
-
+ V(ValueOf) \
+ V(ForInPrepareMap) \
+ V(ForInCacheArray) \
+ V(CheckMapValue) \
+ V(LoadFieldByIndex) \
+ V(DateField) \
+ V(WrapReceiver)
#define DECLARE_CONCRETE_INSTRUCTION(type, mnemonic) \
virtual Opcode opcode() const { return LInstruction::k##type; } \
@@ -457,6 +468,20 @@ class LControlInstruction: public LTemplateInstruction<0, I, T> {
};
+class LWrapReceiver: public LTemplateInstruction<1, 2, 0> {
+ public:
+ LWrapReceiver(LOperand* receiver, LOperand* function) {
+ inputs_[0] = receiver;
+ inputs_[1] = function;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(WrapReceiver, "wrap-receiver")
+
+ LOperand* receiver() { return inputs_[0]; }
+ LOperand* function() { return inputs_[1]; }
+};
+
+
class LApplyArguments: public LTemplateInstruction<1, 4, 0> {
public:
LApplyArguments(LOperand* function,
@@ -658,6 +683,20 @@ class LIsObjectAndBranch: public LControlInstruction<1, 1> {
};
+class LIsStringAndBranch: public LControlInstruction<1, 1> {
+ public:
+ LIsStringAndBranch(LOperand* value, LOperand* temp) {
+ inputs_[0] = value;
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(IsStringAndBranch, "is-string-and-branch")
+ DECLARE_HYDROGEN_ACCESSOR(IsStringAndBranch)
+
+ virtual void PrintDataTo(StringStream* stream);
+};
+
+
class LIsSmiAndBranch: public LControlInstruction<1, 0> {
public:
explicit LIsSmiAndBranch(LOperand* value) {
@@ -686,6 +725,23 @@ class LIsUndetectableAndBranch: public LControlInstruction<1, 1> {
};
+class LStringCompareAndBranch: public LControlInstruction<2, 0> {
+ public:
+ LStringCompareAndBranch(LOperand* left, LOperand* right) {
+ inputs_[0] = left;
+ inputs_[1] = right;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(StringCompareAndBranch,
+ "string-compare-and-branch")
+ DECLARE_HYDROGEN_ACCESSOR(StringCompareAndBranch)
+
+ Token::Value op() const { return hydrogen()->token(); }
+
+ virtual void PrintDataTo(StringStream* stream);
+};
+
+
class LHasInstanceTypeAndBranch: public LControlInstruction<1, 0> {
public:
explicit LHasInstanceTypeAndBranch(LOperand* value) {
@@ -948,6 +1004,22 @@ class LValueOf: public LTemplateInstruction<1, 1, 1> {
};
+class LDateField: public LTemplateInstruction<1, 1, 1> {
+ public:
+ LDateField(LOperand* date, LOperand* temp, Smi* index) : index_(index) {
+ inputs_[0] = date;
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(ValueOf, "date-field")
+ DECLARE_HYDROGEN_ACCESSOR(ValueOf)
+ Smi* index() const { return index_; }
+
+ private:
+ Smi* index_;
+};
+
+
class LThrow: public LTemplateInstruction<0, 1, 0> {
public:
explicit LThrow(LOperand* value) {
@@ -992,6 +1064,17 @@ class LPower: public LTemplateInstruction<1, 2, 0> {
};
+class LRandom: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LRandom(LOperand* global_object) {
+ inputs_[0] = global_object;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(Random, "random")
+ DECLARE_HYDROGEN_ACCESSOR(Random)
+};
+
+
class LArithmeticD: public LTemplateInstruction<1, 2, 0> {
public:
LArithmeticD(Token::Value op, LOperand* left, LOperand* right)
@@ -1208,6 +1291,8 @@ class LStoreGlobalCell: public LTemplateInstruction<0, 1, 1> {
DECLARE_CONCRETE_INSTRUCTION(StoreGlobalCell, "store-global-cell")
DECLARE_HYDROGEN_ACCESSOR(StoreGlobalCell)
+
+ LOperand* value() { return inputs_[0]; }
};
@@ -1225,7 +1310,7 @@ class LStoreGlobalGeneric: public LTemplateInstruction<0, 2, 0> {
LOperand* global_object() { return InputAt(0); }
Handle<Object> name() const { return hydrogen()->name(); }
LOperand* value() { return InputAt(1); }
- bool strict_mode() { return hydrogen()->strict_mode(); }
+ StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
@@ -1298,6 +1383,13 @@ class LOuterContext: public LTemplateInstruction<1, 1, 0> {
};
+class LDeclareGlobals: public LTemplateInstruction<0, 0, 0> {
+ public:
+ DECLARE_CONCRETE_INSTRUCTION(DeclareGlobals, "declare-globals")
+ DECLARE_HYDROGEN_ACCESSOR(DeclareGlobals)
+};
+
+
class LGlobalObject: public LTemplateInstruction<1, 1, 0> {
public:
explicit LGlobalObject(LOperand* context) {
@@ -1379,12 +1471,17 @@ class LCallNamed: public LTemplateInstruction<1, 0, 0> {
};
-class LCallFunction: public LTemplateInstruction<1, 0, 0> {
+class LCallFunction: public LTemplateInstruction<1, 1, 0> {
public:
+ explicit LCallFunction(LOperand* function) {
+ inputs_[0] = function;
+ }
+
DECLARE_CONCRETE_INSTRUCTION(CallFunction, "call-function")
DECLARE_HYDROGEN_ACCESSOR(CallFunction)
- int arity() const { return hydrogen()->argument_count() - 2; }
+ LOperand* function() { return inputs_[0]; }
+ int arity() const { return hydrogen()->argument_count() - 1; }
};
@@ -1580,7 +1677,6 @@ class LStoreNamedGeneric: public LTemplateInstruction<0, 2, 0> {
LOperand* value() { return inputs_[1]; }
Handle<Object> name() const { return hydrogen()->name(); }
StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
- bool strict_mode() { return strict_mode_flag() == kStrictMode; }
};
@@ -1642,7 +1738,7 @@ class LStoreKeyedGeneric: public LTemplateInstruction<0, 3, 0> {
LOperand* object() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
LOperand* value() { return inputs_[2]; }
- bool strict_mode() { return hydrogen()->strict_mode(); }
+ StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
class LStoreKeyedSpecializedArrayElement: public LTemplateInstruction<0, 3, 0> {
@@ -1755,6 +1851,8 @@ class LCheckFunction: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return InputAt(0); }
+
DECLARE_CONCRETE_INSTRUCTION(CheckFunction, "check-function")
DECLARE_HYDROGEN_ACCESSOR(CheckFunction)
};
@@ -1771,14 +1869,14 @@ class LCheckInstanceType: public LTemplateInstruction<0, 1, 0> {
};
-class LCheckMap: public LTemplateInstruction<0, 1, 0> {
+class LCheckMaps: public LTemplateInstruction<0, 1, 0> {
public:
- explicit LCheckMap(LOperand* value) {
+ explicit LCheckMaps(LOperand* value) {
inputs_[0] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(CheckMap, "check-map")
- DECLARE_HYDROGEN_ACCESSOR(CheckMap)
+ DECLARE_CONCRETE_INSTRUCTION(CheckMaps, "check-maps")
+ DECLARE_HYDROGEN_ACCESSOR(CheckMaps)
};
@@ -1855,6 +1953,25 @@ class LClampTToUint8: public LTemplateInstruction<1, 1, 1> {
};
+class LAllocateObject: public LTemplateInstruction<1, 0, 2> {
+ public:
+ LAllocateObject(LOperand* temp1, LOperand* temp2) {
+ temps_[0] = temp1;
+ temps_[1] = temp2;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(AllocateObject, "allocate-object")
+ DECLARE_HYDROGEN_ACCESSOR(AllocateObject)
+};
+
+
+class LFastLiteral: public LTemplateInstruction<1, 0, 0> {
+ public:
+ DECLARE_CONCRETE_INSTRUCTION(FastLiteral, "fast-literal")
+ DECLARE_HYDROGEN_ACCESSOR(FastLiteral)
+};
+
+
class LArrayLiteral: public LTemplateInstruction<1, 0, 0> {
public:
DECLARE_CONCRETE_INSTRUCTION(ArrayLiteral, "array-literal")
@@ -1995,6 +2112,62 @@ class LIn: public LTemplateInstruction<1, 2, 0> {
};
+class LForInPrepareMap: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LForInPrepareMap(LOperand* object) {
+ inputs_[0] = object;
+ }
+
+ LOperand* object() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(ForInPrepareMap, "for-in-prepare-map")
+};
+
+
+class LForInCacheArray: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LForInCacheArray(LOperand* map) {
+ inputs_[0] = map;
+ }
+
+ LOperand* map() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(ForInCacheArray, "for-in-cache-array")
+
+ int idx() {
+ return HForInCacheArray::cast(this->hydrogen_value())->idx();
+ }
+};
+
+
+class LCheckMapValue: public LTemplateInstruction<0, 2, 0> {
+ public:
+ LCheckMapValue(LOperand* value, LOperand* map) {
+ inputs_[0] = value;
+ inputs_[1] = map;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+ LOperand* map() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(CheckMapValue, "check-map-value")
+};
+
+
+class LLoadFieldByIndex: public LTemplateInstruction<1, 2, 0> {
+ public:
+ LLoadFieldByIndex(LOperand* object, LOperand* index) {
+ inputs_[0] = object;
+ inputs_[1] = index;
+ }
+
+ LOperand* object() { return inputs_[0]; }
+ LOperand* index() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadFieldByIndex, "load-field-by-index")
+};
+
+
class LChunkBuilder;
class LChunk: public ZoneObject {
public:
@@ -2062,6 +2235,7 @@ class LChunkBuilder BASE_EMBEDDED {
: chunk_(NULL),
info_(info),
graph_(graph),
+ zone_(graph->isolate()->zone()),
status_(UNUSED),
current_instruction_(NULL),
current_block_(NULL),
@@ -2091,6 +2265,7 @@ class LChunkBuilder BASE_EMBEDDED {
LChunk* chunk() const { return chunk_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
+ Zone* zone() const { return zone_; }
bool is_unused() const { return status_ == UNUSED; }
bool is_building() const { return status_ == BUILDING; }
@@ -2100,7 +2275,6 @@ class LChunkBuilder BASE_EMBEDDED {
void Abort(const char* format, ...);
// Methods for getting operands for Use / Define / Temp.
- LRegister* ToOperand(Register reg);
LUnallocated* ToUnallocated(Register reg);
LUnallocated* ToUnallocated(DoubleRegister reg);
@@ -2151,8 +2325,6 @@ class LChunkBuilder BASE_EMBEDDED {
LInstruction* Define(LTemplateInstruction<1, I, T>* instr,
LUnallocated* result);
template<int I, int T>
- LInstruction* Define(LTemplateInstruction<1, I, T>* instr);
- template<int I, int T>
LInstruction* DefineAsRegister(LTemplateInstruction<1, I, T>* instr);
template<int I, int T>
LInstruction* DefineAsSpilled(LTemplateInstruction<1, I, T>* instr,
@@ -2199,6 +2371,7 @@ class LChunkBuilder BASE_EMBEDDED {
LChunk* chunk_;
CompilationInfo* info_;
HGraph* const graph_;
+ Zone* zone_;
Status status_;
HInstruction* current_instruction_;
HBasicBlock* current_block_;
diff --git a/src/3rdparty/v8/src/mips/macro-assembler-mips.cc b/src/3rdparty/v8/src/mips/macro-assembler-mips.cc
index 36c4f45..e93a417 100644
--- a/src/3rdparty/v8/src/mips/macro-assembler-mips.cc
+++ b/src/3rdparty/v8/src/mips/macro-assembler-mips.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -81,6 +81,19 @@ void MacroAssembler::StoreRoot(Register source,
}
+void MacroAssembler::LoadHeapObject(Register result,
+ Handle<HeapObject> object) {
+ if (isolate()->heap()->InNewSpace(*object)) {
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(object);
+ li(result, Operand(cell));
+ lw(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
+ } else {
+ li(result, Operand(object));
+ }
+}
+
+
// Push and pop all registers that can hold pointers.
void MacroAssembler::PushSafepointRegisters() {
// Safepoints expect a block of kNumSafepointRegisters values on the
@@ -239,12 +252,17 @@ void MacroAssembler::RecordWrite(Register object,
// registers are cp.
ASSERT(!address.is(cp) && !value.is(cp));
+ if (emit_debug_code()) {
+ lw(at, MemOperand(address));
+ Assert(
+ eq, "Wrong address or value passed to RecordWrite", at, Operand(value));
+ }
+
Label done;
if (smi_check == INLINE_SMI_CHECK) {
ASSERT_EQ(0, kSmiTag);
- And(t8, value, Operand(kSmiTagMask));
- Branch(&done, eq, t8, Operand(zero_reg));
+ JumpIfSmi(value, &done);
}
CheckPageFlag(value,
@@ -285,7 +303,7 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests.
SaveFPRegsMode fp_mode,
RememberedSetFinalAction and_then) {
Label done;
- if (FLAG_debug_code) {
+ if (emit_debug_code()) {
Label ok;
JumpIfNotInNewSpace(object, scratch, &ok);
stop("Remembered set pointer is in new space");
@@ -397,6 +415,46 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
}
+void MacroAssembler::GetNumberHash(Register reg0, Register scratch) {
+ // First of all we assign the hash seed to scratch.
+ LoadRoot(scratch, Heap::kHashSeedRootIndex);
+ SmiUntag(scratch);
+
+ // Xor original key with a seed.
+ xor_(reg0, reg0, scratch);
+
+ // Compute the hash code from the untagged key. This must be kept in sync
+ // with ComputeIntegerHash in utils.h.
+ //
+ // hash = ~hash + (hash << 15);
+ nor(scratch, reg0, zero_reg);
+ sll(at, reg0, 15);
+ addu(reg0, scratch, at);
+
+ // hash = hash ^ (hash >> 12);
+ srl(at, reg0, 12);
+ xor_(reg0, reg0, at);
+
+ // hash = hash + (hash << 2);
+ sll(at, reg0, 2);
+ addu(reg0, reg0, at);
+
+ // hash = hash ^ (hash >> 4);
+ srl(at, reg0, 4);
+ xor_(reg0, reg0, at);
+
+ // hash = hash * 2057;
+ sll(scratch, reg0, 11);
+ sll(at, reg0, 3);
+ addu(reg0, reg0, at);
+ addu(reg0, reg0, scratch);
+
+ // hash = hash ^ (hash >> 16);
+ srl(at, reg0, 16);
+ xor_(reg0, reg0, at);
+}
+
+
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
Register elements,
Register key,
@@ -428,36 +486,10 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
// at - Temporary (avoid MacroAssembler instructions also using 'at').
Label done;
- // Compute the hash code from the untagged key. This must be kept in sync
- // with ComputeIntegerHash in utils.h.
- //
- // hash = ~hash + (hash << 15);
- nor(reg1, reg0, zero_reg);
- sll(at, reg0, 15);
- addu(reg0, reg1, at);
-
- // hash = hash ^ (hash >> 12);
- srl(at, reg0, 12);
- xor_(reg0, reg0, at);
-
- // hash = hash + (hash << 2);
- sll(at, reg0, 2);
- addu(reg0, reg0, at);
-
- // hash = hash ^ (hash >> 4);
- srl(at, reg0, 4);
- xor_(reg0, reg0, at);
-
- // hash = hash * 2057;
- li(reg1, Operand(2057));
- mul(reg0, reg0, reg1);
-
- // hash = hash ^ (hash >> 16);
- srl(at, reg0, 16);
- xor_(reg0, reg0, at);
+ GetNumberHash(reg0, reg1);
// Compute the capacity mask.
- lw(reg1, FieldMemOperand(elements, NumberDictionary::kCapacityOffset));
+ lw(reg1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset));
sra(reg1, reg1, kSmiTagSize);
Subu(reg1, reg1, Operand(1));
@@ -468,12 +500,12 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
mov(reg2, reg0);
// Compute the masked index: (hash + i + i * i) & mask.
if (i > 0) {
- Addu(reg2, reg2, Operand(NumberDictionary::GetProbeOffset(i)));
+ Addu(reg2, reg2, Operand(SeededNumberDictionary::GetProbeOffset(i)));
}
and_(reg2, reg2, reg1);
// Scale the index by multiplying by the element size.
- ASSERT(NumberDictionary::kEntrySize == 3);
+ ASSERT(SeededNumberDictionary::kEntrySize == 3);
sll(at, reg2, 1); // 2x.
addu(reg2, reg2, at); // reg2 = reg2 * 3.
@@ -481,7 +513,7 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
sll(at, reg2, kPointerSizeLog2);
addu(reg2, elements, at);
- lw(at, FieldMemOperand(reg2, NumberDictionary::kElementsStartOffset));
+ lw(at, FieldMemOperand(reg2, SeededNumberDictionary::kElementsStartOffset));
if (i != kProbes - 1) {
Branch(&done, eq, key, Operand(at));
} else {
@@ -493,14 +525,14 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
// Check that the value is a normal property.
// reg2: elements + (index * kPointerSize).
const int kDetailsOffset =
- NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
+ SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
lw(reg1, FieldMemOperand(reg2, kDetailsOffset));
And(at, reg1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
Branch(miss, ne, at, Operand(zero_reg));
// Get the value at the masked, scaled index and return.
const int kValueOffset =
- NumberDictionary::kElementsStartOffset + kPointerSize;
+ SeededNumberDictionary::kElementsStartOffset + kPointerSize;
lw(result, FieldMemOperand(reg2, kValueOffset));
}
@@ -542,12 +574,22 @@ void MacroAssembler::Subu(Register rd, Register rs, const Operand& rt) {
void MacroAssembler::Mul(Register rd, Register rs, const Operand& rt) {
if (rt.is_reg()) {
- mul(rd, rs, rt.rm());
+ if (kArchVariant == kLoongson) {
+ mult(rs, rt.rm());
+ mflo(rd);
+ } else {
+ mul(rd, rs, rt.rm());
+ }
} else {
// li handles the relocation.
ASSERT(!rs.is(at));
li(at, rt);
- mul(rd, rs, at);
+ if (kArchVariant == kLoongson) {
+ mult(rs, at);
+ mflo(rd);
+ } else {
+ mul(rd, rs, at);
+ }
}
}
@@ -702,7 +744,7 @@ void MacroAssembler::Sltu(Register rd, Register rs, const Operand& rt) {
void MacroAssembler::Ror(Register rd, Register rs, const Operand& rt) {
- if (mips32r2) {
+ if (kArchVariant == kMips32r2) {
if (rt.is_reg()) {
rotrv(rd, rs, rt.rm());
} else {
@@ -726,31 +768,30 @@ void MacroAssembler::Ror(Register rd, Register rs, const Operand& rt) {
}
}
-
//------------Pseudo-instructions-------------
-void MacroAssembler::li(Register rd, Operand j, bool gen2instr) {
+void MacroAssembler::li(Register rd, Operand j, LiFlags mode) {
ASSERT(!j.is_reg());
BlockTrampolinePoolScope block_trampoline_pool(this);
- if (!MustUseReg(j.rmode_) && !gen2instr) {
+ if (!MustUseReg(j.rmode_) && mode == OPTIMIZE_SIZE) {
// Normal load of an immediate value which does not need Relocation Info.
if (is_int16(j.imm32_)) {
addiu(rd, zero_reg, j.imm32_);
} else if (!(j.imm32_ & kHiMask)) {
ori(rd, zero_reg, j.imm32_);
} else if (!(j.imm32_ & kImm16Mask)) {
- lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
+ lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
} else {
- lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
+ lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
ori(rd, rd, (j.imm32_ & kImm16Mask));
}
- } else if (MustUseReg(j.rmode_) || gen2instr) {
+ } else {
if (MustUseReg(j.rmode_)) {
RecordRelocInfo(j.rmode_, j.imm32_);
}
- // We need always the same number of instructions as we may need to patch
+ // We always need the same number of instructions as we may need to patch
// this code to load another value which may need 2 instructions to load.
- lui(rd, (j.imm32_ & kHiMask) >> kLuiShift);
+ lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
ori(rd, rd, (j.imm32_ & kImm16Mask));
}
}
@@ -890,7 +931,7 @@ void MacroAssembler::Ext(Register rt,
ASSERT(pos < 32);
ASSERT(pos + size < 33);
- if (mips32r2) {
+ if (kArchVariant == kMips32r2) {
ext_(rt, rs, pos, size);
} else {
// Move rs to rt and shift it left then right to get the
@@ -911,34 +952,21 @@ void MacroAssembler::Ins(Register rt,
uint16_t pos,
uint16_t size) {
ASSERT(pos < 32);
- ASSERT(pos + size < 32);
+ ASSERT(pos + size <= 32);
+ ASSERT(size != 0);
- if (mips32r2) {
+ if (kArchVariant == kMips32r2) {
ins_(rt, rs, pos, size);
} else {
ASSERT(!rt.is(t8) && !rs.is(t8));
-
- srl(t8, rt, pos + size);
- // The left chunk from rt that needs to
- // be saved is on the right side of t8.
- sll(at, t8, pos + size);
- // The 'at' register now contains the left chunk on
- // the left (proper position) and zeroes.
- sll(t8, rt, 32 - pos);
- // t8 now contains the right chunk on the left and zeroes.
- srl(t8, t8, 32 - pos);
- // t8 now contains the right chunk on
- // the right (proper position) and zeroes.
- or_(rt, at, t8);
- // rt now contains the left and right chunks from the original rt
- // in their proper position and zeroes in the middle.
- sll(t8, rs, 32 - size);
- // t8 now contains the chunk from rs on the left and zeroes.
- srl(t8, t8, 32 - size - pos);
- // t8 now contains the original chunk from rs in
- // the middle (proper position).
- or_(rt, rt, t8);
- // rt now contains the result of the ins instruction in R2 mode.
+ Subu(at, zero_reg, Operand(1));
+ srl(at, at, 32 - size);
+ and_(t8, rs, at);
+ sll(t8, t8, pos);
+ sll(at, at, pos);
+ nor(at, at, zero_reg);
+ and_(at, rt, at);
+ or_(rt, t8, at);
}
}
@@ -997,6 +1025,48 @@ void MacroAssembler::Trunc_uw_d(FPURegister fd,
mtc1(t8, fd);
}
+void MacroAssembler::Trunc_w_d(FPURegister fd, FPURegister fs) {
+ if (kArchVariant == kLoongson && fd.is(fs)) {
+ mfc1(t8, FPURegister::from_code(fs.code() + 1));
+ trunc_w_d(fd, fs);
+ mtc1(t8, FPURegister::from_code(fs.code() + 1));
+ } else {
+ trunc_w_d(fd, fs);
+ }
+}
+
+void MacroAssembler::Round_w_d(FPURegister fd, FPURegister fs) {
+ if (kArchVariant == kLoongson && fd.is(fs)) {
+ mfc1(t8, FPURegister::from_code(fs.code() + 1));
+ round_w_d(fd, fs);
+ mtc1(t8, FPURegister::from_code(fs.code() + 1));
+ } else {
+ round_w_d(fd, fs);
+ }
+}
+
+
+void MacroAssembler::Floor_w_d(FPURegister fd, FPURegister fs) {
+ if (kArchVariant == kLoongson && fd.is(fs)) {
+ mfc1(t8, FPURegister::from_code(fs.code() + 1));
+ floor_w_d(fd, fs);
+ mtc1(t8, FPURegister::from_code(fs.code() + 1));
+ } else {
+ floor_w_d(fd, fs);
+ }
+}
+
+
+void MacroAssembler::Ceil_w_d(FPURegister fd, FPURegister fs) {
+ if (kArchVariant == kLoongson && fd.is(fs)) {
+ mfc1(t8, FPURegister::from_code(fs.code() + 1));
+ ceil_w_d(fd, fs);
+ mtc1(t8, FPURegister::from_code(fs.code() + 1));
+ } else {
+ ceil_w_d(fd, fs);
+ }
+}
+
void MacroAssembler::Trunc_uw_d(FPURegister fd,
Register rs,
@@ -1127,6 +1197,104 @@ void MacroAssembler::Move(FPURegister dst, double imm) {
}
+void MacroAssembler::Movz(Register rd, Register rs, Register rt) {
+ if (kArchVariant == kLoongson) {
+ Label done;
+ Branch(&done, ne, rt, Operand(zero_reg));
+ mov(rd, rs);
+ bind(&done);
+ } else {
+ movz(rd, rs, rt);
+ }
+}
+
+
+void MacroAssembler::Movn(Register rd, Register rs, Register rt) {
+ if (kArchVariant == kLoongson) {
+ Label done;
+ Branch(&done, eq, rt, Operand(zero_reg));
+ mov(rd, rs);
+ bind(&done);
+ } else {
+ movn(rd, rs, rt);
+ }
+}
+
+
+void MacroAssembler::Movt(Register rd, Register rs, uint16_t cc) {
+ if (kArchVariant == kLoongson) {
+ // Tests an FP condition code and then conditionally move rs to rd.
+ // We do not currently use any FPU cc bit other than bit 0.
+ ASSERT(cc == 0);
+ ASSERT(!(rs.is(t8) || rd.is(t8)));
+ Label done;
+ Register scratch = t8;
+ // For testing purposes we need to fetch content of the FCSR register and
+ // than test its cc (floating point condition code) bit (for cc = 0, it is
+ // 24. bit of the FCSR).
+ cfc1(scratch, FCSR);
+ // For the MIPS I, II and III architectures, the contents of scratch is
+ // UNPREDICTABLE for the instruction immediately following CFC1.
+ nop();
+ srl(scratch, scratch, 16);
+ andi(scratch, scratch, 0x0080);
+ Branch(&done, eq, scratch, Operand(zero_reg));
+ mov(rd, rs);
+ bind(&done);
+ } else {
+ movt(rd, rs, cc);
+ }
+}
+
+
+void MacroAssembler::Movf(Register rd, Register rs, uint16_t cc) {
+ if (kArchVariant == kLoongson) {
+ // Tests an FP condition code and then conditionally move rs to rd.
+ // We do not currently use any FPU cc bit other than bit 0.
+ ASSERT(cc == 0);
+ ASSERT(!(rs.is(t8) || rd.is(t8)));
+ Label done;
+ Register scratch = t8;
+ // For testing purposes we need to fetch content of the FCSR register and
+ // than test its cc (floating point condition code) bit (for cc = 0, it is
+ // 24. bit of the FCSR).
+ cfc1(scratch, FCSR);
+ // For the MIPS I, II and III architectures, the contents of scratch is
+ // UNPREDICTABLE for the instruction immediately following CFC1.
+ nop();
+ srl(scratch, scratch, 16);
+ andi(scratch, scratch, 0x0080);
+ Branch(&done, ne, scratch, Operand(zero_reg));
+ mov(rd, rs);
+ bind(&done);
+ } else {
+ movf(rd, rs, cc);
+ }
+}
+
+
+void MacroAssembler::Clz(Register rd, Register rs) {
+ if (kArchVariant == kLoongson) {
+ ASSERT(!(rd.is(t8) || rd.is(t9)) && !(rs.is(t8) || rs.is(t9)));
+ Register mask = t8;
+ Register scratch = t9;
+ Label loop, end;
+ mov(at, rs);
+ mov(rd, zero_reg);
+ lui(mask, 0x8000);
+ bind(&loop);
+ and_(scratch, at, mask);
+ Branch(&end, ne, scratch, Operand(zero_reg));
+ addiu(rd, rd, 1);
+ Branch(&loop, ne, mask, Operand(zero_reg), USE_DELAY_SLOT);
+ srl(mask, mask, 1);
+ bind(&end);
+ } else {
+ clz(rd, rs);
+ }
+}
+
+
// Tries to get a signed int32 out of a double precision floating point heap
// number. Rounds towards 0. Branch to 'not_int32' if the double is out of the
// 32bits signed integer range.
@@ -1159,7 +1327,7 @@ void MacroAssembler::ConvertToInt32(Register source,
Branch(not_int32, gt, scratch2, Operand(non_smi_exponent));
// We know the exponent is smaller than 30 (biased). If it is less than
- // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, ie
+ // 0 (biased) then the number is smaller in magnitude than 1.0 * 2^0, i.e.
// it rounds to zero.
const uint32_t zero_exponent =
(HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
@@ -1217,8 +1385,8 @@ void MacroAssembler::ConvertToInt32(Register source,
subu(scratch2, zero_reg, scratch);
// Trick to check sign bit (msb) held in dest, count leading zero.
// 0 indicates negative, save negative version with conditional move.
- clz(dest, dest);
- movz(scratch, scratch2, dest);
+ Clz(dest, dest);
+ Movz(scratch, scratch2, dest);
mov(dest, scratch);
}
bind(&done);
@@ -1249,16 +1417,16 @@ void MacroAssembler::EmitFPUTruncate(FPURoundingMode rounding_mode,
// Do operation based on rounding mode.
switch (rounding_mode) {
case kRoundToNearest:
- round_w_d(result, double_input);
+ Round_w_d(result, double_input);
break;
case kRoundToZero:
- trunc_w_d(result, double_input);
+ Trunc_w_d(result, double_input);
break;
case kRoundToPlusInf:
- ceil_w_d(result, double_input);
+ Ceil_w_d(result, double_input);
break;
case kRoundToMinusInf:
- floor_w_d(result, double_input);
+ Floor_w_d(result, double_input);
break;
} // End of switch-statement.
@@ -1285,7 +1453,7 @@ void MacroAssembler::EmitOutOfInt32RangeTruncate(Register result,
// Check for Infinity and NaNs, which should return 0.
Subu(scratch, result, HeapNumber::kExponentMask);
- movz(result, zero_reg, scratch);
+ Movz(result, zero_reg, scratch);
Branch(&done, eq, scratch, Operand(zero_reg));
// Express exponent as delta to (number of mantissa bits + 31).
@@ -1349,7 +1517,7 @@ void MacroAssembler::EmitOutOfInt32RangeTruncate(Register result,
result = sign;
sign = no_reg;
Subu(result, zero_reg, input_high);
- movz(result, input_high, scratch);
+ Movz(result, input_high, scratch);
bind(&done);
}
@@ -1478,6 +1646,16 @@ void MacroAssembler::Branch(Label* L, Condition cond, Register rs,
}
+void MacroAssembler::Branch(Label* L,
+ Condition cond,
+ Register rs,
+ Heap::RootListIndex index,
+ BranchDelaySlot bdslot) {
+ LoadRoot(at, index);
+ Branch(L, cond, rs, Operand(at), bdslot);
+}
+
+
void MacroAssembler::BranchShort(int16_t offset, BranchDelaySlot bdslot) {
b(offset);
@@ -2269,8 +2447,15 @@ void MacroAssembler::Jump(intptr_t target,
Register rs,
const Operand& rt,
BranchDelaySlot bd) {
+ Label skip;
+ if (cond != cc_always) {
+ Branch(USE_DELAY_SLOT, &skip, NegateCondition(cond), rs, rt);
+ }
+ // The first instruction of 'li' may be placed in the delay slot.
+ // This is not an issue, t9 is expected to be clobbered anyway.
li(t9, Operand(target, rmode));
- Jump(t9, cond, rs, rt, bd);
+ Jump(t9, al, zero_reg, Operand(zero_reg), bd);
+ bind(&skip);
}
@@ -2365,7 +2550,7 @@ void MacroAssembler::Call(Address target,
// Must record previous source positions before the
// li() generates a new code target.
positions_recorder()->WriteRecordedPositions();
- li(t9, Operand(target_int, rmode), true);
+ li(t9, Operand(target_int, rmode), CONSTANT_SIZE);
Call(t9, cond, rs, rt, bd);
ASSERT_EQ(CallSize(target, rmode, cond, rs, rt, bd),
SizeOfCodeGeneratedSince(&start));
@@ -2400,7 +2585,7 @@ void MacroAssembler::Call(Handle<Code> code,
rmode = RelocInfo::CODE_TARGET_WITH_ID;
}
Call(reinterpret_cast<Address>(code.location()), rmode, cond, rs, rt, bd);
- ASSERT_EQ(CallSize(code, rmode, ast_id, cond, rs, rt),
+ ASSERT_EQ(CallSize(code, rmode, ast_id, cond, rs, rt, bd),
SizeOfCodeGeneratedSince(&start));
}
@@ -2470,14 +2655,16 @@ void MacroAssembler::Jalr(Label* L, BranchDelaySlot bdslot) {
nop();
}
+void MacroAssembler::DropAndRet(int drop) {
+ Ret(USE_DELAY_SLOT);
+ addiu(sp, sp, drop * kPointerSize);
+}
void MacroAssembler::DropAndRet(int drop,
Condition cond,
Register r1,
const Operand& r2) {
- // This is a workaround to make sure only one branch instruction is
- // generated. It relies on Drop and Ret not creating branches if
- // cond == cc_always.
+ // Both Drop and Ret need to be conditional.
Label skip;
if (cond != cc_always) {
Branch(&skip, NegateCondition(cond), r1, r2);
@@ -2544,8 +2731,8 @@ void MacroAssembler::Push(Handle<Object> handle) {
#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
- mov(a0, zero_reg);
- li(a1, Operand(ExternalReference(Runtime::kDebugBreak, isolate())));
+ PrepareCEntryArgs(0);
+ PrepareCEntryFunction(ExternalReference(Runtime::kDebugBreak, isolate()));
CEntryStub ces(1);
ASSERT(AllowThisStubCall(&ces));
Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
@@ -2557,61 +2744,43 @@ void MacroAssembler::DebugBreak() {
// ---------------------------------------------------------------------------
// Exception handling.
-void MacroAssembler::PushTryHandler(CodeLocation try_location,
- HandlerType type) {
+void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
+ int handler_index) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
-
- // The return address is passed in register ra.
- if (try_location == IN_JAVASCRIPT) {
- if (type == TRY_CATCH_HANDLER) {
- li(t0, Operand(StackHandler::TRY_CATCH));
- } else {
- li(t0, Operand(StackHandler::TRY_FINALLY));
- }
- // Save the current handler as the next handler.
- li(t2, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
- lw(t1, MemOperand(t2));
-
- addiu(sp, sp, -StackHandlerConstants::kSize);
- sw(ra, MemOperand(sp, StackHandlerConstants::kPCOffset));
- sw(fp, MemOperand(sp, StackHandlerConstants::kFPOffset));
- sw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
- sw(t0, MemOperand(sp, StackHandlerConstants::kStateOffset));
- sw(t1, MemOperand(sp, StackHandlerConstants::kNextOffset));
-
- // Link this handler as the new current one.
- sw(sp, MemOperand(t2));
-
+ STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
+
+ // For the JSEntry handler, we must preserve a0-a3 and s0.
+ // t1-t3 are available. We will build up the handler from the bottom by
+ // pushing on the stack.
+ // Set up the code object (t1) and the state (t2) for pushing.
+ unsigned state =
+ StackHandler::IndexField::encode(handler_index) |
+ StackHandler::KindField::encode(kind);
+ li(t1, Operand(CodeObject()), CONSTANT_SIZE);
+ li(t2, Operand(state));
+
+ // Push the frame pointer, context, state, and code object.
+ if (kind == StackHandler::JS_ENTRY) {
+ ASSERT_EQ(Smi::FromInt(0), 0);
+ // The second zero_reg indicates no context.
+ // The first zero_reg is the NULL frame pointer.
+ // The operands are reversed to match the order of MultiPush/Pop.
+ Push(zero_reg, zero_reg, t2, t1);
} else {
- // Must preserve a0-a3, and s0 (argv).
- ASSERT(try_location == IN_JS_ENTRY);
- // The frame pointer does not point to a JS frame so we save NULL
- // for fp. We expect the code throwing an exception to check fp
- // before dereferencing it to restore the context.
- li(t0, Operand(StackHandler::ENTRY));
-
- // Save the current handler as the next handler.
- li(t2, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
- lw(t1, MemOperand(t2));
-
- ASSERT(Smi::FromInt(0) == 0); // Used for no context.
-
- addiu(sp, sp, -StackHandlerConstants::kSize);
- sw(ra, MemOperand(sp, StackHandlerConstants::kPCOffset));
- sw(zero_reg, MemOperand(sp, StackHandlerConstants::kFPOffset));
- sw(zero_reg, MemOperand(sp, StackHandlerConstants::kContextOffset));
- sw(t0, MemOperand(sp, StackHandlerConstants::kStateOffset));
- sw(t1, MemOperand(sp, StackHandlerConstants::kNextOffset));
-
- // Link this handler as the new current one.
- sw(sp, MemOperand(t2));
+ MultiPush(t1.bit() | t2.bit() | cp.bit() | fp.bit());
}
+
+ // Link the current handler as the next handler.
+ li(t2, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
+ lw(t1, MemOperand(t2));
+ push(t1);
+ // Set this new handler as the current one.
+ sw(sp, MemOperand(t2));
}
@@ -2624,19 +2793,36 @@ void MacroAssembler::PopTryHandler() {
}
-void MacroAssembler::Throw(Register value) {
- // v0 is expected to hold the exception.
- Move(v0, value);
+void MacroAssembler::JumpToHandlerEntry() {
+ // Compute the handler entry address and jump to it. The handler table is
+ // a fixed array of (smi-tagged) code offsets.
+ // v0 = exception, a1 = code object, a2 = state.
+ lw(a3, FieldMemOperand(a1, Code::kHandlerTableOffset)); // Handler table.
+ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ srl(a2, a2, StackHandler::kKindWidth); // Handler index.
+ sll(a2, a2, kPointerSizeLog2);
+ Addu(a2, a3, a2);
+ lw(a2, MemOperand(a2)); // Smi-tagged offset.
+ Addu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start.
+ sra(t9, a2, kSmiTagSize);
+ Addu(t9, t9, a1);
+ Jump(t9); // Jump.
+}
+
+void MacroAssembler::Throw(Register value) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+ STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
+
+ // The exception is expected in v0.
+ Move(v0, value);
- // Drop the sp to the top of the handler.
+ // Drop the stack pointer to the top of the top handler.
li(a3, Operand(ExternalReference(Isolate::kHandlerAddress,
isolate())));
lw(sp, MemOperand(a3));
@@ -2645,132 +2831,60 @@ void MacroAssembler::Throw(Register value) {
pop(a2);
sw(a2, MemOperand(a3));
- // Restore context and frame pointer, discard state (a3).
- MultiPop(a3.bit() | cp.bit() | fp.bit());
+ // Get the code object (a1) and state (a2). Restore the context and frame
+ // pointer.
+ MultiPop(a1.bit() | a2.bit() | cp.bit() | fp.bit());
// If the handler is a JS frame, restore the context to the frame.
- // (a3 == ENTRY) == (fp == 0) == (cp == 0), so we could test any
- // of them.
+ // (kind == ENTRY) == (fp == 0) == (cp == 0), so we could test either fp
+ // or cp.
Label done;
- Branch(&done, eq, fp, Operand(zero_reg));
+ Branch(&done, eq, cp, Operand(zero_reg));
sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
bind(&done);
-#ifdef DEBUG
- // When emitting debug_code, set ra as return address for the jump.
- // 5 instructions: add: 1, pop: 2, jump: 2.
- const int kOffsetRaInstructions = 5;
- Label find_ra;
-
- if (emit_debug_code()) {
- // Compute ra for the Jump(t9).
- const int kOffsetRaBytes = kOffsetRaInstructions * Assembler::kInstrSize;
-
- // This branch-and-link sequence is needed to get the current PC on mips,
- // saved to the ra register. Then adjusted for instruction count.
- bal(&find_ra); // bal exposes branch-delay.
- nop(); // Branch delay slot nop.
- bind(&find_ra);
- addiu(ra, ra, kOffsetRaBytes);
- }
-#endif
-
- pop(t9); // 2 instructions: lw, add sp.
- Jump(t9); // 2 instructions: jr, nop (in delay slot).
-
- if (emit_debug_code()) {
- // Make sure that the expected number of instructions were generated.
- ASSERT_EQ(kOffsetRaInstructions,
- InstructionsGeneratedSince(&find_ra));
- }
+ JumpToHandlerEntry();
}
-void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
- Register value) {
+void MacroAssembler::ThrowUncatchable(Register value) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
-
- // v0 is expected to hold the exception.
- Move(v0, value);
+ STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
- // Drop sp to the top stack handler.
+ // The exception is expected in v0.
+ if (!value.is(v0)) {
+ mov(v0, value);
+ }
+ // Drop the stack pointer to the top of the top stack handler.
li(a3, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
lw(sp, MemOperand(a3));
// Unwind the handlers until the ENTRY handler is found.
- Label loop, done;
- bind(&loop);
- // Load the type of the current stack handler.
- const int kStateOffset = StackHandlerConstants::kStateOffset;
- lw(a2, MemOperand(sp, kStateOffset));
- Branch(&done, eq, a2, Operand(StackHandler::ENTRY));
- // Fetch the next handler in the list.
- const int kNextOffset = StackHandlerConstants::kNextOffset;
- lw(sp, MemOperand(sp, kNextOffset));
- jmp(&loop);
- bind(&done);
-
- // Set the top handler address to next handler past the current ENTRY handler.
+ Label fetch_next, check_kind;
+ jmp(&check_kind);
+ bind(&fetch_next);
+ lw(sp, MemOperand(sp, StackHandlerConstants::kNextOffset));
+
+ bind(&check_kind);
+ STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
+ lw(a2, MemOperand(sp, StackHandlerConstants::kStateOffset));
+ And(a2, a2, Operand(StackHandler::KindField::kMask));
+ Branch(&fetch_next, ne, a2, Operand(zero_reg));
+
+ // Set the top handler address to next handler past the top ENTRY handler.
pop(a2);
sw(a2, MemOperand(a3));
- if (type == OUT_OF_MEMORY) {
- // Set external caught exception to false.
- ExternalReference external_caught(
- Isolate::kExternalCaughtExceptionAddress, isolate());
- li(a0, Operand(false, RelocInfo::NONE));
- li(a2, Operand(external_caught));
- sw(a0, MemOperand(a2));
-
- // Set pending exception and v0 to out of memory exception.
- Failure* out_of_memory = Failure::OutOfMemoryException();
- li(v0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
- li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate())));
- sw(v0, MemOperand(a2));
- }
-
- // Stack layout at this point. See also StackHandlerConstants.
- // sp -> state (ENTRY)
- // cp
- // fp
- // ra
-
- // Restore context and frame pointer, discard state (r2).
- MultiPop(a2.bit() | cp.bit() | fp.bit());
-
-#ifdef DEBUG
- // When emitting debug_code, set ra as return address for the jump.
- // 5 instructions: add: 1, pop: 2, jump: 2.
- const int kOffsetRaInstructions = 5;
- Label find_ra;
-
- if (emit_debug_code()) {
- // Compute ra for the Jump(t9).
- const int kOffsetRaBytes = kOffsetRaInstructions * Assembler::kInstrSize;
+ // Get the code object (a1) and state (a2). Clear the context and frame
+ // pointer (0 was saved in the handler).
+ MultiPop(a1.bit() | a2.bit() | cp.bit() | fp.bit());
- // This branch-and-link sequence is needed to get the current PC on mips,
- // saved to the ra register. Then adjusted for instruction count.
- bal(&find_ra); // bal exposes branch-delay slot.
- nop(); // Branch delay slot nop.
- bind(&find_ra);
- addiu(ra, ra, kOffsetRaBytes);
- }
-#endif
- pop(t9); // 2 instructions: lw, add sp.
- Jump(t9); // 2 instructions: jr, nop (in delay slot).
-
- if (emit_debug_code()) {
- // Make sure that the expected number of instructions were generated.
- ASSERT_EQ(kOffsetRaInstructions,
- InstructionsGeneratedSince(&find_ra));
- }
+ JumpToHandlerEntry();
}
@@ -3276,7 +3390,7 @@ void MacroAssembler::StoreNumberToDoubleElements(Register value_reg,
// Ensure that the object is a heap number
CheckMap(value_reg,
scratch1,
- isolate()->factory()->heap_number_map(),
+ Heap::kHeapNumberMapRootIndex,
fail,
DONT_DO_SMI_CHECK);
@@ -3345,17 +3459,51 @@ void MacroAssembler::StoreNumberToDoubleElements(Register value_reg,
}
+void MacroAssembler::CompareMapAndBranch(Register obj,
+ Register scratch,
+ Handle<Map> map,
+ Label* early_success,
+ Condition cond,
+ Label* branch_to,
+ CompareMapMode mode) {
+ lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
+ Operand right = Operand(map);
+ if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
+ Map* transitioned_fast_element_map(
+ map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL));
+ ASSERT(transitioned_fast_element_map == NULL ||
+ map->elements_kind() != FAST_ELEMENTS);
+ if (transitioned_fast_element_map != NULL) {
+ Branch(early_success, eq, scratch, right);
+ right = Operand(Handle<Map>(transitioned_fast_element_map));
+ }
+
+ Map* transitioned_double_map(
+ map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL));
+ ASSERT(transitioned_double_map == NULL ||
+ map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
+ if (transitioned_double_map != NULL) {
+ Branch(early_success, eq, scratch, right);
+ right = Operand(Handle<Map>(transitioned_double_map));
+ }
+ }
+
+ Branch(branch_to, cond, scratch, right);
+}
+
+
void MacroAssembler::CheckMap(Register obj,
Register scratch,
Handle<Map> map,
Label* fail,
- SmiCheckType smi_check_type) {
+ SmiCheckType smi_check_type,
+ CompareMapMode mode) {
if (smi_check_type == DO_SMI_CHECK) {
JumpIfSmi(obj, fail);
}
- lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
- li(at, Operand(map));
- Branch(fail, ne, scratch, Operand(at));
+ Label success;
+ CompareMapAndBranch(obj, scratch, map, &success, ne, fail, mode);
+ bind(&success);
}
@@ -3462,10 +3610,12 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
Handle<Code> code_constant,
Register code_reg,
Label* done,
+ bool* definitely_mismatches,
InvokeFlag flag,
const CallWrapper& call_wrapper,
CallKind call_kind) {
bool definitely_matches = false;
+ *definitely_mismatches = false;
Label regular_invoke;
// Check whether the expected and actual arguments count match. If not,
@@ -3496,6 +3646,7 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
// arguments.
definitely_matches = true;
} else {
+ *definitely_mismatches = true;
li(a2, Operand(expected.immediate()));
}
}
@@ -3519,7 +3670,9 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
SetCallKind(t1, call_kind);
Call(adaptor);
call_wrapper.AfterCall();
- jmp(done);
+ if (!*definitely_mismatches) {
+ Branch(done);
+ }
} else {
SetCallKind(t1, call_kind);
Jump(adaptor, RelocInfo::CODE_TARGET);
@@ -3540,21 +3693,25 @@ void MacroAssembler::InvokeCode(Register code,
Label done;
- InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag,
+ bool definitely_mismatches = false;
+ InvokePrologue(expected, actual, Handle<Code>::null(), code,
+ &done, &definitely_mismatches, flag,
call_wrapper, call_kind);
- if (flag == CALL_FUNCTION) {
- call_wrapper.BeforeCall(CallSize(code));
- SetCallKind(t1, call_kind);
- Call(code);
- call_wrapper.AfterCall();
- } else {
- ASSERT(flag == JUMP_FUNCTION);
- SetCallKind(t1, call_kind);
- Jump(code);
+ if (!definitely_mismatches) {
+ if (flag == CALL_FUNCTION) {
+ call_wrapper.BeforeCall(CallSize(code));
+ SetCallKind(t1, call_kind);
+ Call(code);
+ call_wrapper.AfterCall();
+ } else {
+ ASSERT(flag == JUMP_FUNCTION);
+ SetCallKind(t1, call_kind);
+ Jump(code);
+ }
+ // Continue here if InvokePrologue does handle the invocation due to
+ // mismatched parameter counts.
+ bind(&done);
}
- // Continue here if InvokePrologue does handle the invocation due to
- // mismatched parameter counts.
- bind(&done);
}
@@ -3569,18 +3726,22 @@ void MacroAssembler::InvokeCode(Handle<Code> code,
Label done;
- InvokePrologue(expected, actual, code, no_reg, &done, flag,
+ bool definitely_mismatches = false;
+ InvokePrologue(expected, actual, code, no_reg,
+ &done, &definitely_mismatches, flag,
NullCallWrapper(), call_kind);
- if (flag == CALL_FUNCTION) {
- SetCallKind(t1, call_kind);
- Call(code, rmode);
- } else {
- SetCallKind(t1, call_kind);
- Jump(code, rmode);
+ if (!definitely_mismatches) {
+ if (flag == CALL_FUNCTION) {
+ SetCallKind(t1, call_kind);
+ Call(code, rmode);
+ } else {
+ SetCallKind(t1, call_kind);
+ Jump(code, rmode);
+ }
+ // Continue here if InvokePrologue does handle the invocation due to
+ // mismatched parameter counts.
+ bind(&done);
}
- // Continue here if InvokePrologue does handle the invocation due to
- // mismatched parameter counts.
- bind(&done);
}
@@ -3613,12 +3774,13 @@ void MacroAssembler::InvokeFunction(Register function,
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
const ParameterCount& actual,
InvokeFlag flag,
+ const CallWrapper& call_wrapper,
CallKind call_kind) {
// You can't call a function without a valid frame.
ASSERT(flag == JUMP_FUNCTION || has_frame());
// Get the function and setup the context.
- li(a1, Operand(function));
+ LoadHeapObject(a1, function);
lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
ParameterCount expected(function->shared()->formal_parameter_count());
@@ -3626,7 +3788,7 @@ void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
// allow recompilation to take effect without changing any of the
// call sites.
lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
- InvokeCode(a3, expected, actual, flag, NullCallWrapper(), call_kind);
+ InvokeCode(a3, expected, actual, flag, call_wrapper, call_kind);
}
@@ -3732,10 +3894,13 @@ void MacroAssembler::GetObjectType(Register object,
// -----------------------------------------------------------------------------
// Runtime calls.
-void MacroAssembler::CallStub(CodeStub* stub, Condition cond,
- Register r1, const Operand& r2) {
+void MacroAssembler::CallStub(CodeStub* stub,
+ Condition cond,
+ Register r1,
+ const Operand& r2,
+ BranchDelaySlot bd) {
ASSERT(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs.
- Call(stub->GetCode(), RelocInfo::CODE_TARGET, kNoASTId, cond, r1, r2);
+ Call(stub->GetCode(), RelocInfo::CODE_TARGET, kNoASTId, cond, r1, r2, bd);
}
@@ -3818,8 +3983,7 @@ void MacroAssembler::CallApiFunctionAndReturn(ExternalReference function,
lw(t1, MemOperand(at));
Branch(&promote_scheduled_exception, ne, t0, Operand(t1));
li(s0, Operand(stack_space));
- LeaveExitFrame(false, s0);
- Ret();
+ LeaveExitFrame(false, s0, true);
bind(&promote_scheduled_exception);
TailCallExternalReference(
@@ -4017,8 +4181,8 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f,
// arguments passed in because it is constant. At some point we
// should remove this need and make the runtime routine entry code
// smarter.
- li(a0, num_arguments);
- li(a1, Operand(ExternalReference(f, isolate())));
+ PrepareCEntryArgs(num_arguments);
+ PrepareCEntryFunction(ExternalReference(f, isolate()));
CEntryStub stub(1);
CallStub(&stub);
}
@@ -4026,8 +4190,8 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f,
void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
const Runtime::Function* function = Runtime::FunctionForId(id);
- li(a0, Operand(function->nargs));
- li(a1, Operand(ExternalReference(function, isolate())));
+ PrepareCEntryArgs(function->nargs);
+ PrepareCEntryFunction(ExternalReference(function, isolate()));
CEntryStub stub(1, kSaveFPRegs);
CallStub(&stub);
}
@@ -4039,12 +4203,13 @@ void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
void MacroAssembler::CallExternalReference(const ExternalReference& ext,
- int num_arguments) {
- li(a0, Operand(num_arguments));
- li(a1, Operand(ext));
+ int num_arguments,
+ BranchDelaySlot bd) {
+ PrepareCEntryArgs(num_arguments);
+ PrepareCEntryFunction(ext);
CEntryStub stub(1);
- CallStub(&stub);
+ CallStub(&stub, al, zero_reg, Operand(zero_reg), bd);
}
@@ -4055,7 +4220,7 @@ void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
// arguments passed in because it is constant. At some point we
// should remove this need and make the runtime routine entry code
// smarter.
- li(a0, Operand(num_arguments));
+ PrepareCEntryArgs(num_arguments);
JumpToExternalReference(ext);
}
@@ -4069,10 +4234,16 @@ void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
}
-void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
- li(a1, Operand(builtin));
+void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin,
+ BranchDelaySlot bd) {
+ PrepareCEntryFunction(builtin);
CEntryStub stub(1);
- Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
+ Jump(stub.GetCode(),
+ RelocInfo::CODE_TARGET,
+ al,
+ zero_reg,
+ Operand(zero_reg),
+ bd);
}
@@ -4261,6 +4432,46 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
}
+void MacroAssembler::LoadTransitionedArrayMapConditional(
+ ElementsKind expected_kind,
+ ElementsKind transitioned_kind,
+ Register map_in_out,
+ Register scratch,
+ Label* no_map_match) {
+ // Load the global or builtins object from the current context.
+ lw(scratch, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ lw(scratch, FieldMemOperand(scratch, GlobalObject::kGlobalContextOffset));
+
+ // Check that the function's map is the same as the expected cached map.
+ int expected_index =
+ Context::GetContextMapIndexFromElementsKind(expected_kind);
+ lw(at, MemOperand(scratch, Context::SlotOffset(expected_index)));
+ Branch(no_map_match, ne, map_in_out, Operand(at));
+
+ // Use the transitioned cached map.
+ int trans_index =
+ Context::GetContextMapIndexFromElementsKind(transitioned_kind);
+ lw(map_in_out, MemOperand(scratch, Context::SlotOffset(trans_index)));
+}
+
+
+void MacroAssembler::LoadInitialArrayMap(
+ Register function_in, Register scratch, Register map_out) {
+ ASSERT(!function_in.is(map_out));
+ Label done;
+ lw(map_out, FieldMemOperand(function_in,
+ JSFunction::kPrototypeOrInitialMapOffset));
+ if (!FLAG_smi_only_arrays) {
+ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ map_out,
+ scratch,
+ &done);
+ }
+ bind(&done);
+}
+
+
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
// Load the global or builtins object from the current context.
lw(function, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
@@ -4291,7 +4502,7 @@ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
void MacroAssembler::EnterFrame(StackFrame::Type type) {
addiu(sp, sp, -5 * kPointerSize);
li(t8, Operand(Smi::FromInt(type)));
- li(t9, Operand(CodeObject()));
+ li(t9, Operand(CodeObject()), CONSTANT_SIZE);
sw(ra, MemOperand(sp, 4 * kPointerSize));
sw(fp, MemOperand(sp, 3 * kPointerSize));
sw(cp, MemOperand(sp, 2 * kPointerSize));
@@ -4311,7 +4522,7 @@ void MacroAssembler::LeaveFrame(StackFrame::Type type) {
void MacroAssembler::EnterExitFrame(bool save_doubles,
int stack_space) {
- // Setup the frame structure on the stack.
+ // Set up the frame structure on the stack.
STATIC_ASSERT(2 * kPointerSize == ExitFrameConstants::kCallerSPDisplacement);
STATIC_ASSERT(1 * kPointerSize == ExitFrameConstants::kCallerPCOffset);
STATIC_ASSERT(0 * kPointerSize == ExitFrameConstants::kCallerFPOffset);
@@ -4329,13 +4540,14 @@ void MacroAssembler::EnterExitFrame(bool save_doubles,
addiu(sp, sp, -4 * kPointerSize);
sw(ra, MemOperand(sp, 3 * kPointerSize));
sw(fp, MemOperand(sp, 2 * kPointerSize));
- addiu(fp, sp, 2 * kPointerSize); // Setup new frame pointer.
+ addiu(fp, sp, 2 * kPointerSize); // Set up new frame pointer.
if (emit_debug_code()) {
sw(zero_reg, MemOperand(fp, ExitFrameConstants::kSPOffset));
}
- li(t8, Operand(CodeObject())); // Accessed from ExitFrame::code_slot.
+ // Accessed from ExitFrame::code_slot.
+ li(t8, Operand(CodeObject()), CONSTANT_SIZE);
sw(t8, MemOperand(fp, ExitFrameConstants::kCodeOffset));
// Save the frame pointer and the context in top.
@@ -4379,7 +4591,8 @@ void MacroAssembler::EnterExitFrame(bool save_doubles,
void MacroAssembler::LeaveExitFrame(bool save_doubles,
- Register argument_count) {
+ Register argument_count,
+ bool do_return) {
// Optionally restore all double registers.
if (save_doubles) {
// Remember: we only need to restore every 2nd double FPU value.
@@ -4405,11 +4618,17 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles,
mov(sp, fp); // Respect ABI stack constraint.
lw(fp, MemOperand(sp, ExitFrameConstants::kCallerFPOffset));
lw(ra, MemOperand(sp, ExitFrameConstants::kCallerPCOffset));
- addiu(sp, sp, 8);
+
if (argument_count.is_valid()) {
sll(t8, argument_count, kPointerSizeLog2);
addu(sp, sp, t8);
}
+
+ if (do_return) {
+ Ret(USE_DELAY_SLOT);
+ // If returning, the instruction in the delay slot will be the addiu below.
+ }
+ addiu(sp, sp, 8);
}
@@ -4474,14 +4693,71 @@ void MacroAssembler::JumpIfNotPowerOfTwoOrZero(
}
+void MacroAssembler::SmiTagCheckOverflow(Register reg, Register overflow) {
+ ASSERT(!reg.is(overflow));
+ mov(overflow, reg); // Save original value.
+ SmiTag(reg);
+ xor_(overflow, overflow, reg); // Overflow if (value ^ 2 * value) < 0.
+}
+
+
+void MacroAssembler::SmiTagCheckOverflow(Register dst,
+ Register src,
+ Register overflow) {
+ if (dst.is(src)) {
+ // Fall back to slower case.
+ SmiTagCheckOverflow(dst, overflow);
+ } else {
+ ASSERT(!dst.is(src));
+ ASSERT(!dst.is(overflow));
+ ASSERT(!src.is(overflow));
+ SmiTag(dst, src);
+ xor_(overflow, dst, src); // Overflow if (value ^ 2 * value) < 0.
+ }
+}
+
+
+void MacroAssembler::UntagAndJumpIfSmi(Register dst,
+ Register src,
+ Label* smi_case) {
+ JumpIfSmi(src, smi_case, at, USE_DELAY_SLOT);
+ SmiUntag(dst, src);
+}
+
+
+void MacroAssembler::UntagAndJumpIfNotSmi(Register dst,
+ Register src,
+ Label* non_smi_case) {
+ JumpIfNotSmi(src, non_smi_case, at, USE_DELAY_SLOT);
+ SmiUntag(dst, src);
+}
+
+void MacroAssembler::JumpIfSmi(Register value,
+ Label* smi_label,
+ Register scratch,
+ BranchDelaySlot bd) {
+ ASSERT_EQ(0, kSmiTag);
+ andi(scratch, value, kSmiTagMask);
+ Branch(bd, smi_label, eq, scratch, Operand(zero_reg));
+}
+
+void MacroAssembler::JumpIfNotSmi(Register value,
+ Label* not_smi_label,
+ Register scratch,
+ BranchDelaySlot bd) {
+ ASSERT_EQ(0, kSmiTag);
+ andi(scratch, value, kSmiTagMask);
+ Branch(bd, not_smi_label, ne, scratch, Operand(zero_reg));
+}
+
+
void MacroAssembler::JumpIfNotBothSmi(Register reg1,
Register reg2,
Label* on_not_both_smi) {
STATIC_ASSERT(kSmiTag == 0);
ASSERT_EQ(1, kSmiTagMask);
or_(at, reg1, reg2);
- andi(at, at, kSmiTagMask);
- Branch(on_not_both_smi, ne, at, Operand(zero_reg));
+ JumpIfNotSmi(at, on_not_both_smi);
}
@@ -4492,8 +4768,7 @@ void MacroAssembler::JumpIfEitherSmi(Register reg1,
ASSERT_EQ(1, kSmiTagMask);
// Both Smi tags must be 1 (not Smi).
and_(at, reg1, reg2);
- andi(at, at, kSmiTagMask);
- Branch(on_either_smi, eq, at, Operand(zero_reg));
+ JumpIfSmi(at, on_either_smi);
}
@@ -4571,8 +4846,7 @@ void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first,
// Check that neither is a smi.
STATIC_ASSERT(kSmiTag == 0);
And(scratch1, first, Operand(second));
- And(scratch1, scratch1, Operand(kSmiTagMask));
- Branch(failure, eq, scratch1, Operand(zero_reg));
+ JumpIfSmi(scratch1, failure);
JumpIfNonSmisNotBothSequentialAsciiStrings(first,
second,
scratch1,
@@ -4766,6 +5040,34 @@ void MacroAssembler::PatchRelocatedValue(Register li_location,
FlushICache(li_location, 2);
}
+void MacroAssembler::GetRelocatedValue(Register li_location,
+ Register value,
+ Register scratch) {
+ lw(value, MemOperand(li_location));
+ if (emit_debug_code()) {
+ And(value, value, kOpcodeMask);
+ Check(eq, "The instruction should be a lui.",
+ value, Operand(LUI));
+ lw(value, MemOperand(li_location));
+ }
+
+ // value now holds a lui instruction. Extract the immediate.
+ sll(value, value, kImm16Bits);
+
+ lw(scratch, MemOperand(li_location, kInstrSize));
+ if (emit_debug_code()) {
+ And(scratch, scratch, kOpcodeMask);
+ Check(eq, "The instruction should be an ori.",
+ scratch, Operand(ORI));
+ lw(scratch, MemOperand(li_location, kInstrSize));
+ }
+ // "scratch" now holds an ori instruction. Extract the immediate.
+ andi(scratch, scratch, kImm16Mask);
+
+ // Merge the results.
+ or_(value, value, scratch);
+}
+
void MacroAssembler::CheckPageFlag(
Register object,
@@ -4879,7 +5181,7 @@ void MacroAssembler::EnsureNotWhite(
And(t8, mask_scratch, load_scratch);
Branch(&done, ne, t8, Operand(zero_reg));
- if (FLAG_debug_code) {
+ if (emit_debug_code()) {
// Check for impossible bit pattern.
Label ok;
// sll may overflow, making the check conservative.
@@ -4971,11 +5273,53 @@ void MacroAssembler::LoadInstanceDescriptors(Register map,
FieldMemOperand(map, Map::kInstanceDescriptorsOrBitField3Offset));
Label not_smi;
JumpIfNotSmi(descriptors, &not_smi);
- li(descriptors, Operand(FACTORY->empty_descriptor_array()));
+ LoadRoot(descriptors, Heap::kEmptyDescriptorArrayRootIndex);
bind(&not_smi);
}
+void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
+ Label next;
+ // Preload a couple of values used in the loop.
+ Register empty_fixed_array_value = t2;
+ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
+ Register empty_descriptor_array_value = t3;
+ LoadRoot(empty_descriptor_array_value,
+ Heap::kEmptyDescriptorArrayRootIndex);
+ mov(a1, a0);
+ bind(&next);
+
+ // Check that there are no elements. Register a1 contains the
+ // current JS object we've reached through the prototype chain.
+ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
+ Branch(call_runtime, ne, a2, Operand(empty_fixed_array_value));
+
+ // Check that instance descriptors are not empty so that we can
+ // check for an enum cache. Leave the map in a2 for the subsequent
+ // prototype load.
+ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
+ lw(a3, FieldMemOperand(a2, Map::kInstanceDescriptorsOrBitField3Offset));
+ JumpIfSmi(a3, call_runtime);
+
+ // Check that there is an enum cache in the non-empty instance
+ // descriptors (a3). This is the case if the next enumeration
+ // index field does not contain a smi.
+ lw(a3, FieldMemOperand(a3, DescriptorArray::kEnumerationIndexOffset));
+ JumpIfSmi(a3, call_runtime);
+
+ // For all objects but the receiver, check that the cache is empty.
+ Label check_prototype;
+ Branch(&check_prototype, eq, a1, Operand(a0));
+ lw(a3, FieldMemOperand(a3, DescriptorArray::kEnumCacheBridgeCacheOffset));
+ Branch(call_runtime, ne, a3, Operand(empty_fixed_array_value));
+
+ // Load the prototype from the map and loop if non-null.
+ bind(&check_prototype);
+ lw(a1, FieldMemOperand(a2, Map::kPrototypeOffset));
+ Branch(&next, ne, a1, Operand(null_value));
+}
+
+
void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) {
ASSERT(!output_reg.is(input_reg));
Label done;
diff --git a/src/3rdparty/v8/src/mips/macro-assembler-mips.h b/src/3rdparty/v8/src/mips/macro-assembler-mips.h
index 0a78f6d..f57418f 100644
--- a/src/3rdparty/v8/src/mips/macro-assembler-mips.h
+++ b/src/3rdparty/v8/src/mips/macro-assembler-mips.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -51,16 +51,6 @@ class JumpTarget;
// MIPS generated code calls C code, it must be via t9 register.
-// Register aliases.
-// cp is assumed to be a callee saved register.
-const Register lithiumScratchReg = s3; // Scratch register.
-const Register lithiumScratchReg2 = s4; // Scratch register.
-const Register condReg = s5; // Simulated (partial) condition code for mips.
-const Register roots = s6; // Roots array pointer.
-const Register cp = s7; // JavaScript context pointer.
-const Register fp = s8_fp; // Alias for fp.
-const DoubleRegister lithiumScratchDouble = f30; // Double scratch register.
-
// Flags used for the AllocateInNewSpace functions.
enum AllocationFlags {
// No special flags.
@@ -91,6 +81,16 @@ enum BranchDelaySlot {
PROTECT
};
+// Flags used for the li macro-assembler function.
+enum LiFlags {
+ // If the constant value can be represented in just 16 bits, then
+ // optimize the li to use a single instruction, rather than lui/ori pair.
+ OPTIMIZE_SIZE = 0,
+ // Always use 2 instructions (lui/ori pair), even if the constant could
+ // be loaded with just one, so that this value is patchable later.
+ CONSTANT_SIZE = 1
+};
+
enum RememberedSetAction { EMIT_REMEMBERED_SET, OMIT_REMEMBERED_SET };
enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK };
@@ -102,30 +102,25 @@ bool AreAliased(Register r1, Register r2, Register r3, Register r4);
// -----------------------------------------------------------------------------
// Static helper functions.
-static MemOperand ContextOperand(Register context, int index) {
+inline MemOperand ContextOperand(Register context, int index) {
return MemOperand(context, Context::SlotOffset(index));
}
-static inline MemOperand GlobalObjectOperand() {
+inline MemOperand GlobalObjectOperand() {
return ContextOperand(cp, Context::GLOBAL_INDEX);
}
-static inline MemOperand QmlGlobalObjectOperand() {
- return ContextOperand(cp, Context::QML_GLOBAL_INDEX);
-}
-
-
// Generate a MemOperand for loading a field from an object.
-static inline MemOperand FieldMemOperand(Register object, int offset) {
+inline MemOperand FieldMemOperand(Register object, int offset) {
return MemOperand(object, offset - kHeapObjectTag);
}
// Generate a MemOperand for storing arguments 5..N on the stack
// when calling CallCFunction().
-static inline MemOperand CFunctionArgumentOperand(int index) {
+inline MemOperand CFunctionArgumentOperand(int index) {
ASSERT(index > kCArgSlotCount);
// Argument 5 takes the slot just past the four Arg-slots.
int offset = (index - 5) * kPointerSize + kCArgsSlotsSize;
@@ -199,6 +194,12 @@ class MacroAssembler: public Assembler {
Ret(cond, rs, rt, bd);
}
+ void Branch(Label* L,
+ Condition cond,
+ Register rs,
+ Heap::RootListIndex index,
+ BranchDelaySlot bdslot = PROTECT);
+
#undef COND_ARGS
// Emit code to discard a non-negative number of pointer-sized elements
@@ -208,10 +209,14 @@ class MacroAssembler: public Assembler {
Register reg = no_reg,
const Operand& op = Operand(no_reg));
- void DropAndRet(int drop = 0,
- Condition cond = cc_always,
- Register reg = no_reg,
- const Operand& op = Operand(no_reg));
+ // Trivial case of DropAndRet that utilizes the delay slot and only emits
+ // 2 instructions.
+ void DropAndRet(int drop);
+
+ void DropAndRet(int drop,
+ Condition cond,
+ Register reg,
+ const Operand& op);
// Swap two registers. If the scratch register is omitted then a slightly
// less efficient form using xor instead of mov is emitted.
@@ -241,7 +246,14 @@ class MacroAssembler: public Assembler {
mtc1(src_high, FPURegister::from_code(dst.code() + 1));
}
+ // Conditional move.
void Move(FPURegister dst, double imm);
+ void Movz(Register rd, Register rs, Register rt);
+ void Movn(Register rd, Register rs, Register rt);
+ void Movt(Register rd, Register rs, uint16_t cc = 0);
+ void Movf(Register rd, Register rs, uint16_t cc = 0);
+
+ void Clz(Register rd, Register rs);
// Jump unconditionally to given label.
// We NEED a nop in the branch delay slot, as it used by v8, for example in
@@ -252,7 +264,6 @@ class MacroAssembler: public Assembler {
Branch(L);
}
-
// Load an object from the root table.
void LoadRoot(Register destination,
Heap::RootListIndex index);
@@ -267,6 +278,15 @@ class MacroAssembler: public Assembler {
Heap::RootListIndex index,
Condition cond, Register src1, const Operand& src2);
+ void LoadHeapObject(Register dst, Handle<HeapObject> object);
+
+ void LoadObject(Register result, Handle<Object> object) {
+ if (object->IsHeapObject()) {
+ LoadHeapObject(result, Handle<HeapObject>::cast(object));
+ } else {
+ li(result, object);
+ }
+ }
// ---------------------------------------------------------------------------
// GC Support
@@ -336,7 +356,7 @@ class MacroAssembler: public Assembler {
Register scratch3,
Label* object_is_white_and_not_data);
- // Detects conservatively whether an object is data-only, ie it does need to
+ // Detects conservatively whether an object is data-only, i.e. it does need to
// be scanned by the garbage collector.
void JumpIfDataObject(Register value,
Register scratch,
@@ -401,6 +421,7 @@ class MacroAssembler: public Assembler {
Register scratch,
Label* miss);
+ void GetNumberHash(Register reg0, Register scratch);
void LoadFromNumberDictionary(Label* miss,
Register elements,
@@ -416,7 +437,7 @@ class MacroAssembler: public Assembler {
}
// Check if the given instruction is a 'type' marker.
- // ie. check if it is a sll zero_reg, zero_reg, <type> (referenced as
+ // i.e. check if it is a sll zero_reg, zero_reg, <type> (referenced as
// nop(type)). These instructions are generated to mark special location in
// the code, like some special IC code.
static inline bool IsMarkedCode(Instr instr, int type) {
@@ -573,12 +594,13 @@ class MacroAssembler: public Assembler {
void mov(Register rd, Register rt) { or_(rd, rt, zero_reg); }
// Load int32 in the rd register.
- void li(Register rd, Operand j, bool gen2instr = false);
- inline void li(Register rd, int32_t j, bool gen2instr = false) {
- li(rd, Operand(j), gen2instr);
+ void li(Register rd, Operand j, LiFlags mode = OPTIMIZE_SIZE);
+ inline void li(Register rd, int32_t j, LiFlags mode = OPTIMIZE_SIZE) {
+ li(rd, Operand(j), mode);
}
- inline void li(Register dst, Handle<Object> value, bool gen2instr = false) {
- li(dst, Operand(value), gen2instr);
+ inline void li(Register dst, Handle<Object> value,
+ LiFlags mode = OPTIMIZE_SIZE) {
+ li(dst, Operand(value), mode);
}
// Push multiple registers on the stack.
@@ -697,6 +719,10 @@ class MacroAssembler: public Assembler {
void Trunc_uw_d(FPURegister fd, FPURegister fs, FPURegister scratch);
void Trunc_uw_d(FPURegister fd, Register rs, FPURegister scratch);
+ void Trunc_w_d(FPURegister fd, FPURegister fs);
+ void Round_w_d(FPURegister fd, FPURegister fs);
+ void Floor_w_d(FPURegister fd, FPURegister fs);
+ void Ceil_w_d(FPURegister fd, FPURegister fs);
// Wrapper function for the different cmp/branch types.
void BranchF(Label* target,
Label* nan,
@@ -767,7 +793,9 @@ class MacroAssembler: public Assembler {
int stack_space = 0);
// Leave the current exit frame.
- void LeaveExitFrame(bool save_doubles, Register arg_count);
+ void LeaveExitFrame(bool save_doubles,
+ Register arg_count,
+ bool do_return = false);
// Get the actual activation frame alignment for target environment.
static int ActivationFrameAlignment();
@@ -777,6 +805,22 @@ class MacroAssembler: public Assembler {
void LoadContext(Register dst, int context_chain_length);
+ // Conditionally load the cached Array transitioned map of type
+ // transitioned_kind from the global context if the map in register
+ // map_in_out is the cached Array map in the global context of
+ // expected_kind.
+ void LoadTransitionedArrayMapConditional(
+ ElementsKind expected_kind,
+ ElementsKind transitioned_kind,
+ Register map_in_out,
+ Register scratch,
+ Label* no_map_match);
+
+ // Load the initial map for new Arrays from a JSFunction.
+ void LoadInitialArrayMap(Register function_in,
+ Register scratch,
+ Register map_out);
+
void LoadGlobalFunction(int index, Register function);
// Load the initial map from the global function. The registers
@@ -785,11 +829,16 @@ class MacroAssembler: public Assembler {
Register map,
Register scratch);
+ void InitializeRootRegister() {
+ ExternalReference roots_array_start =
+ ExternalReference::roots_array_start(isolate());
+ li(kRootRegister, Operand(roots_array_start));
+ }
// -------------------------------------------------------------------------
// JavaScript invokes.
- // Setup call kind marking in t1. The method takes t1 as an
+ // Set up call kind marking in t1. The method takes t1 as an
// explicit first parameter to make the code more readable at the
// call sites.
void SetCallKind(Register dst, CallKind kind);
@@ -820,6 +869,7 @@ class MacroAssembler: public Assembler {
void InvokeFunction(Handle<JSFunction> function,
const ParameterCount& actual,
InvokeFlag flag,
+ const CallWrapper& call_wrapper,
CallKind call_kind);
@@ -848,20 +898,18 @@ class MacroAssembler: public Assembler {
// Exception handling.
// Push a new try handler and link into try handler chain.
- // The return address must be passed in register ra.
- // Clobber t0, t1, t2.
- void PushTryHandler(CodeLocation try_location, HandlerType type);
+ void PushTryHandler(StackHandler::Kind kind, int handler_index);
// Unlink the stack handler on top of the stack from the try handler chain.
// Must preserve the result register.
void PopTryHandler();
- // Passes thrown value (in v0) to the handler of top of the try handler chain.
+ // Passes thrown value to the handler of top of the try handler chain.
void Throw(Register value);
// Propagates an uncatchable exception to the top of the current JS stack's
// handler chain.
- void ThrowUncatchable(UncatchableExceptionType type, Register value);
+ void ThrowUncatchable(Register value);
// Copies a fixed number of fields of heap objects from src to dst.
void CopyFields(Register dst, Register src, RegList temps, int field_count);
@@ -919,7 +967,8 @@ class MacroAssembler: public Assembler {
// Check to see if maybe_number can be stored as a double in
// FastDoubleElements. If it can, store it at the index specified by key in
- // the FastDoubleElements array elements, otherwise jump to fail.
+ // the FastDoubleElements array elements. Otherwise jump to fail, in which
+ // case scratch2, scratch3 and scratch4 are unmodified.
void StoreNumberToDoubleElements(Register value_reg,
Register key_reg,
Register receiver_reg,
@@ -930,15 +979,29 @@ class MacroAssembler: public Assembler {
Register scratch4,
Label* fail);
- // Check if the map of an object is equal to a specified map (either
- // given directly or as an index into the root list) and branch to
- // label if not. Skip the smi check if not required (object is known
- // to be a heap object).
+ // Compare an object's map with the specified map and its transitioned
+ // elements maps if mode is ALLOW_ELEMENT_TRANSITION_MAPS. Jumps to
+ // "branch_to" if the result of the comparison is "cond". If multiple map
+ // compares are required, the compare sequences branches to early_success.
+ void CompareMapAndBranch(Register obj,
+ Register scratch,
+ Handle<Map> map,
+ Label* early_success,
+ Condition cond,
+ Label* branch_to,
+ CompareMapMode mode = REQUIRE_EXACT_MAP);
+
+ // Check if the map of an object is equal to a specified map and branch to
+ // label if not. Skip the smi check if not required (object is known to be a
+ // heap object). If mode is ALLOW_ELEMENT_TRANSITION_MAPS, then also match
+ // against maps that are ElementsKind transition maps of the specificed map.
void CheckMap(Register obj,
Register scratch,
Handle<Map> map,
Label* fail,
- SmiCheckType smi_check_type);
+ SmiCheckType smi_check_type,
+ CompareMapMode mode = REQUIRE_EXACT_MAP);
+
void CheckMap(Register obj,
Register scratch,
@@ -1043,9 +1106,22 @@ class MacroAssembler: public Assembler {
// -------------------------------------------------------------------------
// Runtime calls.
+ // See comments at the beginning of CEntryStub::Generate.
+ inline void PrepareCEntryArgs(int num_args) {
+ li(s0, num_args);
+ li(s1, (num_args - 1) * kPointerSize);
+ }
+
+ inline void PrepareCEntryFunction(const ExternalReference& ref) {
+ li(s2, Operand(ref));
+ }
+
// Call a code stub.
- void CallStub(CodeStub* stub, Condition cond = cc_always,
- Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg));
+ void CallStub(CodeStub* stub,
+ Condition cond = cc_always,
+ Register r1 = zero_reg,
+ const Operand& r2 = Operand(zero_reg),
+ BranchDelaySlot bd = PROTECT);
// Tail call a code stub (jump).
void TailCallStub(CodeStub* stub);
@@ -1061,7 +1137,8 @@ class MacroAssembler: public Assembler {
// Convenience function: call an external reference.
void CallExternalReference(const ExternalReference& ext,
- int num_arguments);
+ int num_arguments,
+ BranchDelaySlot bd = PROTECT);
// Tail call of a runtime routine (jump).
// Like JumpToExternalReference, but also takes care of passing the number
@@ -1122,12 +1199,13 @@ class MacroAssembler: public Assembler {
// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Restores context. stack_space
- // - space to be unwound on exit (includes the call js arguments space and
+ // - space to be unwound on exit (includes the call JS arguments space and
// the additional space allocated for the fast call).
void CallApiFunctionAndReturn(ExternalReference function, int stack_space);
// Jump to the builtin routine.
- void JumpToExternalReference(const ExternalReference& builtin);
+ void JumpToExternalReference(const ExternalReference& builtin,
+ BranchDelaySlot bd = PROTECT);
// Invoke specified builtin JavaScript function. Adds an entry to
// the unresolved list if the name does not resolve.
@@ -1202,24 +1280,13 @@ class MacroAssembler: public Assembler {
// -------------------------------------------------------------------------
// Smi utilities.
- // Try to convert int32 to smi. If the value is to large, preserve
- // the original value and jump to not_a_smi. Destroys scratch and
- // sets flags.
- // This is only used by crankshaft atm so it is unimplemented on MIPS.
- void TrySmiTag(Register reg, Label* not_a_smi, Register scratch) {
- UNIMPLEMENTED_MIPS();
- }
-
void SmiTag(Register reg) {
Addu(reg, reg, reg);
}
// Test for overflow < 0: use BranchOnOverflow() or BranchOnNoOverflow().
- void SmiTagCheckOverflow(Register reg, Register overflow) {
- mov(overflow, reg); // Save original value.
- addu(reg, reg, reg);
- xor_(overflow, overflow, reg); // Overflow if (value ^ 2 * value) < 0.
- }
+ void SmiTagCheckOverflow(Register reg, Register overflow);
+ void SmiTagCheckOverflow(Register dst, Register src, Register overflow);
void SmiTag(Register dst, Register src) {
Addu(dst, src, src);
@@ -1233,22 +1300,25 @@ class MacroAssembler: public Assembler {
sra(dst, src, kSmiTagSize);
}
+ // Untag the source value into destination and jump if source is a smi.
+ // Souce and destination can be the same register.
+ void UntagAndJumpIfSmi(Register dst, Register src, Label* smi_case);
+
+ // Untag the source value into destination and jump if source is not a smi.
+ // Souce and destination can be the same register.
+ void UntagAndJumpIfNotSmi(Register dst, Register src, Label* non_smi_case);
+
// Jump the register contains a smi.
- inline void JumpIfSmi(Register value, Label* smi_label,
- Register scratch = at,
- BranchDelaySlot bd = PROTECT) {
- ASSERT_EQ(0, kSmiTag);
- andi(scratch, value, kSmiTagMask);
- Branch(bd, smi_label, eq, scratch, Operand(zero_reg));
- }
+ void JumpIfSmi(Register value,
+ Label* smi_label,
+ Register scratch = at,
+ BranchDelaySlot bd = PROTECT);
// Jump if the register contains a non-smi.
- inline void JumpIfNotSmi(Register value, Label* not_smi_label,
- Register scratch = at) {
- ASSERT_EQ(0, kSmiTag);
- andi(scratch, value, kSmiTagMask);
- Branch(not_smi_label, ne, scratch, Operand(zero_reg));
- }
+ void JumpIfNotSmi(Register value,
+ Label* not_smi_label,
+ Register scratch = at,
+ BranchDelaySlot bd = PROTECT);
// Jump if either of the registers contain a non-smi.
void JumpIfNotBothSmi(Register reg1, Register reg2, Label* on_not_both_smi);
@@ -1327,6 +1397,14 @@ class MacroAssembler: public Assembler {
void PatchRelocatedValue(Register li_location,
Register scratch,
Register new_value);
+ // Get the relocatad value (loaded data) from the lui/ori pair.
+ void GetRelocatedValue(Register li_location,
+ Register value,
+ Register scratch);
+
+ // Expects object in a0 and returns map with validated enum cache
+ // in a0. Assumes that any other register can be used as a scratch.
+ void CheckEnumCache(Register null_value, Label* call_runtime);
private:
void CallCFunctionHelper(Register function,
@@ -1359,6 +1437,7 @@ class MacroAssembler: public Assembler {
Handle<Code> code_constant,
Register code_reg,
Label* done,
+ bool* definitely_mismatches,
InvokeFlag flag,
const CallWrapper& call_wrapper,
CallKind call_kind);
@@ -1386,6 +1465,10 @@ class MacroAssembler: public Assembler {
Register bitmap_reg,
Register mask_reg);
+ // Helper for throwing exceptions. Compute a handler address and jump to
+ // it. See the implementation for register usage.
+ void JumpToHandlerEntry();
+
// Compute memory operands for safepoint stack slots.
static int SafepointRegisterStackIndex(int reg_code);
MemOperand SafepointRegisterSlot(Register reg);
diff --git a/src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.cc b/src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.cc
index cb210fe..dde4a65 100644
--- a/src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.cc
+++ b/src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.cc
@@ -1,4 +1,4 @@
-// Copyright 2006-2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -386,7 +386,7 @@ void RegExpMacroAssemblerMIPS::CheckNotBackReferenceIgnoreCase(
// Restore regexp engine registers.
__ MultiPop(regexp_registers_to_retain);
- __ li(code_pointer(), Operand(masm_->CodeObject()));
+ __ li(code_pointer(), Operand(masm_->CodeObject()), CONSTANT_SIZE);
__ lw(end_of_input_address(), MemOperand(frame_pointer(), kInputEnd));
// Check if function returned non-zero for success or zero for failure.
@@ -482,6 +482,42 @@ void RegExpMacroAssemblerMIPS::CheckNotCharacterAfterMinusAnd(
}
+void RegExpMacroAssemblerMIPS::CheckCharacterInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_in_range) {
+ __ Subu(a0, current_character(), Operand(from));
+ // Unsigned lower-or-same condition.
+ BranchOrBacktrack(on_in_range, ls, a0, Operand(to - from));
+}
+
+
+void RegExpMacroAssemblerMIPS::CheckCharacterNotInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_not_in_range) {
+ __ Subu(a0, current_character(), Operand(from));
+ // Unsigned higher condition.
+ BranchOrBacktrack(on_not_in_range, hi, a0, Operand(to - from));
+}
+
+
+void RegExpMacroAssemblerMIPS::CheckBitInTable(
+ Handle<ByteArray> table,
+ Label* on_bit_set) {
+ __ li(a0, Operand(table));
+ if (mode_ != ASCII || kTableMask != String::kMaxAsciiCharCode) {
+ __ And(a1, current_character(), Operand(kTableSize - 1));
+ __ Addu(a0, a0, a1);
+ } else {
+ __ Addu(a0, a0, current_character());
+ }
+
+ __ lbu(a0, FieldMemOperand(a0, ByteArray::kHeaderSize));
+ BranchOrBacktrack(on_bit_set, ne, a0, Operand(zero_reg));
+}
+
+
bool RegExpMacroAssemblerMIPS::CheckSpecialCharacterClass(uc16 type,
Label* on_no_match) {
// Range checks (c in min..max) are generally implemented by an unsigned
@@ -678,7 +714,7 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
// string, and store that value in a local variable.
__ mov(t5, a1);
__ li(a1, Operand(1));
- __ movn(a1, zero_reg, t5);
+ __ Movn(a1, zero_reg, t5);
__ sw(a1, MemOperand(frame_pointer(), kAtStart));
if (num_saved_registers_ > 0) { // Always is, if generated from a regexp.
@@ -698,7 +734,7 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
// Initialize backtrack stack pointer.
__ lw(backtrack_stackpointer(), MemOperand(frame_pointer(), kStackHighEnd));
// Initialize code pointer register
- __ li(code_pointer(), Operand(masm_->CodeObject()));
+ __ li(code_pointer(), Operand(masm_->CodeObject()), CONSTANT_SIZE);
// Load previous char as initial value of current character register.
Label at_start;
__ lw(a0, MemOperand(frame_pointer(), kAtStart));
@@ -783,7 +819,7 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
// String might have moved: Reload end of string from frame.
__ lw(end_of_input_address(), MemOperand(frame_pointer(), kInputEnd));
- __ li(code_pointer(), Operand(masm_->CodeObject()));
+ __ li(code_pointer(), Operand(masm_->CodeObject()), CONSTANT_SIZE);
SafeReturn();
}
@@ -813,7 +849,7 @@ Handle<HeapObject> RegExpMacroAssemblerMIPS::GetCode(Handle<String> source) {
// Otherwise use return value as new stack pointer.
__ mov(backtrack_stackpointer(), v0);
// Restore saved registers and continue.
- __ li(code_pointer(), Operand(masm_->CodeObject()));
+ __ li(code_pointer(), Operand(masm_->CodeObject()), CONSTANT_SIZE);
__ lw(end_of_input_address(), MemOperand(frame_pointer(), kInputEnd));
SafeReturn();
}
@@ -1010,7 +1046,7 @@ void RegExpMacroAssemblerMIPS::CallCheckStackGuardState(Register scratch) {
__ PrepareCallCFunction(num_arguments, scratch);
__ mov(a2, frame_pointer());
// Code* of self.
- __ li(a1, Operand(masm_->CodeObject()));
+ __ li(a1, Operand(masm_->CodeObject()), CONSTANT_SIZE);
// a0 becomes return address pointer.
ExternalReference stack_guard_check =
ExternalReference::re_check_stack_guard_state(masm_->isolate());
@@ -1056,7 +1092,7 @@ int RegExpMacroAssemblerMIPS::CheckStackGuardState(Address* return_address,
ASSERT(*return_address <=
re_code->instruction_start() + re_code->instruction_size());
- MaybeObject* result = Execution::HandleStackGuardInterrupt();
+ MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate);
if (*code_handle != re_code) { // Return address no longer valid.
int delta = code_handle->address() - re_code->address();
@@ -1229,7 +1265,7 @@ void RegExpMacroAssemblerMIPS::CallCFunctionUsingStub(
if (OS::ActivationFrameAlignment() != 0) {
__ lw(sp, MemOperand(sp, 16));
}
- __ li(code_pointer(), Operand(masm_->CodeObject()));
+ __ li(code_pointer(), Operand(masm_->CodeObject()), CONSTANT_SIZE);
}
diff --git a/src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.h b/src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.h
index d42d4cf..d167f62 100644
--- a/src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.h
+++ b/src/3rdparty/v8/src/mips/regexp-macro-assembler-mips.h
@@ -81,6 +81,14 @@ class RegExpMacroAssemblerMIPS: public NativeRegExpMacroAssembler {
uc16 minus,
uc16 mask,
Label* on_not_equal);
+ virtual void CheckCharacterInRange(uc16 from,
+ uc16 to,
+ Label* on_in_range);
+ virtual void CheckCharacterNotInRange(uc16 from,
+ uc16 to,
+ Label* on_not_in_range);
+ virtual void CheckBitInTable(Handle<ByteArray> table, Label* on_bit_set);
+
// Checks whether the given offset from the current position is before
// the end of the string.
virtual void CheckPosition(int cp_offset, Label* on_outside_input);
diff --git a/src/3rdparty/v8/src/mips/simulator-mips.cc b/src/3rdparty/v8/src/mips/simulator-mips.cc
index 0ec3e28..f31ce7e 100644
--- a/src/3rdparty/v8/src/mips/simulator-mips.cc
+++ b/src/3rdparty/v8/src/mips/simulator-mips.cc
@@ -72,7 +72,7 @@ uint32_t get_fcsr_condition_bit(uint32_t cc) {
// code.
class MipsDebugger {
public:
- explicit MipsDebugger(Simulator* sim);
+ explicit MipsDebugger(Simulator* sim) : sim_(sim) { }
~MipsDebugger();
void Stop(Instruction* instr);
@@ -105,10 +105,6 @@ class MipsDebugger {
void RedoBreakpoints();
};
-MipsDebugger::MipsDebugger(Simulator* sim) {
- sim_ = sim;
-}
-
MipsDebugger::~MipsDebugger() {
}
@@ -391,6 +387,13 @@ void MipsDebugger::Debug() {
if (line == NULL) {
break;
} else {
+ char* last_input = sim_->last_debugger_input();
+ if (strcmp(line, "\n") == 0 && last_input != NULL) {
+ line = last_input;
+ } else {
+ // Ownership is transferred to sim_;
+ sim_->set_last_debugger_input(line);
+ }
// Use sscanf to parse the individual parts of the command line. At the
// moment no command expects more than two parameters.
int argc = SScanF(line,
@@ -757,7 +760,6 @@ void MipsDebugger::Debug() {
PrintF("Unknown command: %s\n", cmd);
}
}
- DeleteArray(line);
}
// Add all the breakpoints back to stop execution and enter the debugger
@@ -791,6 +793,12 @@ static bool AllOnOnePage(uintptr_t start, int size) {
}
+void Simulator::set_last_debugger_input(char* input) {
+ DeleteArray(last_debugger_input_);
+ last_debugger_input_ = input;
+}
+
+
void Simulator::FlushICache(v8::internal::HashMap* i_cache,
void* start_addr,
size_t size) {
@@ -880,7 +888,7 @@ Simulator::Simulator(Isolate* isolate) : isolate_(isolate) {
isolate_->set_simulator_i_cache(i_cache_);
}
Initialize(isolate);
- // Setup simulator support first. Some of this information is needed to
+ // Set up simulator support first. Some of this information is needed to
// setup the architecture state.
stack_ = reinterpret_cast<char*>(malloc(stack_size_));
pc_modified_ = false;
@@ -889,7 +897,7 @@ Simulator::Simulator(Isolate* isolate) : isolate_(isolate) {
break_pc_ = NULL;
break_instr_ = 0;
- // Setup architecture state.
+ // Set up architecture state.
// All registers are initialized to zero to start with.
for (int i = 0; i < kNumSimuRegisters; i++) {
registers_[i] = 0;
@@ -911,6 +919,8 @@ Simulator::Simulator(Isolate* isolate) : isolate_(isolate) {
for (int i = 0; i < kNumExceptions; i++) {
exceptions[i] = 0;
}
+
+ last_debugger_input_ = NULL;
}
@@ -1359,9 +1369,9 @@ void Simulator::WriteB(int32_t addr, int8_t value) {
// Returns the limit of the stack area to enable checking for stack overflows.
uintptr_t Simulator::StackLimit() const {
- // Leave a safety margin of 512 bytes to prevent overrunning the stack when
+ // Leave a safety margin of 1024 bytes to prevent overrunning the stack when
// pushing values.
- return reinterpret_cast<uintptr_t>(stack_) + 512;
+ return reinterpret_cast<uintptr_t>(stack_) + 1024;
}
@@ -1934,7 +1944,7 @@ void Simulator::DecodeTypeRegister(Instruction* instr) {
// Next pc
int32_t next_pc = 0;
- // Setup the variables if needed before executing the instruction.
+ // Set up the variables if needed before executing the instruction.
ConfigureTypeRegister(instr,
alu_out,
i64hilo,
@@ -2281,7 +2291,7 @@ void Simulator::DecodeTypeRegister(Instruction* instr) {
}
-// Type 2: instructions using a 16 bytes immediate. (eg: addi, beq).
+// Type 2: instructions using a 16 bytes immediate. (e.g. addi, beq).
void Simulator::DecodeTypeImmediate(Instruction* instr) {
// Instruction fields.
Opcode op = instr->OpcodeFieldRaw();
@@ -2604,7 +2614,7 @@ void Simulator::DecodeTypeImmediate(Instruction* instr) {
}
-// Type 3: instructions using a 26 bytes immediate. (eg: j, jal).
+// Type 3: instructions using a 26 bytes immediate. (e.g. j, jal).
void Simulator::DecodeTypeJump(Instruction* instr) {
// Get current pc.
int32_t current_pc = get_pc();
@@ -2701,7 +2711,7 @@ void Simulator::Execute() {
int32_t Simulator::Call(byte* entry, int argument_count, ...) {
va_list parameters;
va_start(parameters, argument_count);
- // Setup arguments.
+ // Set up arguments.
// First four arguments passed in registers.
ASSERT(argument_count >= 4);
@@ -2748,7 +2758,7 @@ int32_t Simulator::Call(byte* entry, int argument_count, ...) {
int32_t sp_val = get_register(sp);
int32_t fp_val = get_register(fp);
- // Setup the callee-saved registers with a known value. To be able to check
+ // Set up the callee-saved registers with a known value. To be able to check
// that they are preserved properly across JS execution.
int32_t callee_saved_value = icount_;
set_register(s0, callee_saved_value);
diff --git a/src/3rdparty/v8/src/mips/simulator-mips.h b/src/3rdparty/v8/src/mips/simulator-mips.h
index 69dddfa..1e72939 100644
--- a/src/3rdparty/v8/src/mips/simulator-mips.h
+++ b/src/3rdparty/v8/src/mips/simulator-mips.h
@@ -221,6 +221,10 @@ class Simulator {
// Pop an address from the JS stack.
uintptr_t PopAddress();
+ // Debugger input.
+ void set_last_debugger_input(char* input);
+ char* last_debugger_input() { return last_debugger_input_; }
+
// ICache checking.
static void FlushICache(v8::internal::HashMap* i_cache, void* start,
size_t size);
@@ -305,6 +309,14 @@ class Simulator {
void InstructionDecode(Instruction* instr);
// Execute one instruction placed in a branch delay slot.
void BranchDelayInstructionDecode(Instruction* instr) {
+ if (instr->InstructionBits() == nopInstr) {
+ // Short-cut generic nop instructions. They are always valid and they
+ // never change the simulator state.
+ set_register(pc, reinterpret_cast<int32_t>(instr) +
+ Instruction::kInstrSize);
+ return;
+ }
+
if (instr->IsForbiddenInBranchDelay()) {
V8_Fatal(__FILE__, __LINE__,
"Eror:Unexpected %i opcode in a branch delay slot.",
@@ -358,6 +370,9 @@ class Simulator {
int icount_;
int break_count_;
+ // Debugger input.
+ char* last_debugger_input_;
+
// Icache simulation.
v8::internal::HashMap* i_cache_;
diff --git a/src/3rdparty/v8/src/mips/stub-cache-mips.cc b/src/3rdparty/v8/src/mips/stub-cache-mips.cc
index cb1f092..4d18c94 100644
--- a/src/3rdparty/v8/src/mips/stub-cache-mips.cc
+++ b/src/3rdparty/v8/src/mips/stub-cache-mips.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -43,51 +43,74 @@ static void ProbeTable(Isolate* isolate,
MacroAssembler* masm,
Code::Flags flags,
StubCache::Table table,
+ Register receiver,
Register name,
+ // Number of the cache entry, not scaled.
Register offset,
Register scratch,
- Register scratch2) {
+ Register scratch2,
+ Register offset_scratch) {
ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
+ ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
+ uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());
// Check the relative positions of the address fields.
ASSERT(value_off_addr > key_off_addr);
ASSERT((value_off_addr - key_off_addr) % 4 == 0);
ASSERT((value_off_addr - key_off_addr) < (256 * 4));
+ ASSERT(map_off_addr > key_off_addr);
+ ASSERT((map_off_addr - key_off_addr) % 4 == 0);
+ ASSERT((map_off_addr - key_off_addr) < (256 * 4));
Label miss;
- Register offsets_base_addr = scratch;
+ Register base_addr = scratch;
+ scratch = no_reg;
+
+ // Multiply by 3 because there are 3 fields per entry (name, code, map).
+ __ sll(offset_scratch, offset, 1);
+ __ Addu(offset_scratch, offset_scratch, offset);
+
+ // Calculate the base address of the entry.
+ __ li(base_addr, Operand(key_offset));
+ __ sll(at, offset_scratch, kPointerSizeLog2);
+ __ Addu(base_addr, base_addr, at);
// Check that the key in the entry matches the name.
- __ li(offsets_base_addr, Operand(key_offset));
- __ sll(scratch2, offset, 1);
- __ addu(scratch2, offsets_base_addr, scratch2);
- __ lw(scratch2, MemOperand(scratch2));
- __ Branch(&miss, ne, name, Operand(scratch2));
+ __ lw(at, MemOperand(base_addr, 0));
+ __ Branch(&miss, ne, name, Operand(at));
+
+ // Check the map matches.
+ __ lw(at, MemOperand(base_addr, map_off_addr - key_off_addr));
+ __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
+ __ Branch(&miss, ne, at, Operand(scratch2));
// Get the code entry from the cache.
- __ Addu(offsets_base_addr, offsets_base_addr,
- Operand(value_off_addr - key_off_addr));
- __ sll(scratch2, offset, 1);
- __ addu(scratch2, offsets_base_addr, scratch2);
- __ lw(scratch2, MemOperand(scratch2));
+ Register code = scratch2;
+ scratch2 = no_reg;
+ __ lw(code, MemOperand(base_addr, value_off_addr - key_off_addr));
// Check that the flags match what we're looking for.
- __ lw(scratch2, FieldMemOperand(scratch2, Code::kFlagsOffset));
- __ And(scratch2, scratch2, Operand(~Code::kFlagsNotUsedInLookup));
- __ Branch(&miss, ne, scratch2, Operand(flags));
+ Register flags_reg = base_addr;
+ base_addr = no_reg;
+ __ lw(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
+ __ And(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup));
+ __ Branch(&miss, ne, flags_reg, Operand(flags));
- // Re-load code entry from cache.
- __ sll(offset, offset, 1);
- __ addu(offset, offset, offsets_base_addr);
- __ lw(offset, MemOperand(offset));
+#ifdef DEBUG
+ if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
+ __ jmp(&miss);
+ } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
+ __ jmp(&miss);
+ }
+#endif
// Jump to the first instruction in the code stub.
- __ Addu(offset, offset, Operand(Code::kHeaderSize - kHeapObjectTag));
- __ Jump(offset);
+ __ Addu(at, code, Operand(Code::kHeaderSize - kHeapObjectTag));
+ __ Jump(at);
// Miss: fall through.
__ bind(&miss);
@@ -157,13 +180,14 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
Register name,
Register scratch,
Register extra,
- Register extra2) {
+ Register extra2,
+ Register extra3) {
Isolate* isolate = masm->isolate();
Label miss;
- // Make sure that code is valid. The shifting code relies on the
- // entry size being 8.
- ASSERT(sizeof(Entry) == 8);
+ // Make sure that code is valid. The multiplying code relies on the
+ // entry size being 12.
+ ASSERT(sizeof(Entry) == 12);
// Make sure the flags does not name a specific type.
ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
@@ -179,39 +203,66 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
ASSERT(!extra2.is(scratch));
ASSERT(!extra2.is(extra));
- // Check scratch, extra and extra2 registers are valid.
+ // Check register validity.
ASSERT(!scratch.is(no_reg));
ASSERT(!extra.is(no_reg));
ASSERT(!extra2.is(no_reg));
+ ASSERT(!extra3.is(no_reg));
+
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
+ extra2, extra3);
// Check that the receiver isn't a smi.
- __ JumpIfSmi(receiver, &miss, t0);
+ __ JumpIfSmi(receiver, &miss);
// Get the map of the receiver and compute the hash.
__ lw(scratch, FieldMemOperand(name, String::kHashFieldOffset));
- __ lw(t8, FieldMemOperand(receiver, HeapObject::kMapOffset));
- __ Addu(scratch, scratch, Operand(t8));
- __ Xor(scratch, scratch, Operand(flags));
- __ And(scratch,
- scratch,
- Operand((kPrimaryTableSize - 1) << kHeapObjectTagSize));
+ __ lw(at, FieldMemOperand(receiver, HeapObject::kMapOffset));
+ __ Addu(scratch, scratch, at);
+ uint32_t mask = kPrimaryTableSize - 1;
+ // We shift out the last two bits because they are not part of the hash and
+ // they are always 01 for maps.
+ __ srl(scratch, scratch, kHeapObjectTagSize);
+ __ Xor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
+ __ And(scratch, scratch, Operand(mask));
// Probe the primary table.
- ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra, extra2);
+ ProbeTable(isolate,
+ masm,
+ flags,
+ kPrimary,
+ receiver,
+ name,
+ scratch,
+ extra,
+ extra2,
+ extra3);
// Primary miss: Compute hash for secondary probe.
- __ Subu(scratch, scratch, Operand(name));
- __ Addu(scratch, scratch, Operand(flags));
- __ And(scratch,
- scratch,
- Operand((kSecondaryTableSize - 1) << kHeapObjectTagSize));
+ __ srl(at, name, kHeapObjectTagSize);
+ __ Subu(scratch, scratch, at);
+ uint32_t mask2 = kSecondaryTableSize - 1;
+ __ Addu(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
+ __ And(scratch, scratch, Operand(mask2));
// Probe the secondary table.
- ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra, extra2);
+ ProbeTable(isolate,
+ masm,
+ flags,
+ kSecondary,
+ receiver,
+ name,
+ scratch,
+ extra,
+ extra2,
+ extra3);
// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
__ bind(&miss);
+ __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
+ extra2, extra3);
}
@@ -282,8 +333,7 @@ void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
Register scratch,
Label* miss_label) {
// Check that the receiver isn't a smi.
- __ And(scratch, receiver, Operand(kSmiTagMask));
- __ Branch(miss_label, eq, scratch, Operand(zero_reg));
+ __ JumpIfSmi(receiver, miss_label);
// Check that the object is a JS array.
__ GetObjectType(receiver, scratch, scratch);
@@ -378,13 +428,9 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
Label* miss_label) {
// a0 : value.
Label exit;
-
- // Check that the receiver isn't a smi.
- __ JumpIfSmi(receiver_reg, miss_label, scratch);
-
- // Check that the map of the receiver hasn't changed.
- __ lw(scratch, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
- __ Branch(miss_label, ne, scratch, Operand(Handle<Map>(object->map())));
+ // Check that the map of the object hasn't changed.
+ __ CheckMap(receiver_reg, scratch, Handle<Map>(object->map()), miss_label,
+ DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
@@ -531,8 +577,8 @@ static void CompileCallLoadPropertyWithInterceptor(
ExternalReference ref =
ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
masm->isolate());
- __ li(a0, Operand(5));
- __ li(a1, Operand(ref));
+ __ PrepareCEntryArgs(5);
+ __ PrepareCEntryFunction(ref);
CEntryStub stub(1);
__ CallStub(&stub);
@@ -566,16 +612,16 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm,
int argc) {
// ----------- S t a t e -------------
// -- sp[0] : holder (set by CheckPrototypes)
- // -- sp[4] : callee js function
+ // -- sp[4] : callee JS function
// -- sp[8] : call data
- // -- sp[12] : last js argument
+ // -- sp[12] : last JS argument
// -- ...
- // -- sp[(argc + 3) * 4] : first js argument
+ // -- sp[(argc + 3) * 4] : first JS argument
// -- sp[(argc + 4) * 4] : receiver
// -----------------------------------
// Get the function and setup the context.
Handle<JSFunction> function = optimization.constant_function();
- __ li(t1, Operand(function));
+ __ LoadHeapObject(t1, function);
__ lw(cp, FieldMemOperand(t1, JSFunction::kContextOffset));
// Pass the additional arguments FastHandleApiCall expects.
@@ -588,7 +634,7 @@ static void GenerateFastApiDirectCall(MacroAssembler* masm,
__ li(t2, call_data);
}
- // Store js function and call data.
+ // Store JS function and call data.
__ sw(t1, MemOperand(sp, 1 * kPointerSize));
__ sw(t2, MemOperand(sp, 2 * kPointerSize));
@@ -748,7 +794,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
? CALL_AS_FUNCTION
: CALL_AS_METHOD;
__ InvokeFunction(optimization.constant_function(), arguments_,
- JUMP_FUNCTION, call_kind);
+ JUMP_FUNCTION, NullCallWrapper(), call_kind);
}
// Deferred code for fast API call case---clean preallocated space.
@@ -897,7 +943,7 @@ static void StoreIntAsFloat(MacroAssembler* masm,
__ And(fval, ival, Operand(kBinary32SignMask));
// Negate value if it is negative.
__ subu(scratch1, zero_reg, ival);
- __ movn(ival, scratch1, fval);
+ __ Movn(ival, scratch1, fval);
// We have -1, 0 or 1, which we treat specially. Register ival contains
// absolute value: it is either equal to 1 (special case of -1 and 1),
@@ -911,14 +957,14 @@ static void StoreIntAsFloat(MacroAssembler* masm,
__ Xor(scratch1, ival, Operand(1));
__ li(scratch2, exponent_word_for_1);
__ or_(scratch2, fval, scratch2);
- __ movz(fval, scratch2, scratch1); // Only if ival is equal to 1.
+ __ Movz(fval, scratch2, scratch1); // Only if ival is equal to 1.
__ Branch(&done);
__ bind(&not_special);
// Count leading zeros.
// Gets the wrong answer for 0, but we already checked for that case above.
Register zeros = scratch2;
- __ clz(zeros, ival);
+ __ Clz(zeros, ival);
// Compute exponent and or it into the exponent register.
__ li(scratch1, (kBitsPerInt - 1) + kBinary32ExponentBias);
@@ -1038,9 +1084,8 @@ Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
__ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
} else {
Handle<Map> current_map(current->map());
- __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
- // Branch on the result of the map check.
- __ Branch(miss, ne, scratch1, Operand(current_map));
+ __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK,
+ ALLOW_ELEMENT_TRANSITION_MAPS);
// Check access rights to the global object. This has to happen after
// the map check so that we know that the object is actually a global
// object.
@@ -1071,8 +1116,8 @@ Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
LOG(masm()->isolate(), IntEvent("check-maps-depth", depth + 1));
// Check the holder map.
- __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
- __ Branch(miss, ne, scratch1, Operand(Handle<Map>(current->map())));
+ __ CheckMap(reg, scratch1, Handle<Map>(current->map()), miss,
+ DONT_DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
// Perform security check for access to the global object.
ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
@@ -1100,8 +1145,7 @@ void StubCompiler::GenerateLoadField(Handle<JSObject> object,
Handle<String> name,
Label* miss) {
// Check that the receiver isn't a smi.
- __ And(scratch1, receiver, Operand(kSmiTagMask));
- __ Branch(miss, eq, scratch1, Operand(zero_reg));
+ __ JumpIfSmi(receiver, miss);
// Check that the maps haven't changed.
Register reg = CheckPrototypes(
@@ -1117,7 +1161,7 @@ void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
Register scratch1,
Register scratch2,
Register scratch3,
- Handle<Object> value,
+ Handle<JSFunction> value,
Handle<String> name,
Label* miss) {
// Check that the receiver isn't a smi.
@@ -1129,7 +1173,7 @@ void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
scratch1, scratch2, scratch3, name, miss);
// Return the constant value.
- __ li(v0, Operand(value));
+ __ LoadHeapObject(v0, value);
__ Ret();
}
@@ -1175,7 +1219,7 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
__ EnterExitFrame(false, kApiStackSpace);
// Create AccessorInfo instance on the stack above the exit frame with
- // scratch2 (internal::Object **args_) as the data.
+ // scratch2 (internal::Object** args_) as the data.
__ sw(a2, MemOperand(sp, kPointerSize));
// a2 (second argument - see note above) = AccessorInfo&
__ Addu(a2, sp, kPointerSize);
@@ -1211,7 +1255,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
// and CALLBACKS, so inline only them, other cases may be added
// later.
bool compile_followup_inline = false;
- if (lookup->IsProperty() && lookup->IsCacheable()) {
+ if (lookup->IsFound() && lookup->IsCacheable()) {
if (lookup->type() == FIELD) {
compile_followup_inline = true;
} else if (lookup->type() == CALLBACKS &&
@@ -1350,14 +1394,8 @@ void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
// Get the receiver from the stack.
__ lw(a0, MemOperand(sp, argc * kPointerSize));
- // If the object is the holder then we know that it's a global
- // object which can only happen for contextual calls. In this case,
- // the receiver cannot be a smi.
- if (!object.is_identical_to(holder)) {
- __ JumpIfSmi(a0, miss);
- }
-
// Check that the maps haven't changed.
+ __ JumpIfSmi(a0, miss);
CheckPrototypes(object, a0, holder, a3, a1, t0, name, miss);
}
@@ -1475,28 +1513,28 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ Ret();
} else {
Label call_builtin;
- Register elements = a3;
- Register end_elements = t1;
- // Get the elements array of the object.
- __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
-
- // Check that the elements are in fast mode and writable.
- __ CheckMap(elements,
- v0,
- Heap::kFixedArrayMapRootIndex,
- &call_builtin,
- DONT_DO_SMI_CHECK);
-
if (argc == 1) { // Otherwise fall through to call the builtin.
Label attempt_to_grow_elements;
+ Register elements = t2;
+ Register end_elements = t1;
+ // Get the elements array of the object.
+ __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
+
+ // Check that the elements are in fast mode and writable.
+ __ CheckMap(elements,
+ v0,
+ Heap::kFixedArrayMapRootIndex,
+ &call_builtin,
+ DONT_DO_SMI_CHECK);
+
// Get the array's length into v0 and calculate new length.
__ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
STATIC_ASSERT(kSmiTagSize == 1);
STATIC_ASSERT(kSmiTag == 0);
__ Addu(v0, v0, Operand(Smi::FromInt(argc)));
- // Get the element's length.
+ // Get the elements' length.
__ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
// Check if we could survive without allocation.
@@ -1510,7 +1548,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
// Save new length.
__ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
- // Push the element.
+ // Store the value.
// We may need a register containing the address end_elements below,
// so write back the value in end_elements.
__ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
@@ -1526,13 +1564,33 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ bind(&with_write_barrier);
- __ lw(t2, FieldMemOperand(receiver, HeapObject::kMapOffset));
- __ CheckFastObjectElements(t2, t2, &call_builtin);
+ __ lw(a3, FieldMemOperand(receiver, HeapObject::kMapOffset));
+
+ if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
+ Label fast_object, not_fast_object;
+ __ CheckFastObjectElements(a3, t3, &not_fast_object);
+ __ jmp(&fast_object);
+ // In case of fast smi-only, convert to fast object, otherwise bail out.
+ __ bind(&not_fast_object);
+ __ CheckFastSmiOnlyElements(a3, t3, &call_builtin);
+ // edx: receiver
+ // r3: map
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ a3,
+ t3,
+ &call_builtin);
+ __ mov(a2, receiver);
+ ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+ __ bind(&fast_object);
+ } else {
+ __ CheckFastObjectElements(a3, a3, &call_builtin);
+ }
// Save new length.
__ sw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
- // Push the element.
+ // Store the value.
// We may need a register containing the address end_elements below,
// so write back the value in end_elements.
__ sll(end_elements, v0, kPointerSizeLog2 - kSmiTagSize);
@@ -1580,23 +1638,23 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ Addu(end_elements, elements, end_elements);
__ Addu(end_elements, end_elements, Operand(kEndElementsOffset));
__ li(t3, Operand(new_space_allocation_top));
- __ lw(t2, MemOperand(t3));
- __ Branch(&call_builtin, ne, end_elements, Operand(t2));
+ __ lw(a3, MemOperand(t3));
+ __ Branch(&call_builtin, ne, end_elements, Operand(a3));
__ li(t5, Operand(new_space_allocation_limit));
__ lw(t5, MemOperand(t5));
- __ Addu(t2, t2, Operand(kAllocationDelta * kPointerSize));
- __ Branch(&call_builtin, hi, t2, Operand(t5));
+ __ Addu(a3, a3, Operand(kAllocationDelta * kPointerSize));
+ __ Branch(&call_builtin, hi, a3, Operand(t5));
// We fit and could grow elements.
// Update new_space_allocation_top.
- __ sw(t2, MemOperand(t3));
+ __ sw(a3, MemOperand(t3));
// Push the argument.
__ sw(a2, MemOperand(end_elements));
// Fill the rest with holes.
- __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
+ __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
for (int i = 1; i < kAllocationDelta; i++) {
- __ sw(t2, MemOperand(end_elements, i * kPointerSize));
+ __ sw(a3, MemOperand(end_elements, i * kPointerSize));
}
// Update elements' and array's sizes.
@@ -1679,14 +1737,14 @@ Handle<Code> CallStubCompiler::CompileArrayPopCall(
// expensive shift first, and use an offset later on.
__ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize);
__ Addu(elements, elements, t1);
- __ lw(v0, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
+ __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
__ Branch(&call_builtin, eq, v0, Operand(t2));
// Set the array's length.
__ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
// Fill with the hole.
- __ sw(t2, MemOperand(elements, FixedArray::kHeaderSize - kHeapObjectTag));
+ __ sw(t2, FieldMemOperand(elements, FixedArray::kHeaderSize));
__ Drop(argc + 1);
__ Ret();
@@ -1753,7 +1811,6 @@ Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
Register receiver = a1;
Register index = t1;
- Register scratch = a3;
Register result = v0;
__ lw(receiver, MemOperand(sp, argc * kPointerSize));
if (argc > 0) {
@@ -1764,7 +1821,6 @@ Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
StringCharCodeAtGenerator generator(receiver,
index,
- scratch,
result,
&miss, // When not a string.
&miss, // When not a number.
@@ -1835,8 +1891,7 @@ Handle<Code> CallStubCompiler::CompileStringCharAtCall(
Register receiver = v0;
Register index = t1;
- Register scratch1 = a1;
- Register scratch2 = a3;
+ Register scratch = a3;
Register result = v0;
__ lw(receiver, MemOperand(sp, argc * kPointerSize));
if (argc > 0) {
@@ -1847,8 +1902,7 @@ Handle<Code> CallStubCompiler::CompileStringCharAtCall(
StringCharAtGenerator generator(receiver,
index,
- scratch1,
- scratch2,
+ scratch,
result,
&miss, // When not a string.
&miss, // When not a number.
@@ -1940,7 +1994,8 @@ Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
// Tail call the full function. We do not have to patch the receiver
// because the function makes no use of it.
__ bind(&slow);
- __ InvokeFunction(function, arguments(), JUMP_FUNCTION, CALL_AS_METHOD);
+ __ InvokeFunction(
+ function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
__ bind(&miss);
// a2: function name.
@@ -2073,7 +2128,8 @@ Handle<Code> CallStubCompiler::CompileMathFloorCall(
__ bind(&slow);
// Tail call the full function. We do not have to patch the receiver
// because the function makes no use of it.
- __ InvokeFunction(function, arguments(), JUMP_FUNCTION, CALL_AS_METHOD);
+ __ InvokeFunction(
+ function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
__ bind(&miss);
// a2: function name.
@@ -2173,7 +2229,8 @@ Handle<Code> CallStubCompiler::CompileMathAbsCall(
// Tail call the full function. We do not have to patch the receiver
// because the function makes no use of it.
__ bind(&slow);
- __ InvokeFunction(function, arguments(), JUMP_FUNCTION, CALL_AS_METHOD);
+ __ InvokeFunction(
+ function, arguments(), JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
__ bind(&miss);
// a2: function name.
@@ -2264,8 +2321,7 @@ Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
// Check that the receiver isn't a smi.
if (check != NUMBER_CHECK) {
- __ And(t1, a1, Operand(kSmiTagMask));
- __ Branch(&miss, eq, t1, Operand(zero_reg));
+ __ JumpIfSmi(a1, &miss);
}
// Make sure that it's okay not to patch the on stack receiver
@@ -2289,7 +2345,7 @@ Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
break;
case STRING_CHECK:
- if (function->IsBuiltin() || function->shared()->strict_mode()) {
+ if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
// Check that the object is a two-byte string or a symbol.
__ GetObjectType(a1, a3, a3);
__ Branch(&miss, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
@@ -2307,11 +2363,10 @@ Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
break;
case NUMBER_CHECK:
- if (function->IsBuiltin() || function->shared()->strict_mode()) {
+ if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
Label fast;
// Check that the object is a smi or a heap number.
- __ And(t1, a1, Operand(kSmiTagMask));
- __ Branch(&fast, eq, t1, Operand(zero_reg));
+ __ JumpIfSmi(a1, &fast);
__ GetObjectType(a1, a0, a0);
__ Branch(&miss, ne, a0, Operand(HEAP_NUMBER_TYPE));
__ bind(&fast);
@@ -2329,7 +2384,7 @@ Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
break;
case BOOLEAN_CHECK:
- if (function->IsBuiltin() || function->shared()->strict_mode()) {
+ if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
Label fast;
// Check that the object is a boolean.
__ LoadRoot(t0, Heap::kTrueValueRootIndex);
@@ -2354,7 +2409,8 @@ Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
? CALL_AS_FUNCTION
: CALL_AS_METHOD;
- __ InvokeFunction(function, arguments(), JUMP_FUNCTION, call_kind);
+ __ InvokeFunction(
+ function, arguments(), JUMP_FUNCTION, NullCallWrapper(), call_kind);
// Handle call cache miss.
__ bind(&miss);
@@ -2438,7 +2494,7 @@ Handle<Code> CallStubCompiler::CompileCallGlobal(
__ sw(a3, MemOperand(sp, argc * kPointerSize));
}
- // Setup the context (function already in r1).
+ // Set up the context (function already in r1).
__ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
// Jump to the cached code (tail call).
@@ -2501,12 +2557,9 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
// -----------------------------------
Label miss;
- // Check that the object isn't a smi.
- __ JumpIfSmi(a1, &miss);
-
// Check that the map of the object hasn't changed.
- __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
- __ Branch(&miss, ne, a3, Operand(Handle<Map>(object->map())));
+ __ CheckMap(a1, a3, Handle<Map>(object->map()), &miss,
+ DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
@@ -2548,12 +2601,9 @@ Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
// -----------------------------------
Label miss;
- // Check that the object isn't a smi.
- __ JumpIfSmi(a1, &miss);
-
// Check that the map of the object hasn't changed.
- __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
- __ Branch(&miss, ne, a3, Operand(Handle<Map>(receiver->map())));
+ __ CheckMap(a1, a3, Handle<Map>(receiver->map()), &miss,
+ DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
// Perform global security token check if needed.
if (receiver->IsJSGlobalProxy()) {
@@ -2613,15 +2663,7 @@ Handle<Code> StoreStubCompiler::CompileStoreGlobal(
// Store the value in the cell.
__ sw(a0, FieldMemOperand(t0, JSGlobalPropertyCell::kValueOffset));
__ mov(v0, a0); // Stored value must be returned in v0.
-
- // This trashes a0 but the value is returned in v0 anyway.
- __ RecordWriteField(t0,
- JSGlobalPropertyCell::kValueOffset,
- a0,
- a2,
- kRAHasNotBeenSaved,
- kDontSaveFPRegs,
- OMIT_REMEMBERED_SET);
+ // Cells are always rescanned, so no write barrier here.
Counters* counters = masm()->isolate()->counters();
__ IncrementCounter(counters->named_store_global_inline(), 1, a1, a3);
@@ -2717,7 +2759,7 @@ Handle<Code> LoadStubCompiler::CompileLoadCallback(
Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
Handle<JSObject> holder,
- Handle<Object> value,
+ Handle<JSFunction> value,
Handle<String> name) {
// ----------- S t a t e -------------
// -- a0 : receiver
@@ -2771,15 +2813,8 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal(
// -----------------------------------
Label miss;
- // If the object is the holder then we know that it's a global
- // object which can only happen for contextual calls. In this case,
- // the receiver cannot be a smi.
- if (!object.is_identical_to(holder)) {
- __ And(t0, a0, Operand(kSmiTagMask));
- __ Branch(&miss, eq, t0, Operand(zero_reg));
- }
-
// Check that the map of the global has not changed.
+ __ JumpIfSmi(a0, &miss);
CheckPrototypes(object, a0, holder, a3, t0, a1, name, &miss);
// Get the value from the cell.
@@ -2856,7 +2891,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
Handle<String> name,
Handle<JSObject> receiver,
Handle<JSObject> holder,
- Handle<Object> value) {
+ Handle<JSFunction> value) {
// ----------- S t a t e -------------
// -- ra : return address
// -- a0 : key
@@ -3062,7 +3097,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
ElementsKind elements_kind = receiver_map->elements_kind();
bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
Handle<Code> stub =
- KeyedStoreElementStub(is_js_array, elements_kind).GetCode();
+ KeyedStoreElementStub(is_js_array, elements_kind, grow_mode_).GetCode();
__ DispatchMap(a2, a3, receiver_map, stub, DO_SMI_CHECK);
@@ -3136,8 +3171,7 @@ Handle<Code> ConstructStubCompiler::CompileConstructStub(
// a1: constructor function
// t7: undefined
__ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
- __ And(t0, a2, Operand(kSmiTagMask));
- __ Branch(&generic_stub_call, eq, t0, Operand(zero_reg));
+ __ JumpIfSmi(a2, &generic_stub_call);
__ GetObjectType(a2, a3, t0);
__ Branch(&generic_stub_call, ne, t0, Operand(MAP_TYPE));
@@ -3462,7 +3496,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
CpuFeatures::Scope scope(FPU);
__ mtc1(value, f0);
__ cvt_d_w(f0, f0);
- __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
+ __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
__ Ret();
} else {
Register dst1 = t2;
@@ -3510,7 +3544,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
__ Cvt_d_uw(f0, value, f22);
- __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
+ __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
__ Ret();
} else {
@@ -3564,7 +3598,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
__ AllocateHeapNumber(v0, t3, t5, t6, &slow);
// The float (single) value is already in fpu reg f0 (if we use float).
__ cvt_d_s(f0, f0);
- __ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
+ __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
__ Ret();
} else {
// Allocate a HeapNumber for the result. Don't use a0 and a1 as
@@ -3589,7 +3623,7 @@ void KeyedLoadStubCompiler::GenerateLoadExternalArray(
__ li(t0, 0x7ff);
__ Xor(t1, t5, Operand(0xFF));
- __ movz(t5, t0, t1); // Set t5 to 0x7ff only if t5 is equal to 0xff.
+ __ Movz(t5, t0, t1); // Set t5 to 0x7ff only if t5 is equal to 0xff.
__ Branch(&exponent_rebiased, eq, t0, Operand(0xff));
// Rebias exponent.
@@ -3883,7 +3917,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
__ xor_(t1, t6, t5);
__ li(t2, kBinary32ExponentMask);
- __ movz(t6, t2, t1); // Only if t6 is equal to t5.
+ __ Movz(t6, t2, t1); // Only if t6 is equal to t5.
__ Branch(&nan_or_infinity_or_zero, eq, t6, Operand(t5));
// Rebias exponent.
@@ -3896,12 +3930,12 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
__ Slt(t1, t1, t6);
__ And(t2, t3, Operand(HeapNumber::kSignMask));
__ Or(t2, t2, Operand(kBinary32ExponentMask));
- __ movn(t3, t2, t1); // Only if t6 is gt kBinary32MaxExponent.
+ __ Movn(t3, t2, t1); // Only if t6 is gt kBinary32MaxExponent.
__ Branch(&done, gt, t6, Operand(kBinary32MaxExponent));
__ Slt(t1, t6, Operand(kBinary32MinExponent));
__ And(t2, t3, Operand(HeapNumber::kSignMask));
- __ movn(t3, t2, t1); // Only if t6 is lt kBinary32MinExponent.
+ __ Movn(t3, t2, t1); // Only if t6 is lt kBinary32MinExponent.
__ Branch(&done, lt, t6, Operand(kBinary32MinExponent));
__ And(t7, t3, Operand(HeapNumber::kSignMask));
@@ -3951,11 +3985,11 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
// and infinities. All these should be converted to 0.
__ li(t5, HeapNumber::kExponentMask);
__ and_(t6, t3, t5);
- __ movz(t3, zero_reg, t6); // Only if t6 is equal to zero.
+ __ Movz(t3, zero_reg, t6); // Only if t6 is equal to zero.
__ Branch(&done, eq, t6, Operand(zero_reg));
__ xor_(t2, t6, t5);
- __ movz(t3, zero_reg, t2); // Only if t6 is equal to t5.
+ __ Movz(t3, zero_reg, t2); // Only if t6 is equal to t5.
__ Branch(&done, eq, t6, Operand(t5));
// Unbias exponent.
@@ -3963,13 +3997,13 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
__ Subu(t6, t6, Operand(HeapNumber::kExponentBias));
// If exponent is negative then result is 0.
__ slt(t2, t6, zero_reg);
- __ movn(t3, zero_reg, t2); // Only if exponent is negative.
+ __ Movn(t3, zero_reg, t2); // Only if exponent is negative.
__ Branch(&done, lt, t6, Operand(zero_reg));
// If exponent is too big then result is minimal value.
__ slti(t1, t6, meaningfull_bits - 1);
__ li(t2, min_value);
- __ movz(t3, t2, t1); // Only if t6 is ge meaningfull_bits - 1.
+ __ Movz(t3, t2, t1); // Only if t6 is ge meaningfull_bits - 1.
__ Branch(&done, ge, t6, Operand(meaningfull_bits - 1));
__ And(t5, t3, Operand(HeapNumber::kSignMask));
@@ -3980,7 +4014,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
__ subu(t6, t9, t6);
__ slt(t1, t6, zero_reg);
__ srlv(t2, t3, t6);
- __ movz(t3, t2, t1); // Only if t6 is positive.
+ __ Movz(t3, t2, t1); // Only if t6 is positive.
__ Branch(&sign, ge, t6, Operand(zero_reg));
__ subu(t6, zero_reg, t6);
@@ -3992,7 +4026,7 @@ void KeyedStoreStubCompiler::GenerateStoreExternalArray(
__ bind(&sign);
__ subu(t2, t3, zero_reg);
- __ movz(t3, t2, t5); // Only if t5 is zero.
+ __ Movz(t3, t2, t5); // Only if t5 is zero.
__ bind(&done);
@@ -4073,7 +4107,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
// have been verified by the caller to not be a smi.
// Check that the key is a smi.
- __ JumpIfNotSmi(a0, &miss_force_generic);
+ __ JumpIfNotSmi(a0, &miss_force_generic, at, USE_DELAY_SLOT);
+ // The delay slot can be safely used here, a1 is an object pointer.
// Get the elements array.
__ lw(a2, FieldMemOperand(a1, JSObject::kElementsOffset));
@@ -4081,7 +4116,7 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
// Check that the key is within bounds.
__ lw(a3, FieldMemOperand(a2, FixedArray::kLengthOffset));
- __ Branch(&miss_force_generic, hs, a0, Operand(a3));
+ __ Branch(USE_DELAY_SLOT, &miss_force_generic, hs, a0, Operand(a3));
// Load the result and make sure it's not the hole.
__ Addu(a3, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
@@ -4091,8 +4126,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
__ lw(t0, MemOperand(t0));
__ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
__ Branch(&miss_force_generic, eq, t0, Operand(t1));
+ __ Ret(USE_DELAY_SLOT);
__ mov(v0, t0);
- __ Ret();
__ bind(&miss_force_generic);
Handle<Code> stub =
@@ -4173,7 +4208,8 @@ void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
void KeyedStoreStubCompiler::GenerateStoreFastElement(
MacroAssembler* masm,
bool is_js_array,
- ElementsKind elements_kind) {
+ ElementsKind elements_kind,
+ KeyedAccessGrowMode grow_mode) {
// ----------- S t a t e -------------
// -- a0 : value
// -- a1 : key
@@ -4182,15 +4218,17 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
// -- a3 : scratch
// -- a4 : scratch (elements)
// -----------------------------------
- Label miss_force_generic, transition_elements_kind;
+ Label miss_force_generic, transition_elements_kind, grow, slow;
+ Label finish_store, check_capacity;
Register value_reg = a0;
Register key_reg = a1;
Register receiver_reg = a2;
- Register scratch = a3;
- Register elements_reg = t0;
- Register scratch2 = t1;
- Register scratch3 = t2;
+ Register scratch = t0;
+ Register elements_reg = a3;
+ Register length_reg = t1;
+ Register scratch2 = t2;
+ Register scratch3 = t3;
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
@@ -4198,26 +4236,35 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
// Check that the key is a smi.
__ JumpIfNotSmi(key_reg, &miss_force_generic);
- // Get the elements array and make sure it is a fast element array, not 'cow'.
- __ lw(elements_reg,
- FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
- __ CheckMap(elements_reg,
- scratch,
- Heap::kFixedArrayMapRootIndex,
- &miss_force_generic,
- DONT_DO_SMI_CHECK);
+ if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ __ JumpIfNotSmi(value_reg, &transition_elements_kind);
+ }
// Check that the key is within bounds.
+ __ lw(elements_reg,
+ FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
if (is_js_array) {
__ lw(scratch, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
} else {
__ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
}
// Compare smis.
- __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));
+ if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
+ __ Branch(&grow, hs, key_reg, Operand(scratch));
+ } else {
+ __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch));
+ }
+
+ // Make sure elements is a fast element array, not 'cow'.
+ __ CheckMap(elements_reg,
+ scratch,
+ Heap::kFixedArrayMapRootIndex,
+ &miss_force_generic,
+ DONT_DO_SMI_CHECK);
+
+ __ bind(&finish_store);
if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
- __ JumpIfNotSmi(value_reg, &transition_elements_kind);
__ Addu(scratch,
elements_reg,
Operand(FixedArray::kHeaderSize - kHeapObjectTag));
@@ -4254,12 +4301,79 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
__ bind(&transition_elements_kind);
Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
__ Jump(ic_miss, RelocInfo::CODE_TARGET);
+
+ if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
+ // Grow the array by a single element if possible.
+ __ bind(&grow);
+
+ // Make sure the array is only growing by a single element, anything else
+ // must be handled by the runtime.
+ __ Branch(&miss_force_generic, ne, key_reg, Operand(scratch));
+
+ // Check for the empty array, and preallocate a small backing store if
+ // possible.
+ __ lw(length_reg,
+ FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
+ __ lw(elements_reg,
+ FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
+ __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
+ __ Branch(&check_capacity, ne, elements_reg, Operand(at));
+
+ int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
+ __ AllocateInNewSpace(size, elements_reg, scratch, scratch2, &slow,
+ TAG_OBJECT);
+
+ __ LoadRoot(scratch, Heap::kFixedArrayMapRootIndex);
+ __ sw(scratch, FieldMemOperand(elements_reg, JSObject::kMapOffset));
+ __ li(scratch, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
+ __ sw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
+ __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
+ for (int i = 1; i < JSArray::kPreallocatedArrayElements; ++i) {
+ __ sw(scratch, FieldMemOperand(elements_reg, FixedArray::SizeFor(i)));
+ }
+
+ // Store the element at index zero.
+ __ sw(value_reg, FieldMemOperand(elements_reg, FixedArray::SizeFor(0)));
+
+ // Install the new backing store in the JSArray.
+ __ sw(elements_reg,
+ FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
+ __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
+ scratch, kRAHasNotBeenSaved, kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+
+ // Increment the length of the array.
+ __ li(length_reg, Operand(Smi::FromInt(1)));
+ __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
+ __ Ret();
+
+ __ bind(&check_capacity);
+ // Check for cow elements, in general they are not handled by this stub
+ __ CheckMap(elements_reg,
+ scratch,
+ Heap::kFixedCOWArrayMapRootIndex,
+ &miss_force_generic,
+ DONT_DO_SMI_CHECK);
+
+ __ lw(scratch, FieldMemOperand(elements_reg, FixedArray::kLengthOffset));
+ __ Branch(&slow, hs, length_reg, Operand(scratch));
+
+ // Grow the array and finish the store.
+ __ Addu(length_reg, length_reg, Operand(Smi::FromInt(1)));
+ __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
+ __ jmp(&finish_store);
+
+ __ bind(&slow);
+ Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
+ __ Jump(ic_slow, RelocInfo::CODE_TARGET);
+ }
}
void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
MacroAssembler* masm,
- bool is_js_array) {
+ bool is_js_array,
+ KeyedAccessGrowMode grow_mode) {
// ----------- S t a t e -------------
// -- a0 : value
// -- a1 : key
@@ -4271,7 +4385,8 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
// -- t2 : scratch (exponent_reg)
// -- t3 : scratch4
// -----------------------------------
- Label miss_force_generic, transition_elements_kind;
+ Label miss_force_generic, transition_elements_kind, grow, slow;
+ Label finish_store, check_capacity;
Register value_reg = a0;
Register key_reg = a1;
@@ -4281,6 +4396,7 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
Register scratch2 = t1;
Register scratch3 = t2;
Register scratch4 = t3;
+ Register length_reg = t3;
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
@@ -4298,7 +4414,13 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
}
// Compare smis, unsigned compare catches both negative and out-of-bound
// indexes.
- __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch1));
+ if (grow_mode == ALLOW_JSARRAY_GROWTH) {
+ __ Branch(&grow, hs, key_reg, Operand(scratch1));
+ } else {
+ __ Branch(&miss_force_generic, hs, key_reg, Operand(scratch1));
+ }
+
+ __ bind(&finish_store);
__ StoreNumberToDoubleElements(value_reg,
key_reg,
@@ -4322,6 +4444,71 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
__ bind(&transition_elements_kind);
Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
__ Jump(ic_miss, RelocInfo::CODE_TARGET);
+
+ if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
+ // Grow the array by a single element if possible.
+ __ bind(&grow);
+
+ // Make sure the array is only growing by a single element, anything else
+ // must be handled by the runtime.
+ __ Branch(&miss_force_generic, ne, key_reg, Operand(scratch1));
+
+ // Transition on values that can't be stored in a FixedDoubleArray.
+ Label value_is_smi;
+ __ JumpIfSmi(value_reg, &value_is_smi);
+ __ lw(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
+ __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
+ __ Branch(&transition_elements_kind, ne, scratch1, Operand(at));
+ __ bind(&value_is_smi);
+
+ // Check for the empty array, and preallocate a small backing store if
+ // possible.
+ __ lw(length_reg,
+ FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
+ __ lw(elements_reg,
+ FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
+ __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
+ __ Branch(&check_capacity, ne, elements_reg, Operand(at));
+
+ int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
+ __ AllocateInNewSpace(size, elements_reg, scratch1, scratch2, &slow,
+ TAG_OBJECT);
+
+ // Initialize the new FixedDoubleArray. Leave elements unitialized for
+ // efficiency, they are guaranteed to be initialized before use.
+ __ LoadRoot(scratch1, Heap::kFixedDoubleArrayMapRootIndex);
+ __ sw(scratch1, FieldMemOperand(elements_reg, JSObject::kMapOffset));
+ __ li(scratch1, Operand(Smi::FromInt(JSArray::kPreallocatedArrayElements)));
+ __ sw(scratch1,
+ FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset));
+
+ // Install the new backing store in the JSArray.
+ __ sw(elements_reg,
+ FieldMemOperand(receiver_reg, JSObject::kElementsOffset));
+ __ RecordWriteField(receiver_reg, JSObject::kElementsOffset, elements_reg,
+ scratch1, kRAHasNotBeenSaved, kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+
+ // Increment the length of the array.
+ __ li(length_reg, Operand(Smi::FromInt(1)));
+ __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
+ __ jmp(&finish_store);
+
+ __ bind(&check_capacity);
+ // Make sure that the backing store can hold additional elements.
+ __ lw(scratch1,
+ FieldMemOperand(elements_reg, FixedDoubleArray::kLengthOffset));
+ __ Branch(&slow, hs, length_reg, Operand(scratch1));
+
+ // Grow the array and finish the store.
+ __ Addu(length_reg, length_reg, Operand(Smi::FromInt(1)));
+ __ sw(length_reg, FieldMemOperand(receiver_reg, JSArray::kLengthOffset));
+ __ jmp(&finish_store);
+
+ __ bind(&slow);
+ Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
+ __ Jump(ic_slow, RelocInfo::CODE_TARGET);
+ }
}
diff --git a/src/3rdparty/v8/src/mirror-debugger.js b/src/3rdparty/v8/src/mirror-debugger.js
index 999252d..c43dd22 100644
--- a/src/3rdparty/v8/src/mirror-debugger.js
+++ b/src/3rdparty/v8/src/mirror-debugger.js
@@ -144,32 +144,32 @@ function inherits(ctor, superCtor) {
// Type names of the different mirrors.
-const UNDEFINED_TYPE = 'undefined';
-const NULL_TYPE = 'null';
-const BOOLEAN_TYPE = 'boolean';
-const NUMBER_TYPE = 'number';
-const STRING_TYPE = 'string';
-const OBJECT_TYPE = 'object';
-const FUNCTION_TYPE = 'function';
-const REGEXP_TYPE = 'regexp';
-const ERROR_TYPE = 'error';
-const PROPERTY_TYPE = 'property';
-const FRAME_TYPE = 'frame';
-const SCRIPT_TYPE = 'script';
-const CONTEXT_TYPE = 'context';
-const SCOPE_TYPE = 'scope';
+var UNDEFINED_TYPE = 'undefined';
+var NULL_TYPE = 'null';
+var BOOLEAN_TYPE = 'boolean';
+var NUMBER_TYPE = 'number';
+var STRING_TYPE = 'string';
+var OBJECT_TYPE = 'object';
+var FUNCTION_TYPE = 'function';
+var REGEXP_TYPE = 'regexp';
+var ERROR_TYPE = 'error';
+var PROPERTY_TYPE = 'property';
+var FRAME_TYPE = 'frame';
+var SCRIPT_TYPE = 'script';
+var CONTEXT_TYPE = 'context';
+var SCOPE_TYPE = 'scope';
// Maximum length when sending strings through the JSON protocol.
-const kMaxProtocolStringLength = 80;
+var kMaxProtocolStringLength = 80;
// Different kind of properties.
-PropertyKind = {};
+var PropertyKind = {};
PropertyKind.Named = 1;
PropertyKind.Indexed = 2;
// A copy of the PropertyType enum from global.h
-PropertyType = {};
+var PropertyType = {};
PropertyType.Normal = 0;
PropertyType.Field = 1;
PropertyType.ConstantFunction = 2;
@@ -183,7 +183,7 @@ PropertyType.NullDescriptor = 9;
// Different attributes for a property.
-PropertyAttribute = {};
+var PropertyAttribute = {};
PropertyAttribute.None = NONE;
PropertyAttribute.ReadOnly = READ_ONLY;
PropertyAttribute.DontEnum = DONT_ENUM;
@@ -191,12 +191,12 @@ PropertyAttribute.DontDelete = DONT_DELETE;
// A copy of the scope types from runtime.cc.
-ScopeType = { Global: 0,
- Local: 1,
- With: 2,
- Closure: 3,
- Catch: 4,
- Block: 5 };
+var ScopeType = { Global: 0,
+ Local: 1,
+ With: 2,
+ Closure: 3,
+ Catch: 4,
+ Block: 5 };
// Mirror hierarchy:
@@ -225,7 +225,7 @@ ScopeType = { Global: 0,
*/
function Mirror(type) {
this.type_ = type;
-};
+}
Mirror.prototype.type = function() {
@@ -239,7 +239,7 @@ Mirror.prototype.type = function() {
*/
Mirror.prototype.isValue = function() {
return this instanceof ValueMirror;
-}
+};
/**
@@ -248,7 +248,7 @@ Mirror.prototype.isValue = function() {
*/
Mirror.prototype.isUndefined = function() {
return this instanceof UndefinedMirror;
-}
+};
/**
@@ -257,7 +257,7 @@ Mirror.prototype.isUndefined = function() {
*/
Mirror.prototype.isNull = function() {
return this instanceof NullMirror;
-}
+};
/**
@@ -266,7 +266,7 @@ Mirror.prototype.isNull = function() {
*/
Mirror.prototype.isBoolean = function() {
return this instanceof BooleanMirror;
-}
+};
/**
@@ -275,7 +275,7 @@ Mirror.prototype.isBoolean = function() {
*/
Mirror.prototype.isNumber = function() {
return this instanceof NumberMirror;
-}
+};
/**
@@ -284,7 +284,7 @@ Mirror.prototype.isNumber = function() {
*/
Mirror.prototype.isString = function() {
return this instanceof StringMirror;
-}
+};
/**
@@ -293,7 +293,7 @@ Mirror.prototype.isString = function() {
*/
Mirror.prototype.isObject = function() {
return this instanceof ObjectMirror;
-}
+};
/**
@@ -302,7 +302,7 @@ Mirror.prototype.isObject = function() {
*/
Mirror.prototype.isFunction = function() {
return this instanceof FunctionMirror;
-}
+};
/**
@@ -311,7 +311,7 @@ Mirror.prototype.isFunction = function() {
*/
Mirror.prototype.isUnresolvedFunction = function() {
return this instanceof UnresolvedFunctionMirror;
-}
+};
/**
@@ -320,7 +320,7 @@ Mirror.prototype.isUnresolvedFunction = function() {
*/
Mirror.prototype.isArray = function() {
return this instanceof ArrayMirror;
-}
+};
/**
@@ -329,7 +329,7 @@ Mirror.prototype.isArray = function() {
*/
Mirror.prototype.isDate = function() {
return this instanceof DateMirror;
-}
+};
/**
@@ -338,7 +338,7 @@ Mirror.prototype.isDate = function() {
*/
Mirror.prototype.isRegExp = function() {
return this instanceof RegExpMirror;
-}
+};
/**
@@ -347,7 +347,7 @@ Mirror.prototype.isRegExp = function() {
*/
Mirror.prototype.isError = function() {
return this instanceof ErrorMirror;
-}
+};
/**
@@ -356,7 +356,7 @@ Mirror.prototype.isError = function() {
*/
Mirror.prototype.isProperty = function() {
return this instanceof PropertyMirror;
-}
+};
/**
@@ -365,7 +365,7 @@ Mirror.prototype.isProperty = function() {
*/
Mirror.prototype.isFrame = function() {
return this instanceof FrameMirror;
-}
+};
/**
@@ -374,7 +374,7 @@ Mirror.prototype.isFrame = function() {
*/
Mirror.prototype.isScript = function() {
return this instanceof ScriptMirror;
-}
+};
/**
@@ -383,7 +383,7 @@ Mirror.prototype.isScript = function() {
*/
Mirror.prototype.isContext = function() {
return this instanceof ContextMirror;
-}
+};
/**
@@ -392,7 +392,7 @@ Mirror.prototype.isContext = function() {
*/
Mirror.prototype.isScope = function() {
return this instanceof ScopeMirror;
-}
+};
/**
@@ -400,7 +400,7 @@ Mirror.prototype.isScope = function() {
*/
Mirror.prototype.allocateHandle_ = function() {
this.handle_ = next_handle_++;
-}
+};
/**
@@ -409,13 +409,13 @@ Mirror.prototype.allocateHandle_ = function() {
*/
Mirror.prototype.allocateTransientHandle_ = function() {
this.handle_ = next_transient_handle_--;
-}
+};
Mirror.prototype.toText = function() {
// Simpel to text which is used when on specialization in subclass.
return "#<" + this.constructor.name + ">";
-}
+};
/**
@@ -480,7 +480,7 @@ inherits(UndefinedMirror, ValueMirror);
UndefinedMirror.prototype.toText = function() {
return 'undefined';
-}
+};
/**
@@ -496,7 +496,7 @@ inherits(NullMirror, ValueMirror);
NullMirror.prototype.toText = function() {
return 'null';
-}
+};
/**
@@ -513,7 +513,7 @@ inherits(BooleanMirror, ValueMirror);
BooleanMirror.prototype.toText = function() {
return this.value_ ? 'true' : 'false';
-}
+};
/**
@@ -530,7 +530,7 @@ inherits(NumberMirror, ValueMirror);
NumberMirror.prototype.toText = function() {
return %NumberToString(this.value_);
-}
+};
/**
@@ -555,11 +555,11 @@ StringMirror.prototype.getTruncatedValue = function(maxLength) {
'... (length: ' + this.length() + ')';
}
return this.value_;
-}
+};
StringMirror.prototype.toText = function() {
return this.getTruncatedValue(kMaxProtocolStringLength);
-}
+};
/**
@@ -898,7 +898,7 @@ FunctionMirror.prototype.constructedBy = function(opt_max_instances) {
FunctionMirror.prototype.toText = function() {
return this.source();
-}
+};
/**
@@ -951,7 +951,7 @@ UnresolvedFunctionMirror.prototype.inferredName = function() {
UnresolvedFunctionMirror.prototype.propertyNames = function(kind, limit) {
return [];
-}
+};
/**
@@ -971,7 +971,8 @@ ArrayMirror.prototype.length = function() {
};
-ArrayMirror.prototype.indexedPropertiesFromRange = function(opt_from_index, opt_to_index) {
+ArrayMirror.prototype.indexedPropertiesFromRange = function(opt_from_index,
+ opt_to_index) {
var from_index = opt_from_index || 0;
var to_index = opt_to_index || this.length() - 1;
if (from_index > to_index) return new Array();
@@ -987,7 +988,7 @@ ArrayMirror.prototype.indexedPropertiesFromRange = function(opt_from_index, opt_
values[i - from_index] = value;
}
return values;
-}
+};
/**
@@ -1005,7 +1006,7 @@ inherits(DateMirror, ObjectMirror);
DateMirror.prototype.toText = function() {
var s = JSON.stringify(this.value_);
return s.substring(1, s.length - 1); // cut quotes
-}
+};
/**
@@ -1059,7 +1060,7 @@ RegExpMirror.prototype.multiline = function() {
RegExpMirror.prototype.toText = function() {
// Simpel to text which is used when on specialization in subclass.
return "/" + this.source() + "/";
-}
+};
/**
@@ -1092,7 +1093,7 @@ ErrorMirror.prototype.toText = function() {
str = '#<Error>';
}
return str;
-}
+};
/**
@@ -1110,7 +1111,7 @@ function PropertyMirror(mirror, name, details) {
this.value_ = details[0];
this.details_ = details[1];
if (details.length > 2) {
- this.exception_ = details[2]
+ this.exception_ = details[2];
this.getter_ = details[3];
this.setter_ = details[4];
}
@@ -1120,22 +1121,22 @@ inherits(PropertyMirror, Mirror);
PropertyMirror.prototype.isReadOnly = function() {
return (this.attributes() & PropertyAttribute.ReadOnly) != 0;
-}
+};
PropertyMirror.prototype.isEnum = function() {
return (this.attributes() & PropertyAttribute.DontEnum) == 0;
-}
+};
PropertyMirror.prototype.canDelete = function() {
return (this.attributes() & PropertyAttribute.DontDelete) == 0;
-}
+};
PropertyMirror.prototype.name = function() {
return this.name_;
-}
+};
PropertyMirror.prototype.isIndexed = function() {
@@ -1145,12 +1146,12 @@ PropertyMirror.prototype.isIndexed = function() {
}
}
return true;
-}
+};
PropertyMirror.prototype.value = function() {
return MakeMirror(this.value_, false);
-}
+};
/**
@@ -1159,22 +1160,22 @@ PropertyMirror.prototype.value = function() {
*/
PropertyMirror.prototype.isException = function() {
return this.exception_ ? true : false;
-}
+};
PropertyMirror.prototype.attributes = function() {
return %DebugPropertyAttributesFromDetails(this.details_);
-}
+};
PropertyMirror.prototype.propertyType = function() {
return %DebugPropertyTypeFromDetails(this.details_);
-}
+};
PropertyMirror.prototype.insertionIndex = function() {
return %DebugPropertyIndexFromDetails(this.details_);
-}
+};
/**
@@ -1183,7 +1184,7 @@ PropertyMirror.prototype.insertionIndex = function() {
*/
PropertyMirror.prototype.hasGetter = function() {
return this.getter_ ? true : false;
-}
+};
/**
@@ -1192,7 +1193,7 @@ PropertyMirror.prototype.hasGetter = function() {
*/
PropertyMirror.prototype.hasSetter = function() {
return this.setter_ ? true : false;
-}
+};
/**
@@ -1206,7 +1207,7 @@ PropertyMirror.prototype.getter = function() {
} else {
return GetUndefinedMirror();
}
-}
+};
/**
@@ -1220,7 +1221,7 @@ PropertyMirror.prototype.setter = function() {
} else {
return GetUndefinedMirror();
}
-}
+};
/**
@@ -1233,27 +1234,27 @@ PropertyMirror.prototype.isNative = function() {
return (this.propertyType() == PropertyType.Interceptor) ||
((this.propertyType() == PropertyType.Callbacks) &&
!this.hasGetter() && !this.hasSetter());
-}
+};
-const kFrameDetailsFrameIdIndex = 0;
-const kFrameDetailsReceiverIndex = 1;
-const kFrameDetailsFunctionIndex = 2;
-const kFrameDetailsArgumentCountIndex = 3;
-const kFrameDetailsLocalCountIndex = 4;
-const kFrameDetailsSourcePositionIndex = 5;
-const kFrameDetailsConstructCallIndex = 6;
-const kFrameDetailsAtReturnIndex = 7;
-const kFrameDetailsFlagsIndex = 8;
-const kFrameDetailsFirstDynamicIndex = 9;
+var kFrameDetailsFrameIdIndex = 0;
+var kFrameDetailsReceiverIndex = 1;
+var kFrameDetailsFunctionIndex = 2;
+var kFrameDetailsArgumentCountIndex = 3;
+var kFrameDetailsLocalCountIndex = 4;
+var kFrameDetailsSourcePositionIndex = 5;
+var kFrameDetailsConstructCallIndex = 6;
+var kFrameDetailsAtReturnIndex = 7;
+var kFrameDetailsFlagsIndex = 8;
+var kFrameDetailsFirstDynamicIndex = 9;
-const kFrameDetailsNameIndex = 0;
-const kFrameDetailsValueIndex = 1;
-const kFrameDetailsNameValueSize = 2;
+var kFrameDetailsNameIndex = 0;
+var kFrameDetailsValueIndex = 1;
+var kFrameDetailsNameValueSize = 2;
-const kFrameDetailsFlagDebuggerFrameMask = 1 << 0;
-const kFrameDetailsFlagOptimizedFrameMask = 1 << 1;
-const kFrameDetailsFlagInlinedFrameIndexMask = 7 << 2;
+var kFrameDetailsFlagDebuggerFrameMask = 1 << 0;
+var kFrameDetailsFlagOptimizedFrameMask = 1 << 1;
+var kFrameDetailsFlagInlinedFrameIndexMask = 7 << 2;
/**
* Wrapper for the frame details information retreived from the VM. The frame
@@ -1284,63 +1285,63 @@ function FrameDetails(break_id, index) {
FrameDetails.prototype.frameId = function() {
%CheckExecutionState(this.break_id_);
return this.details_[kFrameDetailsFrameIdIndex];
-}
+};
FrameDetails.prototype.receiver = function() {
%CheckExecutionState(this.break_id_);
return this.details_[kFrameDetailsReceiverIndex];
-}
+};
FrameDetails.prototype.func = function() {
%CheckExecutionState(this.break_id_);
return this.details_[kFrameDetailsFunctionIndex];
-}
+};
FrameDetails.prototype.isConstructCall = function() {
%CheckExecutionState(this.break_id_);
return this.details_[kFrameDetailsConstructCallIndex];
-}
+};
FrameDetails.prototype.isAtReturn = function() {
%CheckExecutionState(this.break_id_);
return this.details_[kFrameDetailsAtReturnIndex];
-}
+};
FrameDetails.prototype.isDebuggerFrame = function() {
%CheckExecutionState(this.break_id_);
var f = kFrameDetailsFlagDebuggerFrameMask;
return (this.details_[kFrameDetailsFlagsIndex] & f) == f;
-}
+};
FrameDetails.prototype.isOptimizedFrame = function() {
%CheckExecutionState(this.break_id_);
var f = kFrameDetailsFlagOptimizedFrameMask;
return (this.details_[kFrameDetailsFlagsIndex] & f) == f;
-}
+};
FrameDetails.prototype.isInlinedFrame = function() {
return this.inlinedFrameIndex() > 0;
-}
+};
FrameDetails.prototype.inlinedFrameIndex = function() {
%CheckExecutionState(this.break_id_);
var f = kFrameDetailsFlagInlinedFrameIndexMask;
- return (this.details_[kFrameDetailsFlagsIndex] & f) >> 2
-}
+ return (this.details_[kFrameDetailsFlagsIndex] & f) >> 2;
+};
FrameDetails.prototype.argumentCount = function() {
%CheckExecutionState(this.break_id_);
return this.details_[kFrameDetailsArgumentCountIndex];
-}
+};
FrameDetails.prototype.argumentName = function(index) {
@@ -1348,9 +1349,9 @@ FrameDetails.prototype.argumentName = function(index) {
if (index >= 0 && index < this.argumentCount()) {
return this.details_[kFrameDetailsFirstDynamicIndex +
index * kFrameDetailsNameValueSize +
- kFrameDetailsNameIndex]
+ kFrameDetailsNameIndex];
}
-}
+};
FrameDetails.prototype.argumentValue = function(index) {
@@ -1358,45 +1359,45 @@ FrameDetails.prototype.argumentValue = function(index) {
if (index >= 0 && index < this.argumentCount()) {
return this.details_[kFrameDetailsFirstDynamicIndex +
index * kFrameDetailsNameValueSize +
- kFrameDetailsValueIndex]
+ kFrameDetailsValueIndex];
}
-}
+};
FrameDetails.prototype.localCount = function() {
%CheckExecutionState(this.break_id_);
return this.details_[kFrameDetailsLocalCountIndex];
-}
+};
FrameDetails.prototype.sourcePosition = function() {
%CheckExecutionState(this.break_id_);
return this.details_[kFrameDetailsSourcePositionIndex];
-}
+};
FrameDetails.prototype.localName = function(index) {
%CheckExecutionState(this.break_id_);
if (index >= 0 && index < this.localCount()) {
var locals_offset = kFrameDetailsFirstDynamicIndex +
- this.argumentCount() * kFrameDetailsNameValueSize
+ this.argumentCount() * kFrameDetailsNameValueSize;
return this.details_[locals_offset +
index * kFrameDetailsNameValueSize +
- kFrameDetailsNameIndex]
+ kFrameDetailsNameIndex];
}
-}
+};
FrameDetails.prototype.localValue = function(index) {
%CheckExecutionState(this.break_id_);
if (index >= 0 && index < this.localCount()) {
var locals_offset = kFrameDetailsFirstDynamicIndex +
- this.argumentCount() * kFrameDetailsNameValueSize
+ this.argumentCount() * kFrameDetailsNameValueSize;
return this.details_[locals_offset +
index * kFrameDetailsNameValueSize +
- kFrameDetailsValueIndex]
+ kFrameDetailsValueIndex];
}
-}
+};
FrameDetails.prototype.returnValue = function() {
@@ -1407,12 +1408,12 @@ FrameDetails.prototype.returnValue = function() {
if (this.details_[kFrameDetailsAtReturnIndex]) {
return this.details_[return_value_offset];
}
-}
+};
FrameDetails.prototype.scopeCount = function() {
return %GetScopeCount(this.break_id_, this.frameId());
-}
+};
/**
@@ -1575,7 +1576,8 @@ FrameMirror.prototype.scope = function(index) {
};
-FrameMirror.prototype.evaluate = function(source, disable_break, opt_context_object) {
+FrameMirror.prototype.evaluate = function(source, disable_break,
+ opt_context_object) {
var result = %DebugEvaluate(this.break_id_,
this.details_.frameId(),
this.details_.inlinedFrameIndex(),
@@ -1599,7 +1601,8 @@ FrameMirror.prototype.invocationText = function() {
result += '[debugger]';
} else {
// If the receiver has a className which is 'global' don't display it.
- var display_receiver = !receiver.className || receiver.className() != 'global';
+ var display_receiver =
+ !receiver.className || (receiver.className() != 'global');
if (display_receiver) {
result += receiver.toText();
}
@@ -1661,7 +1664,7 @@ FrameMirror.prototype.invocationText = function() {
}
return result;
-}
+};
FrameMirror.prototype.sourceAndPositionText = function() {
@@ -1693,13 +1696,13 @@ FrameMirror.prototype.sourceAndPositionText = function() {
}
return result;
-}
+};
FrameMirror.prototype.localsText = function() {
// Format local variables.
var result = '';
- var locals_count = this.localCount()
+ var locals_count = this.localCount();
if (locals_count > 0) {
for (var i = 0; i < locals_count; ++i) {
result += ' var ';
@@ -1711,7 +1714,7 @@ FrameMirror.prototype.localsText = function() {
}
return result;
-}
+};
FrameMirror.prototype.toText = function(opt_locals) {
@@ -1726,11 +1729,11 @@ FrameMirror.prototype.toText = function(opt_locals) {
result += this.localsText();
}
return result;
-}
+};
-const kScopeDetailsTypeIndex = 0;
-const kScopeDetailsObjectIndex = 1;
+var kScopeDetailsTypeIndex = 0;
+var kScopeDetailsObjectIndex = 1;
function ScopeDetails(frame, index) {
this.break_id_ = frame.break_id_;
@@ -1744,13 +1747,13 @@ function ScopeDetails(frame, index) {
ScopeDetails.prototype.type = function() {
%CheckExecutionState(this.break_id_);
return this.details_[kScopeDetailsTypeIndex];
-}
+};
ScopeDetails.prototype.object = function() {
%CheckExecutionState(this.break_id_);
return this.details_[kScopeDetailsObjectIndex];
-}
+};
/**
@@ -1829,6 +1832,11 @@ ScriptMirror.prototype.source = function() {
};
+ScriptMirror.prototype.setSource = function(source) {
+ %DebugSetScriptSource(this.script_, source);
+};
+
+
ScriptMirror.prototype.lineOffset = function() {
return this.script_.line_offset;
};
@@ -1862,12 +1870,12 @@ ScriptMirror.prototype.lineCount = function() {
ScriptMirror.prototype.locationFromPosition = function(
position, include_resource_offset) {
return this.script_.locationFromPosition(position, include_resource_offset);
-}
+};
ScriptMirror.prototype.sourceSlice = function (opt_from_line, opt_to_line) {
return this.script_.sourceSlice(opt_from_line, opt_to_line);
-}
+};
ScriptMirror.prototype.context = function() {
@@ -1907,7 +1915,7 @@ ScriptMirror.prototype.toText = function() {
}
result += ')';
return result;
-}
+};
/**
@@ -1965,7 +1973,7 @@ function JSONProtocolSerializer(details, options) {
*/
JSONProtocolSerializer.prototype.serializeReference = function(mirror) {
return this.serialize_(mirror, true, true);
-}
+};
/**
@@ -1978,7 +1986,7 @@ JSONProtocolSerializer.prototype.serializeReference = function(mirror) {
JSONProtocolSerializer.prototype.serializeValue = function(mirror) {
var json = this.serialize_(mirror, false, true);
return json;
-}
+};
/**
@@ -2000,17 +2008,17 @@ JSONProtocolSerializer.prototype.serializeReferencedObjects = function() {
}
return content;
-}
+};
JSONProtocolSerializer.prototype.includeSource_ = function() {
return this.options_ && this.options_.includeSource;
-}
+};
JSONProtocolSerializer.prototype.inlineRefs_ = function() {
return this.options_ && this.options_.inlineRefs;
-}
+};
JSONProtocolSerializer.prototype.maxStringLength_ = function() {
@@ -2019,7 +2027,7 @@ JSONProtocolSerializer.prototype.maxStringLength_ = function() {
return kMaxProtocolStringLength;
}
return this.options_.maxStringLength;
-}
+};
JSONProtocolSerializer.prototype.add_ = function(mirror) {
@@ -2032,7 +2040,7 @@ JSONProtocolSerializer.prototype.add_ = function(mirror) {
// Add the mirror to the list of mirrors to be serialized.
this.mirrors_.push(mirror);
-}
+};
/**
@@ -2139,7 +2147,7 @@ JSONProtocolSerializer.prototype.serialize_ = function(mirror, reference,
break;
case PROPERTY_TYPE:
- throw new Error('PropertyMirror cannot be serialized independeltly')
+ throw new Error('PropertyMirror cannot be serialized independeltly');
break;
case FRAME_TYPE:
@@ -2179,7 +2187,7 @@ JSONProtocolSerializer.prototype.serialize_ = function(mirror, reference,
mirror.evalFromScript()) {
content.evalFromScript =
this.serializeReference(mirror.evalFromScript());
- var evalFromLocation = mirror.evalFromLocation()
+ var evalFromLocation = mirror.evalFromLocation();
if (evalFromLocation) {
content.evalFromLocation = { line: evalFromLocation.line,
column: evalFromLocation.column };
@@ -2203,7 +2211,7 @@ JSONProtocolSerializer.prototype.serialize_ = function(mirror, reference,
// Create and return the JSON string.
return content;
-}
+};
/**
@@ -2278,7 +2286,7 @@ JSONProtocolSerializer.prototype.serializeObject_ = function(mirror, content,
}
}
content.properties = p;
-}
+};
/**
@@ -2342,7 +2350,7 @@ JSONProtocolSerializer.prototype.serializeProperty_ = function(propertyMirror) {
result.ref = propertyValue.handle();
}
return result;
-}
+};
JSONProtocolSerializer.prototype.serializeFrame_ = function(mirror, content) {
@@ -2362,7 +2370,7 @@ JSONProtocolSerializer.prototype.serializeFrame_ = function(mirror, content) {
var x = new Array(mirror.argumentCount());
for (var i = 0; i < mirror.argumentCount(); i++) {
var arg = {};
- var argument_name = mirror.argumentName(i)
+ var argument_name = mirror.argumentName(i);
if (argument_name) {
arg.name = argument_name;
}
@@ -2392,7 +2400,7 @@ JSONProtocolSerializer.prototype.serializeFrame_ = function(mirror, content) {
index: i
});
}
-}
+};
JSONProtocolSerializer.prototype.serializeScope_ = function(mirror, content) {
@@ -2402,7 +2410,7 @@ JSONProtocolSerializer.prototype.serializeScope_ = function(mirror, content) {
content.object = this.inlineRefs_() ?
this.serializeValue(mirror.scopeObject()) :
this.serializeReference(mirror.scopeObject());
-}
+};
/**
diff --git a/src/3rdparty/v8/src/mksnapshot.cc b/src/3rdparty/v8/src/mksnapshot.cc
index 82871c0..d1620bf 100644
--- a/src/3rdparty/v8/src/mksnapshot.cc
+++ b/src/3rdparty/v8/src/mksnapshot.cc
@@ -312,7 +312,7 @@ int main(int argc, char** argv) {
}
// If we don't do this then we end up with a stray root pointing at the
// context even after we have disposed of the context.
- HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(i::Heap::kNoGCFlags, "mksnapshot");
i::Object* raw_context = *(v8::Utils::OpenHandle(*context));
context.Dispose();
CppByteSink sink(argv[1]);
diff --git a/src/3rdparty/v8/src/objects-debug.cc b/src/3rdparty/v8/src/objects-debug.cc
index 64bda94..8eefb23 100644
--- a/src/3rdparty/v8/src/objects-debug.cc
+++ b/src/3rdparty/v8/src/objects-debug.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -138,6 +138,9 @@ void HeapObject::HeapObjectVerify() {
case JS_VALUE_TYPE:
JSValue::cast(this)->JSValueVerify();
break;
+ case JS_DATE_TYPE:
+ JSDate::cast(this)->JSDateVerify();
+ break;
case JS_FUNCTION_TYPE:
JSFunction::cast(this)->JSFunctionVerify();
break;
@@ -280,7 +283,9 @@ void JSObject::JSObjectVerify() {
(map()->inobject_properties() + properties()->length() -
map()->NextFreePropertyIndex()));
}
- ASSERT_EQ((map()->has_fast_elements() || map()->has_fast_smi_only_elements()),
+ ASSERT_EQ((map()->has_fast_elements() ||
+ map()->has_fast_smi_only_elements() ||
+ (elements() == GetHeap()->empty_fixed_array())),
(elements()->map() == GetHeap()->fixed_array_map() ||
elements()->map() == GetHeap()->fixed_cow_array_map()));
ASSERT(map()->has_fast_elements() == HasFastElements());
@@ -324,6 +329,18 @@ void PolymorphicCodeCache::PolymorphicCodeCacheVerify() {
}
+void TypeFeedbackInfo::TypeFeedbackInfoVerify() {
+ VerifyObjectField(kIcTotalCountOffset);
+ VerifyObjectField(kIcWithTypeinfoCountOffset);
+ VerifyHeapPointer(type_feedback_cells());
+}
+
+
+void AliasedArgumentsEntry::AliasedArgumentsEntryVerify() {
+ VerifySmiField(kAliasedContextSlot);
+}
+
+
void FixedArray::FixedArrayVerify() {
for (int i = 0; i < length(); i++) {
Object* e = get(i);
@@ -357,6 +374,53 @@ void JSValue::JSValueVerify() {
}
+void JSDate::JSDateVerify() {
+ if (value()->IsHeapObject()) {
+ VerifyHeapPointer(value());
+ }
+ CHECK(value()->IsUndefined() || value()->IsSmi() || value()->IsHeapNumber());
+ CHECK(year()->IsUndefined() || year()->IsSmi() || year()->IsNaN());
+ CHECK(month()->IsUndefined() || month()->IsSmi() || month()->IsNaN());
+ CHECK(day()->IsUndefined() || day()->IsSmi() || day()->IsNaN());
+ CHECK(weekday()->IsUndefined() || weekday()->IsSmi() || weekday()->IsNaN());
+ CHECK(hour()->IsUndefined() || hour()->IsSmi() || hour()->IsNaN());
+ CHECK(min()->IsUndefined() || min()->IsSmi() || min()->IsNaN());
+ CHECK(sec()->IsUndefined() || sec()->IsSmi() || sec()->IsNaN());
+ CHECK(cache_stamp()->IsUndefined() ||
+ cache_stamp()->IsSmi() ||
+ cache_stamp()->IsNaN());
+
+ if (month()->IsSmi()) {
+ int month = Smi::cast(this->month())->value();
+ CHECK(0 <= month && month <= 11);
+ }
+ if (day()->IsSmi()) {
+ int day = Smi::cast(this->day())->value();
+ CHECK(1 <= day && day <= 31);
+ }
+ if (hour()->IsSmi()) {
+ int hour = Smi::cast(this->hour())->value();
+ CHECK(0 <= hour && hour <= 23);
+ }
+ if (min()->IsSmi()) {
+ int min = Smi::cast(this->min())->value();
+ CHECK(0 <= min && min <= 59);
+ }
+ if (sec()->IsSmi()) {
+ int sec = Smi::cast(this->sec())->value();
+ CHECK(0 <= sec && sec <= 59);
+ }
+ if (weekday()->IsSmi()) {
+ int weekday = Smi::cast(this->weekday())->value();
+ CHECK(0 <= weekday && weekday <= 6);
+ }
+ if (cache_stamp()->IsSmi()) {
+ CHECK(Smi::cast(cache_stamp())->value() <=
+ Smi::cast(Isolate::Current()->date_cache()->stamp())->value());
+ }
+}
+
+
void JSMessageObject::JSMessageObjectVerify() {
CHECK(IsJSMessageObject());
CHECK(type()->IsString());
@@ -388,7 +452,7 @@ void ConsString::ConsStringVerify() {
CHECK(this->first()->IsString());
CHECK(this->second() == GetHeap()->empty_string() ||
this->second()->IsString());
- CHECK(this->length() >= String::kMinNonFlatLength);
+ CHECK(this->length() >= ConsString::kMinLength);
if (this->IsFlat()) {
// A flat cons can only be created by String::SlowTryFlatten.
// Afterwards, the first part may be externalized.
@@ -468,8 +532,9 @@ void Oddball::OddballVerify() {
} else {
ASSERT(number->IsSmi());
int value = Smi::cast(number)->value();
- ASSERT(value <= 1);
// Hidden oddballs have negative smis.
+ const int kLeastHiddenOddballNumber = -4;
+ ASSERT(value <= 1);
ASSERT(value >= kLeastHiddenOddballNumber);
}
}
@@ -602,6 +667,13 @@ void AccessorInfo::AccessorInfoVerify() {
}
+void AccessorPair::AccessorPairVerify() {
+ CHECK(IsAccessorPair());
+ VerifyPointer(getter());
+ VerifyPointer(setter());
+}
+
+
void AccessCheckInfo::AccessCheckInfoVerify() {
CHECK(IsAccessCheckInfo());
VerifyPointer(named_callback());
@@ -739,7 +811,7 @@ void JSObject::IncrementSpillStatistics(SpillInformation* info) {
break;
}
case DICTIONARY_ELEMENTS: {
- NumberDictionary* dict = element_dictionary();
+ SeededNumberDictionary* dict = element_dictionary();
info->number_of_slow_used_elements_ += dict->NumberOfElements();
info->number_of_slow_unused_elements_ +=
dict->Capacity() - dict->NumberOfElements();
diff --git a/src/3rdparty/v8/src/objects-inl.h b/src/3rdparty/v8/src/objects-inl.h
index 75907c7..49c8db8 100644
--- a/src/3rdparty/v8/src/objects-inl.h
+++ b/src/3rdparty/v8/src/objects-inl.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -45,7 +45,7 @@
#include "spaces.h"
#include "store-buffer.h"
#include "v8memory.h"
-
+#include "factory.h"
#include "incremental-marking.h"
namespace v8 {
@@ -94,6 +94,15 @@ PropertyDetails PropertyDetails::AsDeleted() {
}
+// Getter that returns a tagged Smi and setter that writes a tagged Smi.
+#define ACCESSORS_TO_SMI(holder, name, offset) \
+ Smi* holder::name() { return Smi::cast(READ_FIELD(this, offset)); } \
+ void holder::set_##name(Smi* value, WriteBarrierMode mode) { \
+ WRITE_FIELD(this, offset, value); \
+ }
+
+
+// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS(holder, name, offset) \
int holder::name() { \
Object* value = READ_FIELD(this, offset); \
@@ -119,6 +128,18 @@ PropertyDetails PropertyDetails::AsDeleted() {
}
+bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
+ ElementsKind to_kind) {
+ if (to_kind == FAST_ELEMENTS) {
+ return from_kind == FAST_SMI_ONLY_ELEMENTS ||
+ from_kind == FAST_DOUBLE_ELEMENTS;
+ } else {
+ return to_kind == FAST_DOUBLE_ELEMENTS &&
+ from_kind == FAST_SMI_ONLY_ELEMENTS;
+ }
+}
+
+
bool Object::IsFixedArrayBase() {
return IsFixedArray() || IsFixedDoubleArray();
}
@@ -542,6 +563,16 @@ bool Object::IsDeoptimizationOutputData() {
}
+bool Object::IsTypeFeedbackCells() {
+ if (!IsFixedArray()) return false;
+ // There's actually no way to see the difference between a fixed array and
+ // a cache cells array. Since this is used for asserts we can check that
+ // the length is plausible though.
+ if (FixedArray::cast(this)->length() % 2 != 0) return false;
+ return true;
+}
+
+
bool Object::IsContext() {
if (Object::IsHeapObject()) {
Map* map = HeapObject::cast(this)->map();
@@ -563,10 +594,10 @@ bool Object::IsGlobalContext() {
}
-bool Object::IsSerializedScopeInfo() {
+bool Object::IsScopeInfo() {
return Object::IsHeapObject() &&
HeapObject::cast(this)->map() ==
- HeapObject::cast(this)->GetHeap()->serialized_scope_info_map();
+ HeapObject::cast(this)->GetHeap()->scope_info_map();
}
@@ -583,6 +614,7 @@ TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(JSGlobalPropertyCell, JS_GLOBAL_PROPERTY_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
+TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
@@ -778,6 +810,11 @@ double Object::Number() {
}
+bool Object::IsNaN() {
+ return this->IsHeapNumber() && isnan(HeapNumber::cast(this)->value());
+}
+
+
MaybeObject* Object::ToSmi() {
if (IsSmi()) return this;
if (IsHeapNumber()) {
@@ -907,6 +944,12 @@ MaybeObject* Object::GetProperty(String* key, PropertyAttributes* attributes) {
#define WRITE_UINT32_FIELD(p, offset, value) \
(*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)
+#define READ_INT64_FIELD(p, offset) \
+ (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)))
+
+#define WRITE_INT64_FIELD(p, offset, value) \
+ (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)
+
#define READ_SHORT_FIELD(p, offset) \
(*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)))
@@ -1103,7 +1146,7 @@ void HeapObject::set_map(Map* value) {
// Unsafe accessor omitting write barrier.
-void HeapObject::set_map_unsafe(Map* value) {
+void HeapObject::set_map_no_write_barrier(Map* value) {
set_map_word(MapWord::FromMap(value));
}
@@ -1171,6 +1214,22 @@ int HeapNumber::get_sign() {
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
+Object** FixedArray::GetFirstElementAddress() {
+ return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
+}
+
+
+bool FixedArray::ContainsOnlySmisOrHoles() {
+ Object* the_hole = GetHeap()->the_hole_value();
+ Object** current = GetFirstElementAddress();
+ for (int i = 0; i < length(); ++i) {
+ Object* candidate = *current++;
+ if (!candidate->IsSmi() && candidate != the_hole) return false;
+ }
+ return true;
+}
+
+
FixedArrayBase* JSObject::elements() {
Object* array = READ_FIELD(this, kElementsOffset);
return static_cast<FixedArrayBase*>(array);
@@ -1191,7 +1250,7 @@ void JSObject::ValidateSmiOnlyElements() {
map != heap->free_space_map()) {
for (int i = 0; i < fixed_array->length(); i++) {
Object* current = fixed_array->get(i);
- ASSERT(current->IsSmi() || current == heap->the_hole_value());
+ ASSERT(current->IsSmi() || current->IsTheHole());
}
}
}
@@ -1199,57 +1258,124 @@ void JSObject::ValidateSmiOnlyElements() {
}
-MaybeObject* JSObject::EnsureCanContainNonSmiElements() {
+MaybeObject* JSObject::EnsureCanContainHeapObjectElements() {
#if DEBUG
ValidateSmiOnlyElements();
#endif
- if ((map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS)) {
- Object* obj;
- MaybeObject* maybe_obj = GetElementsTransitionMap(FAST_ELEMENTS);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- set_map(Map::cast(obj));
+ if ((map()->elements_kind() != FAST_ELEMENTS)) {
+ return TransitionElementsKind(FAST_ELEMENTS);
}
return this;
}
MaybeObject* JSObject::EnsureCanContainElements(Object** objects,
- uint32_t count) {
- if (map()->elements_kind() == FAST_SMI_ONLY_ELEMENTS) {
- for (uint32_t i = 0; i < count; ++i) {
- Object* current = *objects++;
- if (!current->IsSmi() && current != GetHeap()->the_hole_value()) {
- return EnsureCanContainNonSmiElements();
+ uint32_t count,
+ EnsureElementsMode mode) {
+ ElementsKind current_kind = map()->elements_kind();
+ ElementsKind target_kind = current_kind;
+ ASSERT(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
+ if (current_kind == FAST_ELEMENTS) return this;
+
+ Heap* heap = GetHeap();
+ Object* the_hole = heap->the_hole_value();
+ Object* heap_number_map = heap->heap_number_map();
+ for (uint32_t i = 0; i < count; ++i) {
+ Object* current = *objects++;
+ if (!current->IsSmi() && current != the_hole) {
+ if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS &&
+ HeapObject::cast(current)->map() == heap_number_map) {
+ target_kind = FAST_DOUBLE_ELEMENTS;
+ } else {
+ target_kind = FAST_ELEMENTS;
+ break;
}
}
}
+
+ if (target_kind != current_kind) {
+ return TransitionElementsKind(target_kind);
+ }
return this;
}
-MaybeObject* JSObject::EnsureCanContainElements(FixedArray* elements) {
- Object** objects = reinterpret_cast<Object**>(
- FIELD_ADDR(elements, elements->OffsetOfElementAt(0)));
- return EnsureCanContainElements(objects, elements->length());
+MaybeObject* JSObject::EnsureCanContainElements(FixedArrayBase* elements,
+ EnsureElementsMode mode) {
+ if (elements->map() != GetHeap()->fixed_double_array_map()) {
+ ASSERT(elements->map() == GetHeap()->fixed_array_map() ||
+ elements->map() == GetHeap()->fixed_cow_array_map());
+ if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
+ mode = DONT_ALLOW_DOUBLE_ELEMENTS;
+ }
+ Object** objects = FixedArray::cast(elements)->GetFirstElementAddress();
+ return EnsureCanContainElements(objects, elements->length(), mode);
+ }
+
+ ASSERT(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
+ if (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS) {
+ return TransitionElementsKind(FAST_DOUBLE_ELEMENTS);
+ }
+
+ return this;
}
-void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
- ASSERT((map()->has_fast_elements() ||
- map()->has_fast_smi_only_elements()) ==
- (value->map() == GetHeap()->fixed_array_map() ||
- value->map() == GetHeap()->fixed_cow_array_map()));
- ASSERT(map()->has_fast_double_elements() ==
- value->IsFixedDoubleArray());
+MaybeObject* JSObject::GetElementsTransitionMap(Isolate* isolate,
+ ElementsKind to_kind) {
+ Map* current_map = map();
+ ElementsKind from_kind = current_map->elements_kind();
+
+ if (from_kind == to_kind) return current_map;
+
+ Context* global_context = isolate->context()->global_context();
+ if (current_map == global_context->smi_js_array_map()) {
+ if (to_kind == FAST_ELEMENTS) {
+ return global_context->object_js_array_map();
+ } else {
+ if (to_kind == FAST_DOUBLE_ELEMENTS) {
+ return global_context->double_js_array_map();
+ } else {
+ ASSERT(to_kind == DICTIONARY_ELEMENTS);
+ }
+ }
+ }
+ return GetElementsTransitionMapSlow(to_kind);
+}
+
+
+void JSObject::set_map_and_elements(Map* new_map,
+ FixedArrayBase* value,
+ WriteBarrierMode mode) {
ASSERT(value->HasValidElements());
#ifdef DEBUG
ValidateSmiOnlyElements();
#endif
+ if (new_map != NULL) {
+ if (mode == UPDATE_WRITE_BARRIER) {
+ set_map(new_map);
+ } else {
+ ASSERT(mode == SKIP_WRITE_BARRIER);
+ set_map_no_write_barrier(new_map);
+ }
+ }
+ ASSERT((map()->has_fast_elements() ||
+ map()->has_fast_smi_only_elements() ||
+ (value == GetHeap()->empty_fixed_array())) ==
+ (value->map() == GetHeap()->fixed_array_map() ||
+ value->map() == GetHeap()->fixed_cow_array_map()));
+ ASSERT((value == GetHeap()->empty_fixed_array()) ||
+ (map()->has_fast_double_elements() == value->IsFixedDoubleArray()));
WRITE_FIELD(this, kElementsOffset, value);
CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}
+void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
+ set_map_and_elements(NULL, value, mode);
+}
+
+
void JSObject::initialize_properties() {
ASSERT(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
@@ -1268,7 +1394,8 @@ MaybeObject* JSObject::ResetElements() {
ElementsKind elements_kind = FLAG_smi_only_arrays
? FAST_SMI_ONLY_ELEMENTS
: FAST_ELEMENTS;
- MaybeObject* maybe_obj = GetElementsTransitionMap(elements_kind);
+ MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
+ elements_kind);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
set_map(Map::cast(obj));
initialize_elements();
@@ -1299,8 +1426,6 @@ void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) {
// The write barrier is not used for global property cells.
ASSERT(!val->IsJSGlobalPropertyCell());
WRITE_FIELD(this, kValueOffset, val);
- GetHeap()->incremental_marking()->RecordWrite(
- this, HeapObject::RawField(this, kValueOffset), val);
}
@@ -1321,12 +1446,14 @@ int JSObject::GetHeaderSize() {
return JSFunction::kSize;
case JS_VALUE_TYPE:
return JSValue::kSize;
+ case JS_DATE_TYPE:
+ return JSDate::kSize;
case JS_ARRAY_TYPE:
- return JSValue::kSize;
+ return JSArray::kSize;
case JS_WEAK_MAP_TYPE:
return JSWeakMap::kSize;
case JS_REGEXP_TYPE:
- return JSValue::kSize;
+ return JSRegExp::kSize;
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
return JSObject::kHeaderSize;
case JS_MESSAGE_OBJECT_TYPE:
@@ -1343,7 +1470,7 @@ int JSObject::GetInternalFieldCount() {
// Make sure to adjust for the number of in-object properties. These
// properties do contribute to the size, but are not internal fields.
return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
- map()->inobject_properties() - (map()->has_external_resource()?1:0);
+ map()->inobject_properties();
}
@@ -1373,20 +1500,13 @@ void JSObject::SetInternalField(int index, Object* value) {
}
-void JSObject::SetExternalResourceObject(Object *value) {
- ASSERT(map()->has_external_resource());
- int offset = GetHeaderSize() + kPointerSize * GetInternalFieldCount();
+void JSObject::SetInternalField(int index, Smi* value) {
+ ASSERT(index < GetInternalFieldCount() && index >= 0);
+ // Internal objects do follow immediately after the header, whereas in-object
+ // properties are at the end of the object. Therefore there is no need
+ // to adjust the index here.
+ int offset = GetHeaderSize() + (kPointerSize * index);
WRITE_FIELD(this, offset, value);
- WRITE_BARRIER(GetHeap(), this, offset, value);
-}
-
-
-Object *JSObject::GetExternalResourceObject() {
- if (map()->has_external_resource()) {
- return READ_FIELD(this, GetHeaderSize() + kPointerSize * GetInternalFieldCount());
- } else {
- return GetHeap()->undefined_value();
- }
}
@@ -1587,6 +1707,12 @@ double FixedDoubleArray::get_scalar(int index) {
return result;
}
+int64_t FixedDoubleArray::get_representation(int index) {
+ ASSERT(map() != HEAP->fixed_cow_array_map() &&
+ map() != HEAP->fixed_array_map());
+ ASSERT(index >= 0 && index < this->length());
+ return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
+}
MaybeObject* FixedDoubleArray::get(int index) {
if (is_the_hole(index)) {
@@ -1620,65 +1746,6 @@ bool FixedDoubleArray::is_the_hole(int index) {
}
-void FixedDoubleArray::Initialize(FixedDoubleArray* from) {
- int old_length = from->length();
- ASSERT(old_length < length());
- if (old_length * kDoubleSize >= OS::kMinComplexMemCopy) {
- OS::MemCopy(FIELD_ADDR(this, kHeaderSize),
- FIELD_ADDR(from, kHeaderSize),
- old_length * kDoubleSize);
- } else {
- for (int i = 0; i < old_length; ++i) {
- if (from->is_the_hole(i)) {
- set_the_hole(i);
- } else {
- set(i, from->get_scalar(i));
- }
- }
- }
- int offset = kHeaderSize + old_length * kDoubleSize;
- for (int current = from->length(); current < length(); ++current) {
- WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
- offset += kDoubleSize;
- }
-}
-
-
-void FixedDoubleArray::Initialize(FixedArray* from) {
- int old_length = from->length();
- ASSERT(old_length <= length());
- for (int i = 0; i < old_length; i++) {
- Object* hole_or_object = from->get(i);
- if (hole_or_object->IsTheHole()) {
- set_the_hole(i);
- } else {
- set(i, hole_or_object->Number());
- }
- }
- int offset = kHeaderSize + old_length * kDoubleSize;
- for (int current = from->length(); current < length(); ++current) {
- WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
- offset += kDoubleSize;
- }
-}
-
-
-void FixedDoubleArray::Initialize(NumberDictionary* from) {
- int offset = kHeaderSize;
- for (int current = 0; current < length(); ++current) {
- WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
- offset += kDoubleSize;
- }
- for (int i = 0; i < from->Capacity(); i++) {
- Object* key = from->KeyAt(i);
- if (key->IsNumber()) {
- uint32_t entry = static_cast<uint32_t>(key->Number());
- set(entry, from->ValueAt(i)->Number());
- }
- }
-}
-
-
WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
Heap* heap = GetHeap();
if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
@@ -1698,6 +1765,20 @@ void FixedArray::set(int index,
}
+void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
+ int index,
+ Object* value) {
+ ASSERT(array->map() != HEAP->raw_unchecked_fixed_cow_array_map());
+ ASSERT(index >= 0 && index < array->length());
+ int offset = kHeaderSize + index * kPointerSize;
+ WRITE_FIELD(array, offset, value);
+ Heap* heap = array->GetHeap();
+ if (heap->InNewSpace(value)) {
+ heap->RecordWrite(array->address(), offset);
+ }
+}
+
+
void FixedArray::NoWriteBarrierSet(FixedArray* array,
int index,
Object* value) {
@@ -1792,12 +1873,12 @@ void DescriptorArray::set_bit_field3_storage(int value) {
}
-void DescriptorArray::NoWriteBarrierSwap(FixedArray* array,
- int first,
- int second) {
+void DescriptorArray::NoIncrementalWriteBarrierSwap(FixedArray* array,
+ int first,
+ int second) {
Object* tmp = array->get(first);
- NoWriteBarrierSet(array, first, array->get(second));
- NoWriteBarrierSet(array, second, tmp);
+ NoIncrementalWriteBarrierSet(array, first, array->get(second));
+ NoIncrementalWriteBarrierSet(array, second, tmp);
}
@@ -1877,14 +1958,33 @@ AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
bool DescriptorArray::IsProperty(int descriptor_number) {
- return GetType(descriptor_number) < FIRST_PHANTOM_PROPERTY_TYPE;
+ Entry entry(this, descriptor_number);
+ return IsPropertyDescriptor(&entry);
}
-bool DescriptorArray::IsTransition(int descriptor_number) {
- PropertyType t = GetType(descriptor_number);
- return t == MAP_TRANSITION || t == CONSTANT_TRANSITION ||
- t == ELEMENTS_TRANSITION;
+bool DescriptorArray::IsTransitionOnly(int descriptor_number) {
+ switch (GetType(descriptor_number)) {
+ case MAP_TRANSITION:
+ case CONSTANT_TRANSITION:
+ case ELEMENTS_TRANSITION:
+ return true;
+ case CALLBACKS: {
+ Object* value = GetValue(descriptor_number);
+ if (!value->IsAccessorPair()) return false;
+ AccessorPair* accessors = AccessorPair::cast(value);
+ return accessors->getter()->IsMap() && accessors->setter()->IsMap();
+ }
+ case NORMAL:
+ case FIELD:
+ case CONSTANT_FUNCTION:
+ case HANDLER:
+ case INTERCEPTOR:
+ case NULL_DESCRIPTOR:
+ return false;
+ }
+ UNREACHABLE(); // Keep the compiler happy.
+ return false;
}
@@ -1911,42 +2011,29 @@ void DescriptorArray::Set(int descriptor_number,
// Range check.
ASSERT(descriptor_number < number_of_descriptors());
- // Make sure none of the elements in desc are in new space.
- ASSERT(!HEAP->InNewSpace(desc->GetKey()));
- ASSERT(!HEAP->InNewSpace(desc->GetValue()));
-
- NoWriteBarrierSet(this,
- ToKeyIndex(descriptor_number),
- desc->GetKey());
+ NoIncrementalWriteBarrierSet(this,
+ ToKeyIndex(descriptor_number),
+ desc->GetKey());
FixedArray* content_array = GetContentArray();
- NoWriteBarrierSet(content_array,
- ToValueIndex(descriptor_number),
- desc->GetValue());
- NoWriteBarrierSet(content_array,
- ToDetailsIndex(descriptor_number),
- desc->GetDetails().AsSmi());
+ NoIncrementalWriteBarrierSet(content_array,
+ ToValueIndex(descriptor_number),
+ desc->GetValue());
+ NoIncrementalWriteBarrierSet(content_array,
+ ToDetailsIndex(descriptor_number),
+ desc->GetDetails().AsSmi());
}
-void DescriptorArray::CopyFrom(int index,
- DescriptorArray* src,
- int src_index,
- const WhitenessWitness& witness) {
- Descriptor desc;
- src->Get(src_index, &desc);
- Set(index, &desc, witness);
-}
-
-
-void DescriptorArray::NoWriteBarrierSwapDescriptors(int first, int second) {
- NoWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
+void DescriptorArray::NoIncrementalWriteBarrierSwapDescriptors(
+ int first, int second) {
+ NoIncrementalWriteBarrierSwap(this, ToKeyIndex(first), ToKeyIndex(second));
FixedArray* content_array = GetContentArray();
- NoWriteBarrierSwap(content_array,
- ToValueIndex(first),
- ToValueIndex(second));
- NoWriteBarrierSwap(content_array,
- ToDetailsIndex(first),
- ToDetailsIndex(second));
+ NoIncrementalWriteBarrierSwap(content_array,
+ ToValueIndex(first),
+ ToValueIndex(second));
+ NoIncrementalWriteBarrierSwap(content_array,
+ ToDetailsIndex(first),
+ ToDetailsIndex(second));
}
@@ -1986,13 +2073,14 @@ int HashTable<Shape, Key>::FindEntry(Key key) {
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
uint32_t capacity = Capacity();
- uint32_t entry = FirstProbe(Shape::Hash(key), capacity);
+ uint32_t entry = FirstProbe(HashTable<Shape, Key>::Hash(key), capacity);
uint32_t count = 1;
// EnsureCapacity will guarantee the hash table is never full.
while (true) {
Object* element = KeyAt(entry);
- if (element == isolate->heap()->undefined_value()) break; // Empty entry.
- if (element != isolate->heap()->null_value() &&
+ // Empty entry.
+ if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
+ if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
Shape::IsMatch(key, element)) return entry;
entry = NextProbe(entry, count++, capacity);
}
@@ -2000,14 +2088,14 @@ int HashTable<Shape, Key>::FindEntry(Isolate* isolate, Key key) {
}
-bool NumberDictionary::requires_slow_elements() {
+bool SeededNumberDictionary::requires_slow_elements() {
Object* max_index_object = get(kMaxNumberKeyIndex);
if (!max_index_object->IsSmi()) return false;
return 0 !=
(Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
}
-uint32_t NumberDictionary::max_number_key() {
+uint32_t SeededNumberDictionary::max_number_key() {
ASSERT(!requires_slow_elements());
Object* max_index_object = get(kMaxNumberKeyIndex);
if (!max_index_object->IsSmi()) return 0;
@@ -2015,7 +2103,7 @@ uint32_t NumberDictionary::max_number_key() {
return value >> kRequiresSlowElementsTagSize;
}
-void NumberDictionary::set_requires_slow_elements() {
+void SeededNumberDictionary::set_requires_slow_elements() {
set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}
@@ -2029,9 +2117,11 @@ CAST_ACCESSOR(FixedDoubleArray)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
+CAST_ACCESSOR(TypeFeedbackCells)
CAST_ACCESSOR(SymbolTable)
CAST_ACCESSOR(JSFunctionResultCache)
CAST_ACCESSOR(NormalizedMapCache)
+CAST_ACCESSOR(ScopeInfo)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
@@ -2099,7 +2189,6 @@ SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
SMI_ACCESSORS(String, length, kLengthOffset)
-SMI_ACCESSORS(SeqString, symbol_id, kSymbolIdOffset)
uint32_t String::hash_field() {
@@ -2294,15 +2383,41 @@ void ConsString::set_second(String* value, WriteBarrierMode mode) {
}
+bool ExternalString::is_short() {
+ InstanceType type = map()->instance_type();
+ return (type & kShortExternalStringMask) == kShortExternalStringTag;
+}
+
+
const ExternalAsciiString::Resource* ExternalAsciiString::resource() {
return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}
+void ExternalAsciiString::update_data_cache() {
+ if (is_short()) return;
+ const char** data_field =
+ reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
+ *data_field = resource()->data();
+}
+
+
void ExternalAsciiString::set_resource(
const ExternalAsciiString::Resource* resource) {
*reinterpret_cast<const Resource**>(
FIELD_ADDR(this, kResourceOffset)) = resource;
+ if (resource != NULL) update_data_cache();
+}
+
+
+const char* ExternalAsciiString::GetChars() {
+ return resource()->data();
+}
+
+
+uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
+ ASSERT(index >= 0 && index < length());
+ return GetChars()[index];
}
@@ -2311,10 +2426,36 @@ const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
}
+void ExternalTwoByteString::update_data_cache() {
+ if (is_short()) return;
+ const uint16_t** data_field =
+ reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
+ *data_field = resource()->data();
+}
+
+
void ExternalTwoByteString::set_resource(
const ExternalTwoByteString::Resource* resource) {
*reinterpret_cast<const Resource**>(
FIELD_ADDR(this, kResourceOffset)) = resource;
+ if (resource != NULL) update_data_cache();
+}
+
+
+const uint16_t* ExternalTwoByteString::GetChars() {
+ return resource()->data();
+}
+
+
+uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
+ ASSERT(index >= 0 && index < length());
+ return GetChars()[index];
+}
+
+
+const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
+ unsigned start) {
+ return GetChars() + start;
}
@@ -2749,14 +2890,14 @@ bool Map::is_extensible() {
void Map::set_attached_to_shared_function_info(bool value) {
if (value) {
- set_bit_field3(bit_field3() | (1 << kAttachedToSharedFunctionInfo));
+ set_bit_field2(bit_field2() | (1 << kAttachedToSharedFunctionInfo));
} else {
- set_bit_field3(bit_field3() & ~(1 << kAttachedToSharedFunctionInfo));
+ set_bit_field2(bit_field2() & ~(1 << kAttachedToSharedFunctionInfo));
}
}
bool Map::attached_to_shared_function_info() {
- return ((1 << kAttachedToSharedFunctionInfo) & bit_field3()) != 0;
+ return ((1 << kAttachedToSharedFunctionInfo) & bit_field2()) != 0;
}
@@ -2771,47 +2912,6 @@ void Map::set_is_shared(bool value) {
bool Map::is_shared() {
return ((1 << kIsShared) & bit_field3()) != 0;
}
-
-void Map::set_has_external_resource(bool value) {
- if (value) {
- set_bit_field(bit_field() | (1 << kHasExternalResource));
- } else {
- set_bit_field(bit_field() & ~(1 << kHasExternalResource));
- }
-}
-
-bool Map::has_external_resource()
-{
- return ((1 << kHasExternalResource) & bit_field()) != 0;
-}
-
-
-void Map::set_use_user_object_comparison(bool value) {
- if (value) {
- set_bit_field2(bit_field2() | (1 << kUseUserObjectComparison));
- } else {
- set_bit_field2(bit_field2() & ~(1 << kUseUserObjectComparison));
- }
-}
-
-bool Map::use_user_object_comparison() {
- return ((1 << kUseUserObjectComparison) & bit_field2()) != 0;
-}
-
-
-void Map::set_named_interceptor_is_fallback(bool value)
-{
- if (value) {
- set_bit_field3(bit_field3() | (1 << kNamedInterceptorIsFallback));
- } else {
- set_bit_field3(bit_field3() & ~(1 << kNamedInterceptorIsFallback));
- }
-}
-
-bool Map::named_interceptor_is_fallback()
-{
- return ((1 << kNamedInterceptorIsFallback) & bit_field3()) != 0;
-}
JSFunction* Map::unchecked_constructor() {
@@ -2904,26 +3004,26 @@ void Code::set_is_pregenerated(bool value) {
bool Code::optimizable() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
}
void Code::set_optimizable(bool value) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
}
bool Code::has_deoptimization_support() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}
void Code::set_has_deoptimization_support(bool value) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
@@ -2931,14 +3031,14 @@ void Code::set_has_deoptimization_support(bool value) {
bool Code::has_debug_break_slots() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}
void Code::set_has_debug_break_slots(bool value) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
@@ -2946,14 +3046,14 @@ void Code::set_has_debug_break_slots(bool value) {
bool Code::is_compiled_optimizable() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
return FullCodeFlagsIsCompiledOptimizable::decode(flags);
}
void Code::set_compiled_optimizable(bool value) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
@@ -2961,18 +3061,31 @@ void Code::set_compiled_optimizable(bool value) {
int Code::allow_osr_at_loop_nesting_level() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
return READ_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset);
}
void Code::set_allow_osr_at_loop_nesting_level(int level) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
ASSERT(level >= 0 && level <= kMaxLoopNestingMarker);
WRITE_BYTE_FIELD(this, kAllowOSRAtLoopNestingLevelOffset, level);
}
+int Code::profiler_ticks() {
+ ASSERT_EQ(FUNCTION, kind());
+ return READ_BYTE_FIELD(this, kProfilerTicksOffset);
+}
+
+
+void Code::set_profiler_ticks(int ticks) {
+ ASSERT_EQ(FUNCTION, kind());
+ ASSERT(ticks < 256);
+ WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
+}
+
+
unsigned Code::stack_slots() {
ASSERT(kind() == OPTIMIZED_FUNCTION);
return READ_UINT32_FIELD(this, kStackSlotsOffset);
@@ -2999,13 +3112,13 @@ void Code::set_safepoint_table_offset(unsigned offset) {
unsigned Code::stack_check_table_offset() {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
return READ_UINT32_FIELD(this, kStackCheckTableOffsetOffset);
}
void Code::set_stack_check_table_offset(unsigned offset) {
- ASSERT(kind() == FUNCTION);
+ ASSERT_EQ(FUNCTION, kind());
ASSERT(IsAligned(offset, static_cast<unsigned>(kIntSize)));
WRITE_UINT32_FIELD(this, kStackCheckTableOffsetOffset, offset);
}
@@ -3202,7 +3315,7 @@ void Map::set_prototype(Object* value, WriteBarrierMode mode) {
DescriptorArray* Map::instance_descriptors() {
Object* object = READ_FIELD(this, kInstanceDescriptorsOrBitField3Offset);
if (object->IsSmi()) {
- return HEAP->empty_descriptor_array();
+ return GetHeap()->empty_descriptor_array();
} else {
return DescriptorArray::cast(object);
}
@@ -3301,7 +3414,10 @@ ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(AccessorInfo, data, Object, kDataOffset)
ACCESSORS(AccessorInfo, name, Object, kNameOffset)
-ACCESSORS(AccessorInfo, flag, Smi, kFlagOffset)
+ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
+
+ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
+ACCESSORS(AccessorPair, setter, Object, kSetterOffset)
ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
@@ -3313,7 +3429,6 @@ ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
-ACCESSORS(InterceptorInfo, is_fallback, Smi, kFallbackOffset)
ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
@@ -3340,15 +3455,11 @@ ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
kAccessCheckInfoOffset)
-ACCESSORS(FunctionTemplateInfo, flag, Smi, kFlagOffset)
+ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)
ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
kInternalFieldCountOffset)
-ACCESSORS(ObjectTemplateInfo, has_external_resource, Object,
- kHasExternalResourceOffset)
-ACCESSORS(ObjectTemplateInfo, use_user_object_comparison, Object,
- kUseUserObjectComparisonOffset)
ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)
@@ -3358,17 +3469,18 @@ ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)
ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Object, kIdOffset)
-ACCESSORS(Script, line_offset, Smi, kLineOffsetOffset)
-ACCESSORS(Script, column_offset, Smi, kColumnOffsetOffset)
+ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
+ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, data, Object, kDataOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
-ACCESSORS(Script, type, Smi, kTypeOffset)
-ACCESSORS(Script, compilation_type, Smi, kCompilationTypeOffset)
+ACCESSORS_TO_SMI(Script, type, kTypeOffset)
+ACCESSORS_TO_SMI(Script, compilation_type, kCompilationTypeOffset)
+ACCESSORS_TO_SMI(Script, compilation_state, kCompilationStateOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
-ACCESSORS(Script, eval_from_instructions_offset, Smi,
- kEvalFrominstructionsOffsetOffset)
+ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
+ kEvalFrominstructionsOffsetOffset)
#ifdef ENABLE_DEBUGGER_SUPPORT
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
@@ -3376,9 +3488,9 @@ ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)
-ACCESSORS(BreakPointInfo, code_position, Smi, kCodePositionIndex)
-ACCESSORS(BreakPointInfo, source_position, Smi, kSourcePositionIndex)
-ACCESSORS(BreakPointInfo, statement_position, Smi, kStatementPositionIndex)
+ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
+ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
+ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif
@@ -3393,6 +3505,8 @@ ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)
ACCESSORS(SharedFunctionInfo, this_property_assignments, Object,
kThisPropertyAssignmentsOffset)
+SMI_ACCESSORS(SharedFunctionInfo, ic_age, kICAgeOffset)
+
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
kHiddenPrototypeBit)
@@ -3440,6 +3554,8 @@ SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
SMI_ACCESSORS(SharedFunctionInfo, this_property_assignments_count,
kThisPropertyAssignmentsCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
+SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
+SMI_ACCESSORS(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
#else
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
@@ -3490,6 +3606,9 @@ PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
this_property_assignments_count,
kThisPropertyAssignmentsCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, opt_count, kOptCountOffset)
+
+PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
+PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, deopt_counter, kDeoptCounterOffset)
#endif
@@ -3511,7 +3630,7 @@ BOOL_ACCESSORS(SharedFunctionInfo,
bool SharedFunctionInfo::IsInobjectSlackTrackingInProgress() {
- return initial_map() != HEAP->undefined_value();
+ return initial_map() != GetHeap()->undefined_value();
}
@@ -3533,31 +3652,49 @@ void SharedFunctionInfo::set_optimization_disabled(bool disable) {
}
-StrictModeFlag SharedFunctionInfo::strict_mode_flag() {
- return BooleanBit::get(compiler_hints(), kStrictModeFunction)
- ? kStrictMode : kNonStrictMode;
+LanguageMode SharedFunctionInfo::language_mode() {
+ int hints = compiler_hints();
+ if (BooleanBit::get(hints, kExtendedModeFunction)) {
+ ASSERT(BooleanBit::get(hints, kStrictModeFunction));
+ return EXTENDED_MODE;
+ }
+ return BooleanBit::get(hints, kStrictModeFunction)
+ ? STRICT_MODE : CLASSIC_MODE;
}
-void SharedFunctionInfo::set_strict_mode_flag(StrictModeFlag strict_mode_flag) {
- ASSERT(strict_mode_flag == kStrictMode ||
- strict_mode_flag == kNonStrictMode);
- bool value = strict_mode_flag == kStrictMode;
- set_compiler_hints(
- BooleanBit::set(compiler_hints(), kStrictModeFunction, value));
+void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
+ // We only allow language mode transitions that go set the same language mode
+ // again or go up in the chain:
+ // CLASSIC_MODE -> STRICT_MODE -> EXTENDED_MODE.
+ ASSERT(this->language_mode() == CLASSIC_MODE ||
+ this->language_mode() == language_mode ||
+ language_mode == EXTENDED_MODE);
+ int hints = compiler_hints();
+ hints = BooleanBit::set(
+ hints, kStrictModeFunction, language_mode != CLASSIC_MODE);
+ hints = BooleanBit::set(
+ hints, kExtendedModeFunction, language_mode == EXTENDED_MODE);
+ set_compiler_hints(hints);
}
-BOOL_GETTER(SharedFunctionInfo, compiler_hints, strict_mode,
- kStrictModeFunction)
-BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, qml_mode,
- kQmlModeFunction)
+bool SharedFunctionInfo::is_classic_mode() {
+ return !BooleanBit::get(compiler_hints(), kStrictModeFunction);
+}
+
+BOOL_GETTER(SharedFunctionInfo, compiler_hints, is_extended_mode,
+ kExtendedModeFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
name_should_print_as_anonymous,
kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_optimize,
+ kDontOptimize)
+BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_inline, kDontInline)
ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)
@@ -3611,13 +3748,12 @@ void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
}
-SerializedScopeInfo* SharedFunctionInfo::scope_info() {
- return reinterpret_cast<SerializedScopeInfo*>(
- READ_FIELD(this, kScopeInfoOffset));
+ScopeInfo* SharedFunctionInfo::scope_info() {
+ return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
}
-void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
+void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
WriteBarrierMode mode) {
WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
CONDITIONAL_WRITE_BARRIER(GetHeap(),
@@ -3628,16 +3764,6 @@ void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
}
-Smi* SharedFunctionInfo::deopt_counter() {
- return reinterpret_cast<Smi*>(READ_FIELD(this, kDeoptCounterOffset));
-}
-
-
-void SharedFunctionInfo::set_deopt_counter(Smi* value) {
- WRITE_FIELD(this, kDeoptCounterOffset, value);
-}
-
-
bool SharedFunctionInfo::is_compiled() {
return code() !=
Isolate::Current()->builtins()->builtin(Builtins::kLazyCompile);
@@ -3672,8 +3798,8 @@ int SharedFunctionInfo::code_age() {
void SharedFunctionInfo::set_code_age(int code_age) {
- set_compiler_hints(compiler_hints() |
- ((code_age & kCodeAgeMask) << kCodeAgeShift));
+ int hints = compiler_hints() & ~(kCodeAgeMask << kCodeAgeShift);
+ set_compiler_hints(hints | ((code_age & kCodeAgeMask) << kCodeAgeShift));
}
@@ -3784,6 +3910,40 @@ void JSFunction::set_initial_map(Map* value) {
}
+MaybeObject* JSFunction::set_initial_map_and_cache_transitions(
+ Map* initial_map) {
+ Context* global_context = context()->global_context();
+ Object* array_function =
+ global_context->get(Context::ARRAY_FUNCTION_INDEX);
+ if (array_function->IsJSFunction() &&
+ this == JSFunction::cast(array_function)) {
+ ASSERT(initial_map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
+
+ MaybeObject* maybe_map = initial_map->CopyDropTransitions();
+ Map* new_double_map = NULL;
+ if (!maybe_map->To<Map>(&new_double_map)) return maybe_map;
+ new_double_map->set_elements_kind(FAST_DOUBLE_ELEMENTS);
+ maybe_map = initial_map->AddElementsTransition(FAST_DOUBLE_ELEMENTS,
+ new_double_map);
+ if (maybe_map->IsFailure()) return maybe_map;
+
+ maybe_map = new_double_map->CopyDropTransitions();
+ Map* new_object_map = NULL;
+ if (!maybe_map->To<Map>(&new_object_map)) return maybe_map;
+ new_object_map->set_elements_kind(FAST_ELEMENTS);
+ maybe_map = new_double_map->AddElementsTransition(FAST_ELEMENTS,
+ new_object_map);
+ if (maybe_map->IsFailure()) return maybe_map;
+
+ global_context->set_smi_js_array_map(initial_map);
+ global_context->set_double_js_array_map(new_double_map);
+ global_context->set_object_js_array_map(new_object_map);
+ }
+ set_initial_map(initial_map);
+ return this;
+}
+
+
bool JSFunction::has_initial_map() {
return prototype_or_initial_map()->IsMap();
}
@@ -3908,11 +4068,6 @@ ACCESSORS(JSWeakMap, table, Object, kTableOffset)
ACCESSORS(JSWeakMap, next, Object, kNextOffset)
-ObjectHashTable* JSWeakMap::unchecked_table() {
- return reinterpret_cast<ObjectHashTable*>(READ_FIELD(this, kTableOffset));
-}
-
-
Address Foreign::foreign_address() {
return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}
@@ -3933,6 +4088,24 @@ JSValue* JSValue::cast(Object* obj) {
}
+ACCESSORS(JSDate, value, Object, kValueOffset)
+ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
+ACCESSORS(JSDate, year, Object, kYearOffset)
+ACCESSORS(JSDate, month, Object, kMonthOffset)
+ACCESSORS(JSDate, day, Object, kDayOffset)
+ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
+ACCESSORS(JSDate, hour, Object, kHourOffset)
+ACCESSORS(JSDate, min, Object, kMinOffset)
+ACCESSORS(JSDate, sec, Object, kSecOffset)
+
+
+JSDate* JSDate::cast(Object* obj) {
+ ASSERT(obj->IsJSDate());
+ ASSERT(HeapObject::cast(obj)->Size() == JSDate::kSize);
+ return reinterpret_cast<JSDate*>(obj);
+}
+
+
ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
@@ -3951,10 +4124,11 @@ JSMessageObject* JSMessageObject::cast(Object* obj) {
INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
+ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
-ACCESSORS(Code, next_code_flushing_candidate,
- Object, kNextCodeFlushingCandidateOffset)
-
+ACCESSORS(Code, type_feedback_info, Object, kTypeFeedbackInfoOffset)
+ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
+INT_ACCESSORS(Code, ic_age, kICAgeOffset)
byte* Code::instruction_start() {
return FIELD_ADDR(this, kHeaderSize);
@@ -4093,7 +4267,8 @@ ElementsKind JSObject::GetElementsKind() {
(map == GetHeap()->fixed_array_map() ||
map == GetHeap()->fixed_cow_array_map())) ||
(kind == FAST_DOUBLE_ELEMENTS &&
- fixed_array->IsFixedDoubleArray()) ||
+ (fixed_array->IsFixedDoubleArray() ||
+ fixed_array == GetHeap()->empty_fixed_array())) ||
(kind == DICTIONARY_ELEMENTS &&
fixed_array->IsFixedArray() &&
fixed_array->IsDictionary()) ||
@@ -4184,14 +4359,6 @@ bool JSObject::HasIndexedInterceptor() {
}
-bool JSObject::AllowsSetElementsLength() {
- bool result = elements()->IsFixedArray() ||
- elements()->IsFixedDoubleArray();
- ASSERT(result == !HasExternalArrayElements());
- return result;
-}
-
-
MaybeObject* JSObject::EnsureWritableFastElements() {
ASSERT(HasFastTypeElements());
FixedArray* elems = FixedArray::cast(elements());
@@ -4216,9 +4383,9 @@ StringDictionary* JSObject::property_dictionary() {
}
-NumberDictionary* JSObject::element_dictionary() {
+SeededNumberDictionary* JSObject::element_dictionary() {
ASSERT(HasDictionaryElements());
- return NumberDictionary::cast(elements());
+ return SeededNumberDictionary::cast(elements());
}
@@ -4241,13 +4408,15 @@ uint32_t String::Hash() {
}
-StringHasher::StringHasher(int length)
+StringHasher::StringHasher(int length, uint32_t seed)
: length_(length),
- raw_running_hash_(0),
+ raw_running_hash_(seed),
array_index_(0),
is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
is_first_char_(true),
- is_valid_(true) { }
+ is_valid_(true) {
+ ASSERT(FLAG_randomize_hashes || raw_running_hash_ == 0);
+}
bool StringHasher::has_trivial_hash() {
@@ -4255,7 +4424,11 @@ bool StringHasher::has_trivial_hash() {
}
-void StringHasher::AddCharacter(uc32 c) {
+void StringHasher::AddCharacter(uint32_t c) {
+ if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
+ AddSurrogatePair(c); // Not inlined.
+ return;
+ }
// Use the Jenkins one-at-a-time hash function to update the hash
// for the given character.
raw_running_hash_ += c;
@@ -4284,8 +4457,12 @@ void StringHasher::AddCharacter(uc32 c) {
}
-void StringHasher::AddCharacterNoIndex(uc32 c) {
+void StringHasher::AddCharacterNoIndex(uint32_t c) {
ASSERT(!is_array_index());
+ if (c > unibrow::Utf16::kMaxNonSurrogateCharCode) {
+ AddSurrogatePairNoIndex(c); // Not inlined.
+ return;
+ }
raw_running_hash_ += c;
raw_running_hash_ += (raw_running_hash_ << 10);
raw_running_hash_ ^= (raw_running_hash_ >> 6);
@@ -4299,7 +4476,7 @@ uint32_t StringHasher::GetHash() {
result += (result << 3);
result ^= (result >> 11);
result += (result << 15);
- if (result == 0) {
+ if ((result & String::kHashBitMask) == 0) {
result = 27;
}
return result;
@@ -4307,8 +4484,8 @@ uint32_t StringHasher::GetHash() {
template <typename schar>
-uint32_t HashSequentialString(const schar* chars, int length) {
- StringHasher hasher(length);
+uint32_t HashSequentialString(const schar* chars, int length, uint32_t seed) {
+ StringHasher hasher(length, seed);
if (!hasher.has_trivial_hash()) {
int i;
for (i = 0; hasher.is_array_index() && (i < length); i++) {
@@ -4453,16 +4630,27 @@ bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
}
-uint32_t NumberDictionaryShape::Hash(uint32_t key) {
- return ComputeIntegerHash(key);
+uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
+ return ComputeIntegerHash(key, 0);
}
-uint32_t NumberDictionaryShape::HashForObject(uint32_t key, Object* other) {
+uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
+ Object* other) {
ASSERT(other->IsNumber());
- return ComputeIntegerHash(static_cast<uint32_t>(other->Number()));
+ return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
+}
+
+uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
+ return ComputeIntegerHash(key, seed);
}
+uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
+ uint32_t seed,
+ Object* other) {
+ ASSERT(other->IsNumber());
+ return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
+}
MaybeObject* NumberDictionaryShape::AsObject(uint32_t key) {
return Isolate::Current()->heap()->NumberFromUint32(key);
@@ -4500,7 +4688,6 @@ bool ObjectHashTableShape<entrysize>::IsMatch(Object* key, Object* other) {
template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
- ASSERT(!key->IsUndefined() && !key->IsNull());
MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
return Smi::cast(maybe_hash->ToObjectChecked())->value();
}
@@ -4509,7 +4696,6 @@ uint32_t ObjectHashTableShape<entrysize>::Hash(Object* key) {
template <int entrysize>
uint32_t ObjectHashTableShape<entrysize>::HashForObject(Object* key,
Object* other) {
- ASSERT(!other->IsUndefined() && !other->IsNull());
MaybeObject* maybe_hash = other->GetHash(OMIT_CREATION);
return Smi::cast(maybe_hash->ToObjectChecked())->value();
}
@@ -4521,15 +4707,11 @@ MaybeObject* ObjectHashTableShape<entrysize>::AsObject(Object* key) {
}
-void ObjectHashTable::RemoveEntry(int entry) {
- RemoveEntry(entry, GetHeap());
-}
-
-
void Map::ClearCodeCache(Heap* heap) {
// No write barrier is needed since empty_fixed_array is not in new space.
// Please note this function is used during marking:
// - MarkCompactCollector::MarkUnmarkedObject
+ // - IncrementalMarking::Step
ASSERT(!heap->InNewSpace(heap->raw_unchecked_empty_fixed_array()));
WRITE_FIELD(this, kCodeCacheOffset, heap->raw_unchecked_empty_fixed_array());
}
@@ -4559,11 +4741,25 @@ void JSArray::set_length(Smi* length) {
}
-MaybeObject* JSArray::SetContent(FixedArray* storage) {
- MaybeObject* maybe_object = EnsureCanContainElements(storage);
- if (maybe_object->IsFailure()) return maybe_object;
- set_length(Smi::FromInt(storage->length()));
+bool JSArray::AllowsSetElementsLength() {
+ bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
+ ASSERT(result == !HasExternalArrayElements());
+ return result;
+}
+
+
+MaybeObject* JSArray::SetContent(FixedArrayBase* storage) {
+ MaybeObject* maybe_result = EnsureCanContainElements(
+ storage, ALLOW_COPIED_DOUBLE_ELEMENTS);
+ if (maybe_result->IsFailure()) return maybe_result;
+ ASSERT((storage->map() == GetHeap()->fixed_double_array_map() &&
+ GetElementsKind() == FAST_DOUBLE_ELEMENTS) ||
+ ((storage->map() != GetHeap()->fixed_double_array_map()) &&
+ ((GetElementsKind() == FAST_ELEMENTS) ||
+ (GetElementsKind() == FAST_SMI_ONLY_ELEMENTS &&
+ FixedArray::cast(storage)->ContainsOnlySmisOrHoles()))));
set_elements(storage);
+ set_length(Smi::FromInt(storage->length()));
return this;
}
@@ -4580,6 +4776,51 @@ MaybeObject* FixedDoubleArray::Copy() {
}
+void TypeFeedbackCells::SetAstId(int index, Smi* id) {
+ set(1 + index * 2, id);
+}
+
+
+Smi* TypeFeedbackCells::AstId(int index) {
+ return Smi::cast(get(1 + index * 2));
+}
+
+
+void TypeFeedbackCells::SetCell(int index, JSGlobalPropertyCell* cell) {
+ set(index * 2, cell);
+}
+
+
+JSGlobalPropertyCell* TypeFeedbackCells::Cell(int index) {
+ return JSGlobalPropertyCell::cast(get(index * 2));
+}
+
+
+Handle<Object> TypeFeedbackCells::UninitializedSentinel(Isolate* isolate) {
+ return isolate->factory()->the_hole_value();
+}
+
+
+Handle<Object> TypeFeedbackCells::MegamorphicSentinel(Isolate* isolate) {
+ return isolate->factory()->undefined_value();
+}
+
+
+Object* TypeFeedbackCells::RawUninitializedSentinel(Heap* heap) {
+ return heap->raw_unchecked_the_hole_value();
+}
+
+
+SMI_ACCESSORS(TypeFeedbackInfo, ic_total_count, kIcTotalCountOffset)
+SMI_ACCESSORS(TypeFeedbackInfo, ic_with_type_info_count,
+ kIcWithTypeinfoCountOffset)
+ACCESSORS(TypeFeedbackInfo, type_feedback_cells, TypeFeedbackCells,
+ kTypeFeedbackCellsOffset)
+
+
+SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
+
+
Relocatable::Relocatable(Isolate* isolate) {
ASSERT(isolate == Isolate::Current());
isolate_ = isolate;
@@ -4662,22 +4903,27 @@ void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
#undef SLOT_ADDR
-
+#undef TYPE_CHECKER
#undef CAST_ACCESSOR
#undef INT_ACCESSORS
-#undef SMI_ACCESSORS
#undef ACCESSORS
+#undef ACCESSORS_TO_SMI
+#undef SMI_ACCESSORS
+#undef BOOL_GETTER
+#undef BOOL_ACCESSORS
#undef FIELD_ADDR
#undef READ_FIELD
#undef WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
-#undef READ_MEMADDR_FIELD
-#undef WRITE_MEMADDR_FIELD
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
#undef WRITE_INT_FIELD
+#undef READ_INTPTR_FIELD
+#undef WRITE_INTPTR_FIELD
+#undef READ_UINT32_FIELD
+#undef WRITE_UINT32_FIELD
#undef READ_SHORT_FIELD
#undef WRITE_SHORT_FIELD
#undef READ_BYTE_FIELD
diff --git a/src/3rdparty/v8/src/objects-printer.cc b/src/3rdparty/v8/src/objects-printer.cc
index 1ca97de..2353a95 100644
--- a/src/3rdparty/v8/src/objects-printer.cc
+++ b/src/3rdparty/v8/src/objects-printer.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -151,6 +151,9 @@ void HeapObject::HeapObjectPrint(FILE* out) {
PrintF(out, "Value wrapper around:");
JSValue::cast(this)->value()->Print(out);
break;
+ case JS_DATE_TYPE:
+ JSDate::cast(this)->value()->Print(out);
+ break;
case CODE_TYPE:
Code::cast(this)->CodePrint(out);
break;
@@ -295,7 +298,9 @@ void JSObject::PrintProperties(FILE* out) {
case NULL_DESCRIPTOR:
PrintF(out, "(null descriptor)\n");
break;
- default:
+ case NORMAL: // only in slow mode
+ case HANDLER: // only in lookup results, not in descriptors
+ case INTERCEPTOR: // only in lookup results, not in descriptors
UNREACHABLE();
break;
}
@@ -444,6 +449,9 @@ static const char* TypeToString(InstanceType type) {
case EXTERNAL_ASCII_SYMBOL_TYPE:
case EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE:
case EXTERNAL_SYMBOL_TYPE: return "EXTERNAL_SYMBOL";
+ case SHORT_EXTERNAL_ASCII_SYMBOL_TYPE:
+ case SHORT_EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE:
+ case SHORT_EXTERNAL_SYMBOL_TYPE: return "SHORT_EXTERNAL_SYMBOL";
case ASCII_STRING_TYPE: return "ASCII_STRING";
case STRING_TYPE: return "TWO_BYTE_STRING";
case CONS_STRING_TYPE:
@@ -451,6 +459,9 @@ static const char* TypeToString(InstanceType type) {
case EXTERNAL_ASCII_STRING_TYPE:
case EXTERNAL_STRING_WITH_ASCII_DATA_TYPE:
case EXTERNAL_STRING_TYPE: return "EXTERNAL_STRING";
+ case SHORT_EXTERNAL_ASCII_STRING_TYPE:
+ case SHORT_EXTERNAL_STRING_WITH_ASCII_DATA_TYPE:
+ case SHORT_EXTERNAL_STRING_TYPE: return "SHORT_EXTERNAL_STRING";
case FIXED_ARRAY_TYPE: return "FIXED_ARRAY";
case BYTE_ARRAY_TYPE: return "BYTE_ARRAY";
case FREE_SPACE_TYPE: return "FREE_SPACE";
@@ -546,6 +557,21 @@ void PolymorphicCodeCache::PolymorphicCodeCachePrint(FILE* out) {
}
+void TypeFeedbackInfo::TypeFeedbackInfoPrint(FILE* out) {
+ HeapObject::PrintHeader(out, "TypeFeedbackInfo");
+ PrintF(out, "\n - ic_total_count: %d, ic_with_type_info_count: %d",
+ ic_total_count(), ic_with_type_info_count());
+ PrintF(out, "\n - type_feedback_cells: ");
+ type_feedback_cells()->FixedArrayPrint(out);
+}
+
+
+void AliasedArgumentsEntry::AliasedArgumentsEntryPrint(FILE* out) {
+ HeapObject::PrintHeader(out, "AliasedArgumentsEntry");
+ PrintF(out, "\n - aliased_context_slot: %d", aliased_context_slot());
+}
+
+
void FixedArray::FixedArrayPrint(FILE* out) {
HeapObject::PrintHeader(out, "FixedArray");
PrintF(out, " - length: %d", length());
@@ -561,7 +587,11 @@ void FixedDoubleArray::FixedDoubleArrayPrint(FILE* out) {
HeapObject::PrintHeader(out, "FixedDoubleArray");
PrintF(out, " - length: %d", length());
for (int i = 0; i < length(); i++) {
- PrintF(out, "\n [%d]: %g", i, get_scalar(i));
+ if (is_the_hole(i)) {
+ PrintF(out, "\n [%d]: <the hole>", i);
+ } else {
+ PrintF(out, "\n [%d]: %g", i, get_scalar(i));
+ }
}
PrintF(out, "\n");
}
@@ -619,7 +649,7 @@ void String::StringPrint(FILE* out) {
// This method is only meant to be called from gdb for debugging purposes.
-// Since the string can also be in two-byte encoding, non-ascii characters
+// Since the string can also be in two-byte encoding, non-ASCII characters
// will be ignored in the output.
char* String::ToAsciiArray() {
// Static so that subsequent calls frees previously allocated space.
@@ -633,6 +663,30 @@ char* String::ToAsciiArray() {
}
+static const char* const weekdays[] = {
+ "???", "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"
+};
+
+void JSDate::JSDatePrint(FILE* out) {
+ HeapObject::PrintHeader(out, "JSDate");
+ PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
+ PrintF(out, " - value = ");
+ value()->Print(out);
+ if (!year()->IsSmi()) {
+ PrintF(out, " - time = NaN\n");
+ } else {
+ PrintF(out, " - time = %s %04d/%02d/%02d %02d:%02d:%02d\n",
+ weekdays[weekday()->IsSmi() ? Smi::cast(weekday())->value() + 1 : 0],
+ year()->IsSmi() ? Smi::cast(year())->value() : -1,
+ month()->IsSmi() ? Smi::cast(month())->value() : -1,
+ day()->IsSmi() ? Smi::cast(day())->value() : -1,
+ hour()->IsSmi() ? Smi::cast(hour())->value() : -1,
+ min()->IsSmi() ? Smi::cast(min())->value() : -1,
+ sec()->IsSmi() ? Smi::cast(sec())->value() : -1);
+ }
+}
+
+
void JSProxy::JSProxyPrint(FILE* out) {
HeapObject::PrintHeader(out, "JSProxy");
PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
@@ -778,6 +832,15 @@ void AccessorInfo::AccessorInfoPrint(FILE* out) {
}
+void AccessorPair::AccessorPairPrint(FILE* out) {
+ HeapObject::PrintHeader(out, "AccessorPair");
+ PrintF(out, "\n - getter: ");
+ getter()->ShortPrint(out);
+ PrintF(out, "\n - setter: ");
+ setter()->ShortPrint(out);
+}
+
+
void AccessCheckInfo::AccessCheckInfoPrint(FILE* out) {
HeapObject::PrintHeader(out, "AccessCheckInfo");
PrintF(out, "\n - named_callback: ");
diff --git a/src/3rdparty/v8/src/objects-visiting-inl.h b/src/3rdparty/v8/src/objects-visiting-inl.h
index 6f0f61d..627d1bc 100644
--- a/src/3rdparty/v8/src/objects-visiting-inl.h
+++ b/src/3rdparty/v8/src/objects-visiting-inl.h
@@ -104,8 +104,12 @@ void Code::CodeIterateBody(ObjectVisitor* v) {
RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
+ // There are two places where we iterate code bodies: here and the
+ // templated CodeIterateBody (below). They should be kept in sync.
IteratePointer(v, kRelocationInfoOffset);
+ IteratePointer(v, kHandlerTableOffset);
IteratePointer(v, kDeoptimizationDataOffset);
+ IteratePointer(v, kTypeFeedbackInfoOffset);
RelocIterator it(this, mode_mask);
for (; !it.done(); it.next()) {
@@ -124,12 +128,20 @@ void Code::CodeIterateBody(Heap* heap) {
RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
+ // There are two places where we iterate code bodies: here and the
+ // non-templated CodeIterateBody (above). They should be kept in sync.
StaticVisitor::VisitPointer(
heap,
reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
StaticVisitor::VisitPointer(
heap,
+ reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
+ StaticVisitor::VisitPointer(
+ heap,
reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
+ StaticVisitor::VisitPointer(
+ heap,
+ reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
RelocIterator it(this, mode_mask);
for (; !it.done(); it.next()) {
diff --git a/src/3rdparty/v8/src/objects-visiting.cc b/src/3rdparty/v8/src/objects-visiting.cc
index a796283..c7c8a87 100644
--- a/src/3rdparty/v8/src/objects-visiting.cc
+++ b/src/3rdparty/v8/src/objects-visiting.cc
@@ -64,7 +64,7 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
case kExternalStringTag:
return GetVisitorIdForSize(kVisitDataObject,
kVisitDataObjectGeneric,
- ExternalString::kSize);
+ instance_size);
}
UNREACHABLE();
}
@@ -134,6 +134,7 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
case JS_OBJECT_TYPE:
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
case JS_VALUE_TYPE:
+ case JS_DATE_TYPE:
case JS_ARRAY_TYPE:
case JS_GLOBAL_PROXY_TYPE:
case JS_GLOBAL_OBJECT_TYPE:
diff --git a/src/3rdparty/v8/src/objects-visiting.h b/src/3rdparty/v8/src/objects-visiting.h
index e6ddfed..26e79ae 100644
--- a/src/3rdparty/v8/src/objects-visiting.h
+++ b/src/3rdparty/v8/src/objects-visiting.h
@@ -135,7 +135,7 @@ class StaticVisitorBase : public AllStatic {
(base == kVisitJSObject));
ASSERT(IsAligned(object_size, kPointerSize));
ASSERT(kMinObjectSizeInWords * kPointerSize <= object_size);
- ASSERT(object_size < Page::kMaxHeapObjectSize);
+ ASSERT(object_size < Page::kMaxNonCodeHeapObjectSize);
const VisitorId specialization = static_cast<VisitorId>(
base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords);
diff --git a/src/3rdparty/v8/src/objects.cc b/src/3rdparty/v8/src/objects.cc
index 3cd14a5..a4f63a1 100644
--- a/src/3rdparty/v8/src/objects.cc
+++ b/src/3rdparty/v8/src/objects.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -33,6 +33,7 @@
#include "codegen.h"
#include "debug.h"
#include "deoptimizer.h"
+#include "date.h"
#include "elements.h"
#include "execution.h"
#include "full-codegen.h"
@@ -56,58 +57,11 @@ namespace v8 {
namespace internal {
void PrintElementsKind(FILE* out, ElementsKind kind) {
- switch (kind) {
- case FAST_SMI_ONLY_ELEMENTS:
- PrintF(out, "FAST_SMI_ONLY_ELEMENTS");
- break;
- case FAST_ELEMENTS:
- PrintF(out, "FAST_ELEMENTS");
- break;
- case FAST_DOUBLE_ELEMENTS:
- PrintF(out, "FAST_DOUBLE_ELEMENTS");
- break;
- case DICTIONARY_ELEMENTS:
- PrintF(out, "DICTIONARY_ELEMENTS");
- break;
- case NON_STRICT_ARGUMENTS_ELEMENTS:
- PrintF(out, "NON_STRICT_ARGUMENTS_ELEMENTS");
- break;
- case EXTERNAL_BYTE_ELEMENTS:
- PrintF(out, "EXTERNAL_BYTE_ELEMENTS");
- break;
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- PrintF(out, "EXTERNAL_UNSIGNED_BYTE_ELEMENTS");
- break;
- case EXTERNAL_SHORT_ELEMENTS:
- PrintF(out, "EXTERNAL_SHORT_ELEMENTS");
- break;
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- PrintF(out, "EXTERNAL_UNSIGNED_SHORT_ELEMENTS");
- break;
- case EXTERNAL_INT_ELEMENTS:
- PrintF(out, "EXTERNAL_INT_ELEMENTS");
- break;
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
- PrintF(out, "EXTERNAL_UNSIGNED_INT_ELEMENTS");
- break;
- case EXTERNAL_FLOAT_ELEMENTS:
- PrintF(out, "EXTERNAL_FLOAT_ELEMENTS");
- break;
- case EXTERNAL_DOUBLE_ELEMENTS:
- PrintF(out, "EXTERNAL_DOUBLE_ELEMENTS");
- break;
- case EXTERNAL_PIXEL_ELEMENTS:
- PrintF(out, "EXTERNAL_DOUBLE_ELEMENTS");
- break;
- }
+ ElementsAccessor* accessor = ElementsAccessor::ForKind(kind);
+ PrintF(out, "%s", accessor->name());
}
-// Getters and setters are stored in a fixed array property. These are
-// constants for their indices.
-const int kGetterIndex = 0;
-const int kSetterIndex = 1;
-
MUST_USE_RESULT static MaybeObject* CreateJSValue(JSFunction* constructor,
Object* value) {
Object* result;
@@ -251,8 +205,8 @@ MaybeObject* JSObject::GetPropertyWithCallback(Object* receiver,
}
// __defineGetter__ callback
- if (structure->IsFixedArray()) {
- Object* getter = FixedArray::cast(structure)->get(kGetterIndex);
+ if (structure->IsAccessorPair()) {
+ Object* getter = AccessorPair::cast(structure)->getter();
if (getter->IsSpecFunction()) {
// TODO(rossberg): nicer would be to cast to some JSCallable here...
return GetPropertyWithDefinedGetter(receiver, JSReceiver::cast(getter));
@@ -334,7 +288,7 @@ MaybeObject* Object::GetPropertyWithDefinedGetter(Object* receiver,
bool has_pending_exception;
Handle<Object> result =
- Execution::Call(fun, self, 0, NULL, &has_pending_exception);
+ Execution::Call(fun, self, 0, NULL, &has_pending_exception, true);
// Check for pending exception and return the result.
if (has_pending_exception) return Failure::Exception();
return *result;
@@ -490,6 +444,16 @@ Object* JSObject::SetNormalizedProperty(LookupResult* result, Object* value) {
}
+Handle<Object> JSObject::SetNormalizedProperty(Handle<JSObject> object,
+ Handle<String> key,
+ Handle<Object> value,
+ PropertyDetails details) {
+ CALL_HEAP_FUNCTION(object->GetIsolate(),
+ object->SetNormalizedProperty(*key, *value, details),
+ Object);
+}
+
+
MaybeObject* JSObject::SetNormalizedProperty(String* name,
Object* value,
PropertyDetails details) {
@@ -728,10 +692,7 @@ MaybeObject* Object::GetElementWithReceiver(Object* receiver, uint32_t index) {
if (js_object->elements() != heap->empty_fixed_array()) {
MaybeObject* result = js_object->GetElementsAccessor()->Get(
- js_object->elements(),
- index,
- js_object,
- receiver);
+ receiver, js_object, index);
if (result != heap->the_hole_value()) return result;
}
}
@@ -931,7 +892,7 @@ MaybeObject* String::SlowTryFlatten(PretenureFlag pretenure) {
len - first_length);
}
cs->set_first(result);
- cs->set_second(heap->empty_string());
+ cs->set_second(heap->empty_string(), SKIP_WRITE_BARRIER);
return result;
}
default:
@@ -957,41 +918,39 @@ bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
#endif // DEBUG
Heap* heap = GetHeap();
int size = this->Size(); // Byte size of the original string.
- if (size < ExternalString::kSize) {
- // The string is too small to fit an external String in its place. This can
- // only happen for zero length strings.
+ if (size < ExternalString::kShortSize) {
return false;
}
- ASSERT(size >= ExternalString::kSize);
bool is_ascii = this->IsAsciiRepresentation();
bool is_symbol = this->IsSymbol();
- int length = this->length();
- int hash_field = this->hash_field();
// Morph the object to an external string by adjusting the map and
// reinitializing the fields.
- this->set_map(is_ascii ?
- heap->external_string_with_ascii_data_map() :
- heap->external_string_map());
+ if (size >= ExternalString::kSize) {
+ this->set_map_no_write_barrier(
+ is_symbol
+ ? (is_ascii ? heap->external_symbol_with_ascii_data_map()
+ : heap->external_symbol_map())
+ : (is_ascii ? heap->external_string_with_ascii_data_map()
+ : heap->external_string_map()));
+ } else {
+ this->set_map_no_write_barrier(
+ is_symbol
+ ? (is_ascii ? heap->short_external_symbol_with_ascii_data_map()
+ : heap->short_external_symbol_map())
+ : (is_ascii ? heap->short_external_string_with_ascii_data_map()
+ : heap->short_external_string_map()));
+ }
ExternalTwoByteString* self = ExternalTwoByteString::cast(this);
- self->set_length(length);
- self->set_hash_field(hash_field);
self->set_resource(resource);
- // Additionally make the object into an external symbol if the original string
- // was a symbol to start with.
- if (is_symbol) {
- self->Hash(); // Force regeneration of the hash value.
- // Now morph this external string into a external symbol.
- this->set_map(is_ascii ?
- heap->external_symbol_with_ascii_data_map() :
- heap->external_symbol_map());
- }
+ if (is_symbol) self->Hash(); // Force regeneration of the hash value.
// Fill the remainder of the string with dead wood.
int new_size = this->Size(); // Byte size of the external String object.
heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
if (Marking::IsBlack(Marking::MarkBitFrom(this))) {
- MemoryChunk::IncrementLiveBytes(this->address(), new_size - size);
+ MemoryChunk::IncrementLiveBytesFromMutator(this->address(),
+ new_size - size);
}
return true;
}
@@ -1011,38 +970,33 @@ bool String::MakeExternal(v8::String::ExternalAsciiStringResource* resource) {
#endif // DEBUG
Heap* heap = GetHeap();
int size = this->Size(); // Byte size of the original string.
- if (size < ExternalString::kSize) {
- // The string is too small to fit an external String in its place. This can
- // only happen for zero length strings.
+ if (size < ExternalString::kShortSize) {
return false;
}
- ASSERT(size >= ExternalString::kSize);
bool is_symbol = this->IsSymbol();
- int length = this->length();
- int hash_field = this->hash_field();
// Morph the object to an external string by adjusting the map and
- // reinitializing the fields.
- this->set_map(heap->external_ascii_string_map());
+ // reinitializing the fields. Use short version if space is limited.
+ if (size >= ExternalString::kSize) {
+ this->set_map_no_write_barrier(
+ is_symbol ? heap->external_ascii_symbol_map()
+ : heap->external_ascii_string_map());
+ } else {
+ this->set_map_no_write_barrier(
+ is_symbol ? heap->short_external_ascii_symbol_map()
+ : heap->short_external_ascii_string_map());
+ }
ExternalAsciiString* self = ExternalAsciiString::cast(this);
- self->set_length(length);
- self->set_hash_field(hash_field);
self->set_resource(resource);
- // Additionally make the object into an external symbol if the original string
- // was a symbol to start with.
- if (is_symbol) {
- self->Hash(); // Force regeneration of the hash value.
- // Now morph this external string into a external symbol.
- this->set_map(heap->external_ascii_symbol_map());
- }
+ if (is_symbol) self->Hash(); // Force regeneration of the hash value.
// Fill the remainder of the string with dead wood.
int new_size = this->Size(); // Byte size of the external String object.
heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
if (Marking::IsBlack(Marking::MarkBitFrom(this))) {
- MemoryChunk::IncrementLiveBytes(this->address(), new_size - size);
+ MemoryChunk::IncrementLiveBytesFromMutator(this->address(),
+ new_size - size);
}
-
return true;
}
@@ -1114,7 +1068,7 @@ void JSObject::JSObjectShortPrint(StringStream* accumulator) {
switch (map()->instance_type()) {
case JS_ARRAY_TYPE: {
double length = JSArray::cast(this)->length()->Number();
- accumulator->Add("<JS array[%u]>", static_cast<uint32_t>(length));
+ accumulator->Add("<JS Array[%u]>", static_cast<uint32_t>(length));
break;
}
case JS_WEAK_MAP_TYPE: {
@@ -1385,6 +1339,7 @@ void HeapObject::IterateBody(InstanceType type, int object_size,
case JS_OBJECT_TYPE:
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
case JS_VALUE_TYPE:
+ case JS_DATE_TYPE:
case JS_ARRAY_TYPE:
case JS_SET_TYPE:
case JS_MAP_TYPE:
@@ -1435,9 +1390,11 @@ void HeapObject::IterateBody(InstanceType type, int object_size,
case EXTERNAL_FLOAT_ARRAY_TYPE:
case EXTERNAL_DOUBLE_ARRAY_TYPE:
break;
- case SHARED_FUNCTION_INFO_TYPE:
- SharedFunctionInfo::BodyDescriptor::IterateBody(this, v);
+ case SHARED_FUNCTION_INFO_TYPE: {
+ SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(this);
+ shared->SharedFunctionInfoIterateBody(v);
break;
+ }
#define MAKE_STRUCT_CASE(NAME, Name, name) \
case NAME##_TYPE:
@@ -1656,8 +1613,6 @@ MaybeObject* JSObject::AddConstantFunctionProperty(
String* name,
JSFunction* function,
PropertyAttributes attributes) {
- ASSERT(!GetHeap()->InNewSpace(function));
-
// Allocate new instance descriptors with (name, function) added
ConstantFunctionDescriptor d(name, function, attributes);
Object* new_descriptors;
@@ -1760,7 +1715,7 @@ MaybeObject* JSObject::AddProperty(String* name,
Heap* heap = GetHeap();
if (!map_of_this->is_extensible()) {
if (strict_mode == kNonStrictMode) {
- return heap->undefined_value();
+ return value;
} else {
Handle<Object> args[1] = {Handle<String>(name)};
return heap->isolate()->Throw(
@@ -1772,7 +1727,7 @@ MaybeObject* JSObject::AddProperty(String* name,
// Ensure the descriptor array does not get too big.
if (map_of_this->instance_descriptors()->number_of_descriptors() <
DescriptorArray::kMaxNumberOfDescriptors) {
- if (value->IsJSFunction() && !heap->InNewSpace(value)) {
+ if (value->IsJSFunction()) {
return AddConstantFunctionProperty(name,
JSFunction::cast(value),
attributes);
@@ -1827,7 +1782,7 @@ MaybeObject* JSObject::ReplaceSlowProperty(String* name,
int new_enumeration_index = 0; // 0 means "Use the next available index."
if (old_index != -1) {
// All calls to ReplaceSlowProperty have had all transitions removed.
- ASSERT(!dictionary->DetailsAt(old_index).IsTransition());
+ ASSERT(!dictionary->ContainsTransition(old_index));
new_enumeration_index = dictionary->DetailsAt(old_index).index();
}
@@ -1977,13 +1932,23 @@ MaybeObject* JSObject::SetPropertyWithInterceptor(
}
+Handle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object,
+ Handle<String> key,
+ Handle<Object> value,
+ PropertyAttributes attributes,
+ StrictModeFlag strict_mode) {
+ CALL_HEAP_FUNCTION(object->GetIsolate(),
+ object->SetProperty(*key, *value, attributes, strict_mode),
+ Object);
+}
+
+
MaybeObject* JSReceiver::SetProperty(String* name,
Object* value,
PropertyAttributes attributes,
- StrictModeFlag strict_mode,
- bool skip_fallback_interceptor) {
+ StrictModeFlag strict_mode) {
LookupResult result(GetIsolate());
- LocalLookup(name, &result, skip_fallback_interceptor);
+ LocalLookup(name, &result);
return SetProperty(&result, name, value, attributes, strict_mode);
}
@@ -2035,8 +2000,8 @@ MaybeObject* JSObject::SetPropertyWithCallback(Object* structure,
return *value_handle;
}
- if (structure->IsFixedArray()) {
- Object* setter = FixedArray::cast(structure)->get(kSetterIndex);
+ if (structure->IsAccessorPair()) {
+ Object* setter = AccessorPair::cast(structure)->setter();
if (setter->IsSpecFunction()) {
// TODO(rossberg): nicer would be to cast to some JSCallable here...
return SetPropertyWithDefinedSetter(JSReceiver::cast(setter), value);
@@ -2124,9 +2089,10 @@ MaybeObject* JSObject::SetElementWithCallbackSetterInPrototypes(
if (!JSObject::cast(pt)->HasDictionaryElements()) {
continue;
}
- NumberDictionary* dictionary = JSObject::cast(pt)->element_dictionary();
+ SeededNumberDictionary* dictionary =
+ JSObject::cast(pt)->element_dictionary();
int entry = dictionary->FindEntry(index);
- if (entry != NumberDictionary::kNotFound) {
+ if (entry != SeededNumberDictionary::kNotFound) {
PropertyDetails details = dictionary->DetailsAt(entry);
if (details.type() == CALLBACKS) {
*found = true;
@@ -2359,7 +2325,9 @@ Object* Map::GetDescriptorContents(String* sentinel_name,
if (details.type() == ELEMENTS_TRANSITION) {
return descriptors->GetValue(index);
} else {
- *safe_to_add_transition = false;
+ if (safe_to_add_transition != NULL) {
+ *safe_to_add_transition = false;
+ }
}
}
return NULL;
@@ -2447,12 +2415,12 @@ Handle<Map> JSObject::GetElementsTransitionMap(Handle<JSObject> object,
ElementsKind to_kind) {
Isolate* isolate = object->GetIsolate();
CALL_HEAP_FUNCTION(isolate,
- object->GetElementsTransitionMap(to_kind),
+ object->GetElementsTransitionMap(isolate, to_kind),
Map);
}
-MaybeObject* JSObject::GetElementsTransitionMap(ElementsKind to_kind) {
+MaybeObject* JSObject::GetElementsTransitionMapSlow(ElementsKind to_kind) {
Map* current_map = map();
ElementsKind from_kind = current_map->elements_kind();
@@ -2494,9 +2462,9 @@ MaybeObject* JSObject::GetElementsTransitionMap(ElementsKind to_kind) {
// Only remember the map transition if the object's map is NOT equal to the
// global object_function's map and there is not an already existing
// non-matching element transition.
+ Context* global_context = GetIsolate()->context()->global_context();
bool allow_map_transition = safe_to_add_transition &&
- (GetIsolate()->context()->global_context()->object_function()->map() !=
- map());
+ (global_context->object_function()->map() != map());
if (allow_map_transition) {
MaybeObject* maybe_transition =
current_map->AddElementsTransition(to_kind, new_map);
@@ -3012,7 +2980,6 @@ MaybeObject* JSObject::SetPropertyForResult(LookupResult* result,
ASSERT(target_descriptors->GetType(number) == CONSTANT_FUNCTION);
JSFunction* function =
JSFunction::cast(target_descriptors->GetValue(number));
- ASSERT(!HEAP->InNewSpace(function));
if (value == function) {
set_map(target_map);
return value;
@@ -3024,10 +2991,11 @@ MaybeObject* JSObject::SetPropertyForResult(LookupResult* result,
case NULL_DESCRIPTOR:
case ELEMENTS_TRANSITION:
return ConvertDescriptorToFieldAndMapTransition(name, value, attributes);
- default:
+ case HANDLER:
UNREACHABLE();
+ return value;
}
- UNREACHABLE();
+ UNREACHABLE(); // keep the compiler happy
return value;
}
@@ -3041,6 +3009,18 @@ MaybeObject* JSObject::SetPropertyForResult(LookupResult* result,
// Note that this method cannot be used to set the prototype of a function
// because ConvertDescriptorToField() which is called in "case CALLBACKS:"
// doesn't handle function prototypes correctly.
+Handle<Object> JSObject::SetLocalPropertyIgnoreAttributes(
+ Handle<JSObject> object,
+ Handle<String> key,
+ Handle<Object> value,
+ PropertyAttributes attributes) {
+ CALL_HEAP_FUNCTION(
+ object->GetIsolate(),
+ object->SetLocalPropertyIgnoreAttributes(*key, *value, attributes),
+ Object);
+}
+
+
MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes(
String* name,
Object* value,
@@ -3112,10 +3092,11 @@ MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes(
case NULL_DESCRIPTOR:
case ELEMENTS_TRANSITION:
return ConvertDescriptorToFieldAndMapTransition(name, value, attributes);
- default:
+ case HANDLER:
UNREACHABLE();
+ return value;
}
- UNREACHABLE();
+ UNREACHABLE(); // keep the compiler happy
return value;
}
@@ -3330,6 +3311,15 @@ MaybeObject* JSObject::UpdateMapCodeCache(String* name, Code* code) {
}
+void JSObject::NormalizeProperties(Handle<JSObject> object,
+ PropertyNormalizationMode mode,
+ int expected_additional_properties) {
+ CALL_HEAP_FUNCTION_VOID(object->GetIsolate(),
+ object->NormalizeProperties(
+ mode, expected_additional_properties));
+}
+
+
MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
int expected_additional_properties) {
if (!HasFastProperties()) return this;
@@ -3348,12 +3338,10 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
} else {
property_count += 2; // Make space for two more properties.
}
- Object* obj;
- { MaybeObject* maybe_obj =
- StringDictionary::Allocate(property_count);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+ StringDictionary* dictionary;
+ { MaybeObject* maybe_dictionary = StringDictionary::Allocate(property_count);
+ if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
}
- StringDictionary* dictionary = StringDictionary::cast(obj);
DescriptorArray* descs = map_of_this->instance_descriptors();
for (int i = 0; i < descs->number_of_descriptors(); i++) {
@@ -3363,36 +3351,31 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
PropertyDetails d =
PropertyDetails(details.attributes(), NORMAL, details.index());
Object* value = descs->GetConstantFunction(i);
- Object* result;
- { MaybeObject* maybe_result =
- dictionary->Add(descs->GetKey(i), value, d);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- dictionary = StringDictionary::cast(result);
+ MaybeObject* maybe_dictionary =
+ dictionary->Add(descs->GetKey(i), value, d);
+ if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
break;
}
case FIELD: {
PropertyDetails d =
PropertyDetails(details.attributes(), NORMAL, details.index());
Object* value = FastPropertyAt(descs->GetFieldIndex(i));
- Object* result;
- { MaybeObject* maybe_result =
- dictionary->Add(descs->GetKey(i), value, d);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- dictionary = StringDictionary::cast(result);
+ MaybeObject* maybe_dictionary =
+ dictionary->Add(descs->GetKey(i), value, d);
+ if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
break;
}
case CALLBACKS: {
- PropertyDetails d =
- PropertyDetails(details.attributes(), CALLBACKS, details.index());
+ if (!descs->IsProperty(i)) break;
Object* value = descs->GetCallbacksObject(i);
- Object* result;
- { MaybeObject* maybe_result =
- dictionary->Add(descs->GetKey(i), value, d);
- if (!maybe_result->ToObject(&result)) return maybe_result;
+ if (value->IsAccessorPair()) {
+ MaybeObject* maybe_copy =
+ AccessorPair::cast(value)->CopyWithoutTransitions();
+ if (!maybe_copy->To(&value)) return maybe_copy;
}
- dictionary = StringDictionary::cast(result);
+ MaybeObject* maybe_dictionary =
+ dictionary->Add(descs->GetKey(i), value, details);
+ if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
break;
}
case MAP_TRANSITION:
@@ -3401,8 +3384,10 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
case INTERCEPTOR:
case ELEMENTS_TRANSITION:
break;
- default:
+ case HANDLER:
+ case NORMAL:
UNREACHABLE();
+ break;
}
}
@@ -3412,12 +3397,12 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
int index = map_of_this->instance_descriptors()->NextEnumerationIndex();
dictionary->SetNextEnumerationIndex(index);
- { MaybeObject* maybe_obj =
+ Map* new_map;
+ { MaybeObject* maybe_map =
current_heap->isolate()->context()->global_context()->
normalized_map_cache()->Get(this, mode);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+ if (!maybe_map->To(&new_map)) return maybe_map;
}
- Map* new_map = Map::cast(obj);
// We have now successfully allocated all the necessary objects.
// Changes can now be made with the guarantee that all of them take effect.
@@ -3429,7 +3414,8 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
current_heap->CreateFillerObjectAt(this->address() + new_instance_size,
instance_size_delta);
if (Marking::IsBlack(Marking::MarkBitFrom(this))) {
- MemoryChunk::IncrementLiveBytes(this->address(), -instance_size_delta);
+ MemoryChunk::IncrementLiveBytesFromMutator(this->address(),
+ -instance_size_delta);
}
@@ -3450,6 +3436,14 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
}
+void JSObject::TransformToFastProperties(Handle<JSObject> object,
+ int unused_property_fields) {
+ CALL_HEAP_FUNCTION_VOID(
+ object->GetIsolate(),
+ object->TransformToFastProperties(unused_property_fields));
+}
+
+
MaybeObject* JSObject::TransformToFastProperties(int unused_property_fields) {
if (HasFastProperties()) return this;
ASSERT(!IsGlobalObject());
@@ -3458,6 +3452,14 @@ MaybeObject* JSObject::TransformToFastProperties(int unused_property_fields) {
}
+Handle<SeededNumberDictionary> JSObject::NormalizeElements(
+ Handle<JSObject> object) {
+ CALL_HEAP_FUNCTION(object->GetIsolate(),
+ object->NormalizeElements(),
+ SeededNumberDictionary);
+}
+
+
MaybeObject* JSObject::NormalizeElements() {
ASSERT(!HasExternalArrayElements());
@@ -3482,11 +3484,11 @@ MaybeObject* JSObject::NormalizeElements() {
int old_capacity = 0;
int used_elements = 0;
GetElementsCapacityAndUsage(&old_capacity, &used_elements);
- NumberDictionary* dictionary = NULL;
+ SeededNumberDictionary* dictionary = NULL;
{ Object* object;
- MaybeObject* maybe = NumberDictionary::Allocate(used_elements);
+ MaybeObject* maybe = SeededNumberDictionary::Allocate(used_elements);
if (!maybe->ToObject(&object)) return maybe;
- dictionary = NumberDictionary::cast(object);
+ dictionary = SeededNumberDictionary::cast(object);
}
// Copy the elements to the new backing store.
@@ -3517,7 +3519,7 @@ MaybeObject* JSObject::NormalizeElements() {
MaybeObject* maybe_result =
dictionary->AddNumberEntry(i, value, details);
if (!maybe_result->ToObject(&result)) return maybe_result;
- dictionary = NumberDictionary::cast(result);
+ dictionary = SeededNumberDictionary::cast(result);
}
}
@@ -3528,7 +3530,8 @@ MaybeObject* JSObject::NormalizeElements() {
// Set the new map first to satify the elements type assert in
// set_elements().
Object* new_map;
- MaybeObject* maybe = GetElementsTransitionMap(DICTIONARY_ELEMENTS);
+ MaybeObject* maybe = GetElementsTransitionMap(GetIsolate(),
+ DICTIONARY_ELEMENTS);
if (!maybe->ToObject(&new_map)) return maybe;
set_map(Map::cast(new_map));
set_elements(dictionary);
@@ -3574,6 +3577,14 @@ MaybeObject* JSObject::SetIdentityHash(Object* hash, CreationFlag flag) {
}
+int JSObject::GetIdentityHash(Handle<JSObject> obj) {
+ CALL_AND_RETRY(obj->GetIsolate(),
+ obj->GetIdentityHash(ALLOW_CREATION),
+ return Smi::cast(__object__)->value(),
+ return 0);
+}
+
+
MaybeObject* JSObject::GetIdentityHash(CreationFlag flag) {
Object* stored_value = GetHiddenProperty(GetHeap()->identity_hash_symbol());
if (stored_value->IsSmi()) return stored_value;
@@ -3626,6 +3637,15 @@ Object* JSObject::GetHiddenProperty(String* key) {
}
+Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> obj,
+ Handle<String> key,
+ Handle<Object> value) {
+ CALL_HEAP_FUNCTION(obj->GetIsolate(),
+ obj->SetHiddenProperty(*key, *value),
+ Object);
+}
+
+
MaybeObject* JSObject::SetHiddenProperty(String* key, Object* value) {
if (IsJSGlobalProxy()) {
// For a proxy, use the prototype as target object.
@@ -3705,12 +3725,15 @@ MaybeObject* JSObject::GetHiddenPropertiesDictionary(bool create_if_absent) {
// code zero) it will always occupy the first entry if present.
DescriptorArray* descriptors = this->map()->instance_descriptors();
if ((descriptors->number_of_descriptors() > 0) &&
- (descriptors->GetKey(0) == GetHeap()->hidden_symbol()) &&
- descriptors->IsProperty(0)) {
- ASSERT(descriptors->GetType(0) == FIELD);
- Object* hidden_store =
- this->FastPropertyAt(descriptors->GetFieldIndex(0));
- return StringDictionary::cast(hidden_store);
+ (descriptors->GetKey(0) == GetHeap()->hidden_symbol())) {
+ if (descriptors->GetType(0) == FIELD) {
+ Object* hidden_store =
+ this->FastPropertyAt(descriptors->GetFieldIndex(0));
+ return StringDictionary::cast(hidden_store);
+ } else {
+ ASSERT(descriptors->GetType(0) == NULL_DESCRIPTOR ||
+ descriptors->GetType(0) == MAP_TRANSITION);
+ }
}
} else {
PropertyAttributes attributes;
@@ -3730,13 +3753,11 @@ MaybeObject* JSObject::GetHiddenPropertiesDictionary(bool create_if_absent) {
MaybeObject* dict_alloc = StringDictionary::Allocate(kInitialSize);
StringDictionary* dictionary;
if (!dict_alloc->To<StringDictionary>(&dictionary)) return dict_alloc;
- MaybeObject* store_result =
- SetPropertyPostInterceptor(GetHeap()->hidden_symbol(),
- dictionary,
- DONT_ENUM,
- kNonStrictMode);
- if (store_result->IsFailure()) return store_result;
- return dictionary;
+ // Using AddProperty or SetPropertyPostInterceptor here could fail, because
+ // object might be non-extensible.
+ return HasFastProperties()
+ ? AddFastProperty(GetHeap()->hidden_symbol(), dictionary, DONT_ENUM)
+ : AddSlowProperty(GetHeap()->hidden_symbol(), dictionary, DONT_ENUM);
}
@@ -3751,11 +3772,14 @@ MaybeObject* JSObject::SetHiddenPropertiesDictionary(
// code zero) it will always occupy the first entry if present.
DescriptorArray* descriptors = this->map()->instance_descriptors();
if ((descriptors->number_of_descriptors() > 0) &&
- (descriptors->GetKey(0) == GetHeap()->hidden_symbol()) &&
- descriptors->IsProperty(0)) {
- ASSERT(descriptors->GetType(0) == FIELD);
- this->FastPropertyAtPut(descriptors->GetFieldIndex(0), dictionary);
- return this;
+ (descriptors->GetKey(0) == GetHeap()->hidden_symbol())) {
+ if (descriptors->GetType(0) == FIELD) {
+ this->FastPropertyAtPut(descriptors->GetFieldIndex(0), dictionary);
+ return this;
+ } else {
+ ASSERT(descriptors->GetType(0) == NULL_DESCRIPTOR ||
+ descriptors->GetType(0) == MAP_TRANSITION);
+ }
}
}
MaybeObject* store_result =
@@ -3853,6 +3877,14 @@ MaybeObject* JSObject::DeleteElementWithInterceptor(uint32_t index) {
}
+Handle<Object> JSObject::DeleteElement(Handle<JSObject> obj,
+ uint32_t index) {
+ CALL_HEAP_FUNCTION(obj->GetIsolate(),
+ obj->DeleteElement(index, JSObject::NORMAL_DELETION),
+ Object);
+}
+
+
MaybeObject* JSObject::DeleteElement(uint32_t index, DeleteMode mode) {
Isolate* isolate = GetIsolate();
// Check access rights if needed.
@@ -3881,19 +3913,11 @@ MaybeObject* JSObject::DeleteElement(uint32_t index, DeleteMode mode) {
}
-MaybeObject* JSReceiver::DeleteProperty(String* name, DeleteMode mode) {
- if (IsJSProxy()) {
- return JSProxy::cast(this)->DeletePropertyWithHandler(name, mode);
- }
- return JSObject::cast(this)->DeleteProperty(name, mode);
-}
-
-
-MaybeObject* JSReceiver::DeleteElement(uint32_t index, DeleteMode mode) {
- if (IsJSProxy()) {
- return JSProxy::cast(this)->DeleteElementWithHandler(index, mode);
- }
- return JSObject::cast(this)->DeleteElement(index, mode);
+Handle<Object> JSObject::DeleteProperty(Handle<JSObject> obj,
+ Handle<String> prop) {
+ CALL_HEAP_FUNCTION(obj->GetIsolate(),
+ obj->DeleteProperty(*prop, JSObject::NORMAL_DELETION),
+ Object);
}
@@ -3954,6 +3978,22 @@ MaybeObject* JSObject::DeleteProperty(String* name, DeleteMode mode) {
}
+MaybeObject* JSReceiver::DeleteElement(uint32_t index, DeleteMode mode) {
+ if (IsJSProxy()) {
+ return JSProxy::cast(this)->DeleteElementWithHandler(index, mode);
+ }
+ return JSObject::cast(this)->DeleteElement(index, mode);
+}
+
+
+MaybeObject* JSReceiver::DeleteProperty(String* name, DeleteMode mode) {
+ if (IsJSProxy()) {
+ return JSProxy::cast(this)->DeletePropertyWithHandler(name, mode);
+ }
+ return JSObject::cast(this)->DeleteProperty(name, mode);
+}
+
+
bool JSObject::ReferencesObjectFromElements(FixedArray* elements,
ElementsKind kind,
Object* object) {
@@ -3968,7 +4008,8 @@ bool JSObject::ReferencesObjectFromElements(FixedArray* elements,
if (!element->IsTheHole() && element == object) return true;
}
} else {
- Object* key = NumberDictionary::cast(elements)->SlowReverseLookup(object);
+ Object* key =
+ SeededNumberDictionary::cast(elements)->SlowReverseLookup(object);
if (!key->IsUndefined()) return true;
}
return false;
@@ -4080,6 +4121,11 @@ bool JSObject::ReferencesObject(Object* obj) {
}
+Handle<Object> JSObject::PreventExtensions(Handle<JSObject> object) {
+ CALL_HEAP_FUNCTION(object->GetIsolate(), object->PreventExtensions(), Object);
+}
+
+
MaybeObject* JSObject::PreventExtensions() {
Isolate* isolate = GetIsolate();
if (IsAccessCheckNeeded() &&
@@ -4109,9 +4155,9 @@ MaybeObject* JSObject::PreventExtensions() {
}
// If there are fast elements we normalize.
- NumberDictionary* dictionary = NULL;
+ SeededNumberDictionary* dictionary = NULL;
{ MaybeObject* maybe = NormalizeElements();
- if (!maybe->To<NumberDictionary>(&dictionary)) return maybe;
+ if (!maybe->To<SeededNumberDictionary>(&dictionary)) return maybe;
}
ASSERT(HasDictionaryElements() || HasDictionaryArgumentsElements());
// Make sure that we never go back to fast case.
@@ -4157,11 +4203,14 @@ bool JSReceiver::IsSimpleEnum() {
}
-int Map::NumberOfDescribedProperties() {
+int Map::NumberOfDescribedProperties(PropertyAttributes filter) {
int result = 0;
DescriptorArray* descs = instance_descriptors();
for (int i = 0; i < descs->number_of_descriptors(); i++) {
- if (descs->IsProperty(i)) result++;
+ PropertyDetails details(descs->GetDetails(i));
+ if (descs->IsProperty(i) && (details.attributes() & filter) == 0) {
+ result++;
+ }
}
return result;
}
@@ -4202,8 +4251,7 @@ AccessorDescriptor* Map::FindAccessor(String* name) {
}
-void JSReceiver::LocalLookup(String* name, LookupResult* result,
- bool skip_fallback_interceptor) {
+void JSReceiver::LocalLookup(String* name, LookupResult* result) {
ASSERT(name->IsString());
Heap* heap = GetHeap();
@@ -4235,31 +4283,23 @@ void JSReceiver::LocalLookup(String* name, LookupResult* result,
}
// Check for lookup interceptor except when bootstrapping.
- bool wouldIntercept = js_object->HasNamedInterceptor() &&
- !heap->isolate()->bootstrapper()->IsActive();
- if (wouldIntercept && !map()->named_interceptor_is_fallback()) {
+ if (js_object->HasNamedInterceptor() &&
+ !heap->isolate()->bootstrapper()->IsActive()) {
result->InterceptorResult(js_object);
return;
}
js_object->LocalLookupRealNamedProperty(name, result);
-
- if (wouldIntercept && !skip_fallback_interceptor && !result->IsProperty() &&
- map()->named_interceptor_is_fallback()) {
- result->InterceptorResult(js_object);
- return;
- }
}
-void JSReceiver::Lookup(String* name, LookupResult* result,
- bool skip_fallback_interceptor) {
+void JSReceiver::Lookup(String* name, LookupResult* result) {
// Ecma-262 3rd 8.6.2.4
Heap* heap = GetHeap();
for (Object* current = this;
current != heap->null_value();
current = JSObject::cast(current)->GetPrototype()) {
- JSReceiver::cast(current)->LocalLookup(name, result, skip_fallback_interceptor);
+ JSReceiver::cast(current)->LocalLookup(name, result);
if (result->IsProperty()) return;
}
result->NotFound();
@@ -4273,132 +4313,135 @@ void JSObject::LookupCallback(String* name, LookupResult* result) {
current != heap->null_value() && current->IsJSObject();
current = JSObject::cast(current)->GetPrototype()) {
JSObject::cast(current)->LocalLookupRealNamedProperty(name, result);
- if (result->IsProperty() && result->type() == CALLBACKS) return;
+ if (result->IsFound() && result->type() == CALLBACKS) return;
}
result->NotFound();
}
-// Search for a getter or setter in an elements dictionary and update its
-// attributes. Returns either undefined if the element is read-only, or the
-// getter/setter pair (fixed array) if there is an existing one, or the hole
-// value if the element does not exist or is a normal non-getter/setter data
-// element.
-static Object* UpdateGetterSetterInDictionary(NumberDictionary* dictionary,
- uint32_t index,
- PropertyAttributes attributes,
- Heap* heap) {
+// Try to update an accessor in an elements dictionary. Return true if the
+// update succeeded, and false otherwise.
+static bool UpdateGetterSetterInDictionary(
+ SeededNumberDictionary* dictionary,
+ uint32_t index,
+ Object* getter,
+ Object* setter,
+ PropertyAttributes attributes) {
int entry = dictionary->FindEntry(index);
- if (entry != NumberDictionary::kNotFound) {
+ if (entry != SeededNumberDictionary::kNotFound) {
Object* result = dictionary->ValueAt(entry);
PropertyDetails details = dictionary->DetailsAt(entry);
- if (details.IsReadOnly()) return heap->undefined_value();
- if (details.type() == CALLBACKS && result->IsFixedArray()) {
+ if (details.type() == CALLBACKS && result->IsAccessorPair()) {
+ ASSERT(!details.IsDontDelete());
if (details.attributes() != attributes) {
dictionary->DetailsAtPut(entry,
PropertyDetails(attributes, CALLBACKS, index));
}
- return result;
+ AccessorPair::cast(result)->SetComponents(getter, setter);
+ return true;
}
}
- return heap->the_hole_value();
+ return false;
}
-MaybeObject* JSObject::DefineGetterSetter(String* name,
- PropertyAttributes attributes) {
- Heap* heap = GetHeap();
- // Make sure that the top context does not change when doing callbacks or
- // interceptor calls.
- AssertNoContextChange ncc;
-
- // Try to flatten before operating on the string.
- name->TryFlatten();
-
- if (!CanSetCallback(name)) {
- return heap->undefined_value();
- }
-
- uint32_t index = 0;
- bool is_element = name->AsArrayIndex(&index);
-
- if (is_element) {
- switch (GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS:
- case FAST_DOUBLE_ELEMENTS:
- break;
- case EXTERNAL_PIXEL_ELEMENTS:
- case EXTERNAL_BYTE_ELEMENTS:
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- case EXTERNAL_SHORT_ELEMENTS:
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- case EXTERNAL_INT_ELEMENTS:
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
- case EXTERNAL_FLOAT_ELEMENTS:
- case EXTERNAL_DOUBLE_ELEMENTS:
- // Ignore getters and setters on pixel and external array
- // elements.
- return heap->undefined_value();
- case DICTIONARY_ELEMENTS: {
- Object* probe = UpdateGetterSetterInDictionary(element_dictionary(),
- index,
- attributes,
- heap);
- if (!probe->IsTheHole()) return probe;
- // Otherwise allow to override it.
- break;
+MaybeObject* JSObject::DefineElementAccessor(uint32_t index,
+ Object* getter,
+ Object* setter,
+ PropertyAttributes attributes) {
+ switch (GetElementsKind()) {
+ case FAST_SMI_ONLY_ELEMENTS:
+ case FAST_ELEMENTS:
+ case FAST_DOUBLE_ELEMENTS:
+ break;
+ case EXTERNAL_PIXEL_ELEMENTS:
+ case EXTERNAL_BYTE_ELEMENTS:
+ case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
+ case EXTERNAL_SHORT_ELEMENTS:
+ case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
+ case EXTERNAL_INT_ELEMENTS:
+ case EXTERNAL_UNSIGNED_INT_ELEMENTS:
+ case EXTERNAL_FLOAT_ELEMENTS:
+ case EXTERNAL_DOUBLE_ELEMENTS:
+ // Ignore getters and setters on pixel and external array elements.
+ return GetHeap()->undefined_value();
+ case DICTIONARY_ELEMENTS:
+ if (UpdateGetterSetterInDictionary(element_dictionary(),
+ index,
+ getter,
+ setter,
+ attributes)) {
+ return GetHeap()->undefined_value();
}
- case NON_STRICT_ARGUMENTS_ELEMENTS: {
- // Ascertain whether we have read-only properties or an existing
- // getter/setter pair in an arguments elements dictionary backing
- // store.
- FixedArray* parameter_map = FixedArray::cast(elements());
- uint32_t length = parameter_map->length();
- Object* probe =
- index < (length - 2) ? parameter_map->get(index + 2) : NULL;
- if (probe == NULL || probe->IsTheHole()) {
- FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
- if (arguments->IsDictionary()) {
- NumberDictionary* dictionary = NumberDictionary::cast(arguments);
- probe = UpdateGetterSetterInDictionary(dictionary,
- index,
- attributes,
- heap);
- if (!probe->IsTheHole()) return probe;
+ break;
+ case NON_STRICT_ARGUMENTS_ELEMENTS: {
+ // Ascertain whether we have read-only properties or an existing
+ // getter/setter pair in an arguments elements dictionary backing
+ // store.
+ FixedArray* parameter_map = FixedArray::cast(elements());
+ uint32_t length = parameter_map->length();
+ Object* probe =
+ index < (length - 2) ? parameter_map->get(index + 2) : NULL;
+ if (probe == NULL || probe->IsTheHole()) {
+ FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
+ if (arguments->IsDictionary()) {
+ SeededNumberDictionary* dictionary =
+ SeededNumberDictionary::cast(arguments);
+ if (UpdateGetterSetterInDictionary(dictionary,
+ index,
+ getter,
+ setter,
+ attributes)) {
+ return GetHeap()->undefined_value();
}
}
- break;
}
+ break;
}
- } else {
- // Lookup the name.
- LookupResult result(heap->isolate());
- LocalLookup(name, &result);
- if (result.IsProperty()) {
- if (result.IsReadOnly()) return heap->undefined_value();
- if (result.type() == CALLBACKS) {
- Object* obj = result.GetCallbackObject();
- // Need to preserve old getters/setters.
- if (obj->IsFixedArray()) {
- // Use set to update attributes.
- return SetPropertyCallback(name, obj, attributes);
+ }
+
+ AccessorPair* accessors;
+ { MaybeObject* maybe_accessors = GetHeap()->AllocateAccessorPair();
+ if (!maybe_accessors->To(&accessors)) return maybe_accessors;
+ }
+ accessors->SetComponents(getter, setter);
+
+ return SetElementCallback(index, accessors, attributes);
+}
+
+
+MaybeObject* JSObject::DefinePropertyAccessor(String* name,
+ Object* getter,
+ Object* setter,
+ PropertyAttributes attributes) {
+ // Lookup the name.
+ LookupResult result(GetHeap()->isolate());
+ LocalLookupRealNamedProperty(name, &result);
+ if (result.IsFound()) {
+ if (result.type() == CALLBACKS) {
+ ASSERT(!result.IsDontDelete());
+ Object* obj = result.GetCallbackObject();
+ // Need to preserve old getters/setters.
+ if (obj->IsAccessorPair()) {
+ AccessorPair* copy;
+ { MaybeObject* maybe_copy =
+ AccessorPair::cast(obj)->CopyWithoutTransitions();
+ if (!maybe_copy->To(&copy)) return maybe_copy;
}
+ copy->SetComponents(getter, setter);
+ // Use set to update attributes.
+ return SetPropertyCallback(name, copy, attributes);
}
}
}
- // Allocate the fixed array to hold getter and setter.
- Object* structure;
- { MaybeObject* maybe_structure = heap->AllocateFixedArray(2, TENURED);
- if (!maybe_structure->ToObject(&structure)) return maybe_structure;
+ AccessorPair* accessors;
+ { MaybeObject* maybe_accessors = GetHeap()->AllocateAccessorPair();
+ if (!maybe_accessors->To(&accessors)) return maybe_accessors;
}
+ accessors->SetComponents(getter, setter);
- if (is_element) {
- return SetElementCallback(index, structure, attributes);
- } else {
- return SetPropertyCallback(name, structure, attributes);
- }
+ return SetPropertyCallback(name, accessors, attributes);
}
@@ -4432,19 +4475,15 @@ MaybeObject* JSObject::SetElementCallback(uint32_t index,
PropertyDetails details = PropertyDetails(attributes, CALLBACKS);
// Normalize elements to make this operation simple.
- NumberDictionary* dictionary = NULL;
- { Object* result;
- MaybeObject* maybe = NormalizeElements();
- if (!maybe->ToObject(&result)) return maybe;
- dictionary = NumberDictionary::cast(result);
+ SeededNumberDictionary* dictionary;
+ { MaybeObject* maybe_dictionary = NormalizeElements();
+ if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
}
ASSERT(HasDictionaryElements() || HasDictionaryArgumentsElements());
// Update the dictionary with the new CALLBACKS property.
- { Object* result;
- MaybeObject* maybe = dictionary->Set(index, structure, details);
- if (!maybe->ToObject(&result)) return maybe;
- dictionary = NumberDictionary::cast(result);
+ { MaybeObject* maybe_dictionary = dictionary->Set(index, structure, details);
+ if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;
}
dictionary->set_requires_slow_elements();
@@ -4456,8 +4495,7 @@ MaybeObject* JSObject::SetElementCallback(uint32_t index,
// switch to a direct backing store without the parameter map. This
// would allow GC of the context.
FixedArray* parameter_map = FixedArray::cast(elements());
- uint32_t length = parameter_map->length();
- if (index < length - 2) {
+ if (index < static_cast<uint32_t>(parameter_map->length()) - 2) {
parameter_map->set(index + 2, GetHeap()->the_hole_value());
}
parameter_map->set(1, dictionary);
@@ -4465,33 +4503,26 @@ MaybeObject* JSObject::SetElementCallback(uint32_t index,
set_elements(dictionary);
}
- return structure;
+ return GetHeap()->undefined_value();
}
MaybeObject* JSObject::SetPropertyCallback(String* name,
Object* structure,
PropertyAttributes attributes) {
- PropertyDetails details = PropertyDetails(attributes, CALLBACKS);
-
- bool convert_back_to_fast = HasFastProperties() &&
- (map()->instance_descriptors()->number_of_descriptors()
- < DescriptorArray::kMaxNumberOfDescriptors);
-
// Normalize object to make this operation simple.
- Object* ok;
{ MaybeObject* maybe_ok = NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0);
- if (!maybe_ok->ToObject(&ok)) return maybe_ok;
+ if (maybe_ok->IsFailure()) return maybe_ok;
}
// For the global object allocate a new map to invalidate the global inline
// caches which have a global property cell reference directly in the code.
if (IsGlobalObject()) {
- Object* new_map;
+ Map* new_map;
{ MaybeObject* maybe_new_map = map()->CopyDropDescriptors();
- if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
}
- set_map(Map::cast(new_map));
+ set_map(new_map);
// When running crankshaft, changing the map is not enough. We
// need to deoptimize all functions that rely on this global
// object.
@@ -4499,24 +4530,29 @@ MaybeObject* JSObject::SetPropertyCallback(String* name,
}
// Update the dictionary with the new CALLBACKS property.
- Object* result;
- { MaybeObject* maybe_result = SetNormalizedProperty(name, structure, details);
- if (!maybe_result->ToObject(&result)) return maybe_result;
+ PropertyDetails details = PropertyDetails(attributes, CALLBACKS);
+ { MaybeObject* maybe_ok = SetNormalizedProperty(name, structure, details);
+ if (maybe_ok->IsFailure()) return maybe_ok;
}
- if (convert_back_to_fast) {
- { MaybeObject* maybe_ok = TransformToFastProperties(0);
- if (!maybe_ok->ToObject(&ok)) return maybe_ok;
- }
- }
- return result;
+ return GetHeap()->undefined_value();
+}
+
+
+void JSObject::DefineAccessor(Handle<JSObject> object,
+ Handle<String> name,
+ Handle<Object> getter,
+ Handle<Object> setter,
+ PropertyAttributes attributes) {
+ CALL_HEAP_FUNCTION_VOID(
+ object->GetIsolate(),
+ object->DefineAccessor(*name, *getter, *setter, attributes));
}
MaybeObject* JSObject::DefineAccessor(String* name,
- bool is_getter,
- Object* fun,
+ Object* getter,
+ Object* setter,
PropertyAttributes attributes) {
- ASSERT(fun->IsSpecFunction() || fun->IsUndefined());
Isolate* isolate = GetIsolate();
// Check access rights if needed.
if (IsAccessCheckNeeded() &&
@@ -4529,17 +4565,23 @@ MaybeObject* JSObject::DefineAccessor(String* name,
Object* proto = GetPrototype();
if (proto->IsNull()) return this;
ASSERT(proto->IsJSGlobalObject());
- return JSObject::cast(proto)->DefineAccessor(name, is_getter,
- fun, attributes);
+ return JSObject::cast(proto)->DefineAccessor(
+ name, getter, setter, attributes);
}
- Object* array;
- { MaybeObject* maybe_array = DefineGetterSetter(name, attributes);
- if (!maybe_array->ToObject(&array)) return maybe_array;
- }
- if (array->IsUndefined()) return array;
- FixedArray::cast(array)->set(is_getter ? 0 : 1, fun);
- return this;
+ // Make sure that the top context does not change when doing callbacks or
+ // interceptor calls.
+ AssertNoContextChange ncc;
+
+ // Try to flatten before operating on the string.
+ name->TryFlatten();
+
+ if (!CanSetCallback(name)) return isolate->heap()->undefined_value();
+
+ uint32_t index = 0;
+ return name->AsArrayIndex(&index) ?
+ DefineElementAccessor(index, getter, setter, attributes) :
+ DefinePropertyAccessor(name, getter, setter, attributes);
}
@@ -4602,10 +4644,9 @@ MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) {
break;
}
- Object* ok;
{ MaybeObject* maybe_ok =
SetElementCallback(index, info, info->property_attributes());
- if (!maybe_ok->ToObject(&ok)) return maybe_ok;
+ if (maybe_ok->IsFailure()) return maybe_ok;
}
} else {
// Lookup the name.
@@ -4616,10 +4657,9 @@ MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) {
if (result.IsProperty() && (result.IsReadOnly() || result.IsDontDelete())) {
return isolate->heap()->undefined_value();
}
- Object* ok;
{ MaybeObject* maybe_ok =
SetPropertyCallback(name, info, info->property_attributes());
- if (!maybe_ok->ToObject(&ok)) return maybe_ok;
+ if (maybe_ok->IsFailure()) return maybe_ok;
}
}
@@ -4627,7 +4667,7 @@ MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) {
}
-Object* JSObject::LookupAccessor(String* name, bool is_getter) {
+Object* JSObject::LookupAccessor(String* name, AccessorComponent component) {
Heap* heap = GetHeap();
// Make sure that the top context does not change when doing callbacks or
@@ -4642,7 +4682,6 @@ Object* JSObject::LookupAccessor(String* name, bool is_getter) {
}
// Make the lookup and include prototypes.
- int accessor_index = is_getter ? kGetterIndex : kSetterIndex;
uint32_t index = 0;
if (name->AsArrayIndex(&index)) {
for (Object* obj = this;
@@ -4650,15 +4689,13 @@ Object* JSObject::LookupAccessor(String* name, bool is_getter) {
obj = JSObject::cast(obj)->GetPrototype()) {
JSObject* js_object = JSObject::cast(obj);
if (js_object->HasDictionaryElements()) {
- NumberDictionary* dictionary = js_object->element_dictionary();
+ SeededNumberDictionary* dictionary = js_object->element_dictionary();
int entry = dictionary->FindEntry(index);
- if (entry != NumberDictionary::kNotFound) {
+ if (entry != SeededNumberDictionary::kNotFound) {
Object* element = dictionary->ValueAt(entry);
- PropertyDetails details = dictionary->DetailsAt(entry);
- if (details.type() == CALLBACKS) {
- if (element->IsFixedArray()) {
- return FixedArray::cast(element)->get(accessor_index);
- }
+ if (dictionary->DetailsAt(entry).type() == CALLBACKS &&
+ element->IsAccessorPair()) {
+ return AccessorPair::cast(element)->GetComponent(component);
}
}
}
@@ -4673,8 +4710,8 @@ Object* JSObject::LookupAccessor(String* name, bool is_getter) {
if (result.IsReadOnly()) return heap->undefined_value();
if (result.type() == CALLBACKS) {
Object* obj = result.GetCallbackObject();
- if (obj->IsFixedArray()) {
- return FixedArray::cast(obj)->get(accessor_index);
+ if (obj->IsAccessorPair()) {
+ return AccessorPair::cast(obj)->GetComponent(component);
}
}
}
@@ -4848,75 +4885,219 @@ void Map::RemoveFromCodeCache(String* name, Code* code, int index) {
}
-void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
- // Traverse the transition tree without using a stack. We do this by
- // reversing the pointers in the maps and descriptor arrays.
- Map* current = this;
- Map* meta_map = GetHeap()->meta_map();
- Object** map_or_index_field = NULL;
- while (current != meta_map) {
- DescriptorArray* d = reinterpret_cast<DescriptorArray*>(
- *RawField(current, Map::kInstanceDescriptorsOrBitField3Offset));
- if (!d->IsEmpty()) {
- FixedArray* contents = reinterpret_cast<FixedArray*>(
- d->get(DescriptorArray::kContentArrayIndex));
- map_or_index_field = RawField(contents, HeapObject::kMapOffset);
- Object* map_or_index = *map_or_index_field;
- bool map_done = true; // Controls a nested continue statement.
- for (int i = map_or_index->IsSmi() ? Smi::cast(map_or_index)->value() : 0;
- i < contents->length();
- i += 2) {
- PropertyDetails details(Smi::cast(contents->get(i + 1)));
- if (details.IsTransition()) {
- // Found a map in the transition array. We record our progress in
- // the transition array by recording the current map in the map field
- // of the next map and recording the index in the transition array in
- // the map field of the array.
- Map* next = Map::cast(contents->get(i));
- next->set_map_unsafe(current);
- *map_or_index_field = Smi::FromInt(i + 2);
- current = next;
- map_done = false;
+// An iterator over all map transitions in an descriptor array, reusing the map
+// field of the contens array while it is running.
+class IntrusiveMapTransitionIterator {
+ public:
+ explicit IntrusiveMapTransitionIterator(DescriptorArray* descriptor_array)
+ : descriptor_array_(descriptor_array) { }
+
+ void Start() {
+ ASSERT(!IsIterating());
+ if (HasContentArray()) *ContentHeader() = Smi::FromInt(0);
+ }
+
+ bool IsIterating() {
+ return HasContentArray() && (*ContentHeader())->IsSmi();
+ }
+
+ Map* Next() {
+ ASSERT(IsIterating());
+ FixedArray* contents = ContentArray();
+ // Attention, tricky index manipulation ahead: Every entry in the contents
+ // array consists of a value/details pair, so the index is typically even.
+ // An exception is made for CALLBACKS entries: An even index means we look
+ // at its getter, and an odd index means we look at its setter.
+ int index = Smi::cast(*ContentHeader())->value();
+ while (index < contents->length()) {
+ PropertyDetails details(Smi::cast(contents->get(index | 1)));
+ switch (details.type()) {
+ case MAP_TRANSITION:
+ case CONSTANT_TRANSITION:
+ case ELEMENTS_TRANSITION:
+ // We definitely have a map transition.
+ *ContentHeader() = Smi::FromInt(index + 2);
+ return static_cast<Map*>(contents->get(index));
+ case CALLBACKS: {
+ // We might have a map transition in a getter or in a setter.
+ AccessorPair* accessors =
+ static_cast<AccessorPair*>(contents->get(index & ~1));
+ Object* accessor =
+ ((index & 1) == 0) ? accessors->getter() : accessors->setter();
+ index++;
+ if (accessor->IsMap()) {
+ *ContentHeader() = Smi::FromInt(index);
+ return static_cast<Map*>(accessor);
+ }
break;
}
- }
- if (!map_done) continue;
- } else {
- map_or_index_field = NULL;
- }
- // That was the regular transitions, now for the prototype transitions.
- FixedArray* prototype_transitions =
- current->unchecked_prototype_transitions();
- Object** proto_map_or_index_field =
- RawField(prototype_transitions, HeapObject::kMapOffset);
- Object* map_or_index = *proto_map_or_index_field;
- const int start = kProtoTransitionHeaderSize + kProtoTransitionMapOffset;
- int i = map_or_index->IsSmi() ? Smi::cast(map_or_index)->value() : start;
- if (i < prototype_transitions->length()) {
- // Found a map in the prototype transition array. Record progress in
- // an analogous way to the regular transitions array above.
- Object* perhaps_map = prototype_transitions->get(i);
- if (perhaps_map->IsMap()) {
- Map* next = Map::cast(perhaps_map);
- next->set_map_unsafe(current);
- *proto_map_or_index_field =
- Smi::FromInt(i + kProtoTransitionElementsPerEntry);
- current = next;
- continue;
+ case NORMAL:
+ case FIELD:
+ case CONSTANT_FUNCTION:
+ case HANDLER:
+ case INTERCEPTOR:
+ case NULL_DESCRIPTOR:
+ // We definitely have no map transition.
+ index += 2;
+ break;
}
}
- *proto_map_or_index_field = GetHeap()->fixed_array_map();
- if (map_or_index_field != NULL) {
- *map_or_index_field = GetHeap()->fixed_array_map();
+ *ContentHeader() = descriptor_array_->GetHeap()->fixed_array_map();
+ return NULL;
+ }
+
+ private:
+ bool HasContentArray() {
+ return descriptor_array_-> length() > DescriptorArray::kContentArrayIndex;
+ }
+
+ FixedArray* ContentArray() {
+ Object* array = descriptor_array_->get(DescriptorArray::kContentArrayIndex);
+ return static_cast<FixedArray*>(array);
+ }
+
+ Object** ContentHeader() {
+ return HeapObject::RawField(ContentArray(), DescriptorArray::kMapOffset);
+ }
+
+ DescriptorArray* descriptor_array_;
+};
+
+
+// An iterator over all prototype transitions, reusing the map field of the
+// underlying array while it is running.
+class IntrusivePrototypeTransitionIterator {
+ public:
+ explicit IntrusivePrototypeTransitionIterator(FixedArray* proto_trans)
+ : proto_trans_(proto_trans) { }
+
+ void Start() {
+ ASSERT(!IsIterating());
+ if (HasTransitions()) *Header() = Smi::FromInt(0);
+ }
+
+ bool IsIterating() {
+ return HasTransitions() && (*Header())->IsSmi();
+ }
+
+ Map* Next() {
+ ASSERT(IsIterating());
+ int transitionNumber = Smi::cast(*Header())->value();
+ if (transitionNumber < NumberOfTransitions()) {
+ *Header() = Smi::FromInt(transitionNumber + 1);
+ return GetTransition(transitionNumber);
}
+ *Header() = proto_trans_->GetHeap()->fixed_array_map();
+ return NULL;
+ }
- // The callback expects a map to have a real map as its map, so we save
- // the map field, which is being used to track the traversal and put the
- // correct map (the meta_map) in place while we do the callback.
- Map* prev = current->map();
- current->set_map_unsafe(meta_map);
- callback(current, data);
- current = prev;
+ private:
+ bool HasTransitions() {
+ return proto_trans_->length() >= Map::kProtoTransitionHeaderSize;
+ }
+
+ Object** Header() {
+ return HeapObject::RawField(proto_trans_, FixedArray::kMapOffset);
+ }
+
+ int NumberOfTransitions() {
+ Object* num = proto_trans_->get(Map::kProtoTransitionNumberOfEntriesOffset);
+ return Smi::cast(num)->value();
+ }
+
+ Map* GetTransition(int transitionNumber) {
+ return Map::cast(proto_trans_->get(IndexFor(transitionNumber)));
+ }
+
+ int IndexFor(int transitionNumber) {
+ return Map::kProtoTransitionHeaderSize +
+ Map::kProtoTransitionMapOffset +
+ transitionNumber * Map::kProtoTransitionElementsPerEntry;
+ }
+
+ FixedArray* proto_trans_;
+};
+
+
+// To traverse the transition tree iteratively, we have to store two kinds of
+// information in a map: The parent map in the traversal and which children of a
+// node have already been visited. To do this without additional memory, we
+// temporarily reuse two maps with known values:
+//
+// (1) The map of the map temporarily holds the parent, and is restored to the
+// meta map afterwards.
+//
+// (2) The info which children have already been visited depends on which part
+// of the map we currently iterate:
+//
+// (a) If we currently follow normal map transitions, we temporarily store
+// the current index in the map of the FixedArray of the desciptor
+// array's contents, and restore it to the fixed array map afterwards.
+// Note that a single descriptor can have 0, 1, or 2 transitions.
+//
+// (b) If we currently follow prototype transitions, we temporarily store
+// the current index in the map of the FixedArray holding the prototype
+// transitions, and restore it to the fixed array map afterwards.
+//
+// Note that the child iterator is just a concatenation of two iterators: One
+// iterating over map transitions and one iterating over prototype transisitons.
+class TraversableMap : public Map {
+ public:
+ // Record the parent in the traversal within this map. Note that this destroys
+ // this map's map!
+ void SetParent(TraversableMap* parent) { set_map_no_write_barrier(parent); }
+
+ // Reset the current map's map, returning the parent previously stored in it.
+ TraversableMap* GetAndResetParent() {
+ TraversableMap* old_parent = static_cast<TraversableMap*>(map());
+ set_map_no_write_barrier(GetHeap()->meta_map());
+ return old_parent;
+ }
+
+ // Start iterating over this map's children, possibly destroying a FixedArray
+ // map (see explanation above).
+ void ChildIteratorStart() {
+ IntrusiveMapTransitionIterator(instance_descriptors()).Start();
+ IntrusivePrototypeTransitionIterator(
+ unchecked_prototype_transitions()).Start();
+ }
+
+ // If we have an unvisited child map, return that one and advance. If we have
+ // none, return NULL and reset any destroyed FixedArray maps.
+ TraversableMap* ChildIteratorNext() {
+ IntrusiveMapTransitionIterator descriptor_iterator(instance_descriptors());
+ if (descriptor_iterator.IsIterating()) {
+ Map* next = descriptor_iterator.Next();
+ if (next != NULL) return static_cast<TraversableMap*>(next);
+ }
+ IntrusivePrototypeTransitionIterator
+ proto_iterator(unchecked_prototype_transitions());
+ if (proto_iterator.IsIterating()) {
+ Map* next = proto_iterator.Next();
+ if (next != NULL) return static_cast<TraversableMap*>(next);
+ }
+ return NULL;
+ }
+};
+
+
+// Traverse the transition tree in postorder without using the C++ stack by
+// doing pointer reversal.
+void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
+ TraversableMap* current = static_cast<TraversableMap*>(this);
+ current->ChildIteratorStart();
+ while (true) {
+ TraversableMap* child = current->ChildIteratorNext();
+ if (child != NULL) {
+ child->ChildIteratorStart();
+ child->SetParent(current);
+ current = child;
+ } else {
+ TraversableMap* parent = current->GetAndResetParent();
+ callback(current, data);
+ if (current == this) break;
+ current = parent;
+ }
}
}
@@ -5188,8 +5369,8 @@ int CodeCacheHashTable::GetIndex(String* name, Code::Flags flags) {
void CodeCacheHashTable::RemoveByIndex(int index) {
ASSERT(index >= 0);
Heap* heap = GetHeap();
- set(EntryToIndex(index), heap->null_value());
- set(EntryToIndex(index) + 1, heap->null_value());
+ set(EntryToIndex(index), heap->the_hole_value());
+ set(EntryToIndex(index) + 1, heap->the_hole_value());
ElementRemoved();
}
@@ -5270,7 +5451,7 @@ class PolymorphicCodeCacheHashTableKey : public HashTableKey {
for (int i = 0; i < maps_->length(); ++i) {
bool match_found = false;
for (int j = 0; j < other_maps.length(); ++j) {
- if (maps_->at(i)->EquivalentTo(*other_maps.at(j))) {
+ if (*(maps_->at(i)) == *(other_maps.at(j))) {
match_found = true;
break;
}
@@ -5368,7 +5549,7 @@ MaybeObject* PolymorphicCodeCacheHashTable::Put(MapHandleList* maps,
MaybeObject* FixedArray::AddKeysFromJSArray(JSArray* array) {
ElementsAccessor* accessor = array->GetElementsAccessor();
MaybeObject* maybe_result =
- accessor->AddElementsToFixedArray(array->elements(), this, array, array);
+ accessor->AddElementsToFixedArray(array, array, this);
FixedArray* result;
if (!maybe_result->To<FixedArray>(&result)) return maybe_result;
#ifdef DEBUG
@@ -5386,7 +5567,7 @@ MaybeObject* FixedArray::AddKeysFromJSArray(JSArray* array) {
MaybeObject* FixedArray::UnionOfKeys(FixedArray* other) {
ElementsAccessor* accessor = ElementsAccessor::ForArray(other);
MaybeObject* maybe_result =
- accessor->AddElementsToFixedArray(other, this, NULL, NULL);
+ accessor->AddElementsToFixedArray(NULL, NULL, this, other);
FixedArray* result;
if (!maybe_result->To<FixedArray>(&result)) return maybe_result;
#ifdef DEBUG
@@ -5413,7 +5594,9 @@ MaybeObject* FixedArray::CopySize(int new_length) {
AssertNoAllocation no_gc;
int len = length();
if (new_length < len) len = new_length;
- result->set_map(map());
+ // We are taking the map from the old fixed array so the map is sure to
+ // be an immortal immutable object.
+ result->set_map_no_write_barrier(map());
WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
for (int i = 0; i < len; i++) {
result->set(i, get(i), mode);
@@ -5470,15 +5653,21 @@ MaybeObject* DescriptorArray::Allocate(int number_of_descriptors) {
void DescriptorArray::SetEnumCache(FixedArray* bridge_storage,
- FixedArray* new_cache) {
+ FixedArray* new_cache,
+ Object* new_index_cache) {
ASSERT(bridge_storage->length() >= kEnumCacheBridgeLength);
+ ASSERT(new_index_cache->IsSmi() || new_index_cache->IsFixedArray());
if (HasEnumCache()) {
FixedArray::cast(get(kEnumerationIndexIndex))->
set(kEnumCacheBridgeCacheIndex, new_cache);
+ FixedArray::cast(get(kEnumerationIndexIndex))->
+ set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
} else {
if (IsEmpty()) return; // Do nothing for empty descriptor array.
FixedArray::cast(bridge_storage)->
set(kEnumCacheBridgeCacheIndex, new_cache);
+ FixedArray::cast(bridge_storage)->
+ set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
NoWriteBarrierSet(FixedArray::cast(bridge_storage),
kEnumCacheBridgeEnumIndex,
get(kEnumerationIndexIndex));
@@ -5487,6 +5676,38 @@ void DescriptorArray::SetEnumCache(FixedArray* bridge_storage,
}
+static bool InsertionPointFound(String* key1, String* key2) {
+ return key1->Hash() > key2->Hash() || key1 == key2;
+}
+
+
+void DescriptorArray::CopyFrom(Handle<DescriptorArray> dst,
+ int dst_index,
+ Handle<DescriptorArray> src,
+ int src_index,
+ const WhitenessWitness& witness) {
+ CALL_HEAP_FUNCTION_VOID(dst->GetIsolate(),
+ dst->CopyFrom(dst_index, *src, src_index, witness));
+}
+
+
+MaybeObject* DescriptorArray::CopyFrom(int dst_index,
+ DescriptorArray* src,
+ int src_index,
+ const WhitenessWitness& witness) {
+ Object* value = src->GetValue(src_index);
+ PropertyDetails details(src->GetDetails(src_index));
+ if (details.type() == CALLBACKS && value->IsAccessorPair()) {
+ MaybeObject* maybe_copy =
+ AccessorPair::cast(value)->CopyWithoutTransitions();
+ if (!maybe_copy->To(&value)) return maybe_copy;
+ }
+ Descriptor desc(src->GetKey(src_index), value, details);
+ Set(dst_index, &desc, witness);
+ return this;
+}
+
+
MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,
TransitionFlag transition_flag) {
// Transitions are only kept when inserting another transition.
@@ -5495,38 +5716,26 @@ MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,
// Conversely, we filter after replacing, so replacing a transition and
// removing all other transitions is not supported.
bool remove_transitions = transition_flag == REMOVE_TRANSITIONS;
- ASSERT(remove_transitions == !descriptor->GetDetails().IsTransition());
+ ASSERT(remove_transitions == !descriptor->ContainsTransition());
ASSERT(descriptor->GetDetails().type() != NULL_DESCRIPTOR);
// Ensure the key is a symbol.
- Object* result;
{ MaybeObject* maybe_result = descriptor->KeyToSymbol();
- if (!maybe_result->ToObject(&result)) return maybe_result;
+ if (maybe_result->IsFailure()) return maybe_result;
}
- int transitions = 0;
- int null_descriptors = 0;
- if (remove_transitions) {
- for (int i = 0; i < number_of_descriptors(); i++) {
- if (IsTransition(i)) transitions++;
- if (IsNullDescriptor(i)) null_descriptors++;
- }
- } else {
- for (int i = 0; i < number_of_descriptors(); i++) {
- if (IsNullDescriptor(i)) null_descriptors++;
- }
+ int new_size = 0;
+ for (int i = 0; i < number_of_descriptors(); i++) {
+ if (IsNullDescriptor(i)) continue;
+ if (remove_transitions && IsTransitionOnly(i)) continue;
+ new_size++;
}
- int new_size = number_of_descriptors() - transitions - null_descriptors;
// If key is in descriptor, we replace it in-place when filtering.
// Count a null descriptor for key as inserted, not replaced.
int index = Search(descriptor->GetKey());
- const bool inserting = (index == kNotFound);
- const bool replacing = !inserting;
+ const bool replacing = (index != kNotFound);
bool keep_enumeration_index = false;
- if (inserting) {
- ++new_size;
- }
if (replacing) {
// We are replacing an existing descriptor. We keep the enumeration
// index of a visible property.
@@ -5541,13 +5750,13 @@ MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,
// a transition that will be replaced. Adjust count in this case.
++new_size;
}
+ } else {
+ ++new_size;
}
DescriptorArray* new_descriptors;
{ MaybeObject* maybe_result = Allocate(new_size);
- if (!maybe_result->To<DescriptorArray>(&new_descriptors)) {
- return maybe_result;
- }
+ if (!maybe_result->To(&new_descriptors)) return maybe_result;
}
DescriptorArray::WhitenessWitness witness(new_descriptors);
@@ -5555,7 +5764,7 @@ MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,
// Set the enumeration index in the descriptors and set the enumeration index
// in the result.
int enumeration_index = NextEnumerationIndex();
- if (!descriptor->GetDetails().IsTransition()) {
+ if (!descriptor->ContainsTransition()) {
if (keep_enumeration_index) {
descriptor->SetEnumerationIndex(
PropertyDetails(GetDetails(index)).index());
@@ -5568,28 +5777,26 @@ MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,
// Copy the descriptors, filtering out transitions and null descriptors,
// and inserting or replacing a descriptor.
- uint32_t descriptor_hash = descriptor->GetKey()->Hash();
- int from_index = 0;
int to_index = 0;
-
- for (; from_index < number_of_descriptors(); from_index++) {
- String* key = GetKey(from_index);
- if (key->Hash() > descriptor_hash || key == descriptor->GetKey()) {
- break;
+ int insertion_index = -1;
+ int from_index = 0;
+ while (from_index < number_of_descriptors()) {
+ if (insertion_index < 0 &&
+ InsertionPointFound(GetKey(from_index), descriptor->GetKey())) {
+ insertion_index = to_index++;
+ if (replacing) from_index++;
+ } else {
+ if (!(IsNullDescriptor(from_index) ||
+ (remove_transitions && IsTransitionOnly(from_index)))) {
+ MaybeObject* copy_result =
+ new_descriptors->CopyFrom(to_index++, this, from_index, witness);
+ if (copy_result->IsFailure()) return copy_result;
+ }
+ from_index++;
}
- if (IsNullDescriptor(from_index)) continue;
- if (remove_transitions && IsTransition(from_index)) continue;
- new_descriptors->CopyFrom(to_index++, this, from_index, witness);
- }
-
- new_descriptors->Set(to_index++, descriptor, witness);
- if (replacing) from_index++;
-
- for (; from_index < number_of_descriptors(); from_index++) {
- if (IsNullDescriptor(from_index)) continue;
- if (remove_transitions && IsTransition(from_index)) continue;
- new_descriptors->CopyFrom(to_index++, this, from_index, witness);
}
+ if (insertion_index < 0) insertion_index = to_index++;
+ new_descriptors->Set(insertion_index, descriptor, witness);
ASSERT(to_index == new_descriptors->number_of_descriptors());
SLOW_ASSERT(new_descriptors->IsSortedNoDuplicates());
@@ -5599,31 +5806,24 @@ MaybeObject* DescriptorArray::CopyInsert(Descriptor* descriptor,
MaybeObject* DescriptorArray::RemoveTransitions() {
- // Remove all transitions and null descriptors. Return a copy of the array
- // with all transitions removed, or a Failure object if the new array could
- // not be allocated.
-
- // Compute the size of the map transition entries to be removed.
- int num_removed = 0;
+ // Allocate the new descriptor array.
+ int new_number_of_descriptors = 0;
for (int i = 0; i < number_of_descriptors(); i++) {
- if (!IsProperty(i)) num_removed++;
+ if (IsProperty(i)) new_number_of_descriptors++;
}
-
- // Allocate the new descriptor array.
DescriptorArray* new_descriptors;
- { MaybeObject* maybe_result = Allocate(number_of_descriptors() - num_removed);
- if (!maybe_result->To<DescriptorArray>(&new_descriptors)) {
- return maybe_result;
- }
+ { MaybeObject* maybe_result = Allocate(new_number_of_descriptors);
+ if (!maybe_result->To(&new_descriptors)) return maybe_result;
}
- DescriptorArray::WhitenessWitness witness(new_descriptors);
-
// Copy the content.
+ DescriptorArray::WhitenessWitness witness(new_descriptors);
int next_descriptor = 0;
for (int i = 0; i < number_of_descriptors(); i++) {
if (IsProperty(i)) {
- new_descriptors->CopyFrom(next_descriptor++, this, i, witness);
+ MaybeObject* copy_result =
+ new_descriptors->CopyFrom(next_descriptor++, this, i, witness);
+ if (copy_result->IsFailure()) return copy_result;
}
}
ASSERT(next_descriptor == new_descriptors->number_of_descriptors());
@@ -5653,7 +5853,7 @@ void DescriptorArray::SortUnchecked(const WhitenessWitness& witness) {
}
}
if (child_hash <= parent_hash) break;
- NoWriteBarrierSwapDescriptors(parent_index, child_index);
+ NoIncrementalWriteBarrierSwapDescriptors(parent_index, child_index);
// Now element at child_index could be < its children.
parent_index = child_index; // parent_hash remains correct.
}
@@ -5662,7 +5862,7 @@ void DescriptorArray::SortUnchecked(const WhitenessWitness& witness) {
// Extract elements and create sorted array.
for (int i = len - 1; i > 0; --i) {
// Put max element at the back of the array.
- NoWriteBarrierSwapDescriptors(0, i);
+ NoIncrementalWriteBarrierSwapDescriptors(0, i);
// Shift down the new top element.
int parent_index = 0;
const uint32_t parent_hash = GetKey(parent_index)->Hash();
@@ -5678,7 +5878,7 @@ void DescriptorArray::SortUnchecked(const WhitenessWitness& witness) {
}
}
if (child_hash <= parent_hash) break;
- NoWriteBarrierSwapDescriptors(parent_index, child_index);
+ NoIncrementalWriteBarrierSwapDescriptors(parent_index, child_index);
parent_index = child_index;
}
}
@@ -5736,6 +5936,24 @@ int DescriptorArray::LinearSearch(String* name, int len) {
}
+MaybeObject* AccessorPair::CopyWithoutTransitions() {
+ Heap* heap = GetHeap();
+ AccessorPair* copy;
+ { MaybeObject* maybe_copy = heap->AllocateAccessorPair();
+ if (!maybe_copy->To(&copy)) return maybe_copy;
+ }
+ copy->set_getter(getter()->IsMap() ? heap->the_hole_value() : getter());
+ copy->set_setter(setter()->IsMap() ? heap->the_hole_value() : setter());
+ return copy;
+}
+
+
+Object* AccessorPair::GetComponent(AccessorComponent component) {
+ Object* accessor = (component == ACCESSOR_GETTER) ? getter() : setter();
+ return accessor->IsTheHole() ? GetHeap()->undefined_value() : accessor;
+}
+
+
MaybeObject* DeoptimizationInputData::Allocate(int deopt_entry_count,
PretenureFlag pretenure) {
ASSERT(deopt_entry_count > 0);
@@ -5797,7 +6015,7 @@ String::FlatContent String::GetFlatContent() {
if (shape.representation_tag() == kSeqStringTag) {
start = SeqAsciiString::cast(string)->GetChars();
} else {
- start = ExternalAsciiString::cast(string)->resource()->data();
+ start = ExternalAsciiString::cast(string)->GetChars();
}
return FlatContent(Vector<const char>(start + offset, length));
} else {
@@ -5806,7 +6024,7 @@ String::FlatContent String::GetFlatContent() {
if (shape.representation_tag() == kSeqStringTag) {
start = SeqTwoByteString::cast(string)->GetChars();
} else {
- start = ExternalTwoByteString::cast(string)->resource()->data();
+ start = ExternalTwoByteString::cast(string)->GetChars();
}
return FlatContent(Vector<const uc16>(start + offset, length));
}
@@ -5832,12 +6050,11 @@ SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
buffer->Reset(offset, this);
int character_position = offset;
int utf8_bytes = 0;
- while (buffer->has_more()) {
+ int last = unibrow::Utf16::kNoPreviousCharacter;
+ while (buffer->has_more() && character_position++ < offset + length) {
uint16_t character = buffer->GetNext();
- if (character_position < offset + length) {
- utf8_bytes += unibrow::Utf8::Length(character);
- }
- character_position++;
+ utf8_bytes += unibrow::Utf8::Length(character, last);
+ last = character;
}
if (length_return) {
@@ -5851,16 +6068,15 @@ SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
buffer->Seek(offset);
character_position = offset;
int utf8_byte_position = 0;
- while (buffer->has_more()) {
+ last = unibrow::Utf16::kNoPreviousCharacter;
+ while (buffer->has_more() && character_position++ < offset + length) {
uint16_t character = buffer->GetNext();
- if (character_position < offset + length) {
- if (allow_nulls == DISALLOW_NULLS && character == 0) {
- character = ' ';
- }
- utf8_byte_position +=
- unibrow::Utf8::Encode(result + utf8_byte_position, character);
+ if (allow_nulls == DISALLOW_NULLS && character == 0) {
+ character = ' ';
}
- character_position++;
+ utf8_byte_position +=
+ unibrow::Utf8::Encode(result + utf8_byte_position, character, last);
+ last = character;
}
result[utf8_byte_position] = 0;
return SmartArrayPointer<char>(result);
@@ -6045,44 +6261,26 @@ const unibrow::byte* ConsString::ConsStringReadBlock(ReadBlockBuffer* rbb,
}
-uint16_t ExternalAsciiString::ExternalAsciiStringGet(int index) {
- ASSERT(index >= 0 && index < length());
- return resource()->data()[index];
-}
-
-
const unibrow::byte* ExternalAsciiString::ExternalAsciiStringReadBlock(
unsigned* remaining,
unsigned* offset_ptr,
unsigned max_chars) {
// Cast const char* to unibrow::byte* (signedness difference).
const unibrow::byte* b =
- reinterpret_cast<const unibrow::byte*>(resource()->data()) + *offset_ptr;
+ reinterpret_cast<const unibrow::byte*>(GetChars()) + *offset_ptr;
*remaining = max_chars;
*offset_ptr += max_chars;
return b;
}
-const uc16* ExternalTwoByteString::ExternalTwoByteStringGetData(
- unsigned start) {
- return resource()->data() + start;
-}
-
-
-uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
- ASSERT(index >= 0 && index < length());
- return resource()->data()[index];
-}
-
-
void ExternalTwoByteString::ExternalTwoByteStringReadBlockIntoBuffer(
ReadBlockBuffer* rbb,
unsigned* offset_ptr,
unsigned max_chars) {
unsigned chars_read = 0;
unsigned offset = *offset_ptr;
- const uint16_t* data = resource()->data();
+ const uint16_t* data = GetChars();
while (chars_read < max_chars) {
uint16_t c = data[offset];
if (c <= kMaxAsciiCharCode) {
@@ -6128,9 +6326,7 @@ void ExternalAsciiString::ExternalAsciiStringReadBlockIntoBuffer(
unsigned max_chars) {
unsigned capacity = rbb->capacity - rbb->cursor;
if (max_chars > capacity) max_chars = capacity;
- memcpy(rbb->util_buffer + rbb->cursor,
- resource()->data() + *offset_ptr,
- max_chars);
+ memcpy(rbb->util_buffer + rbb->cursor, GetChars() + *offset_ptr, max_chars);
rbb->remaining += max_chars;
*offset_ptr += max_chars;
rbb->cursor += max_chars;
@@ -6194,73 +6390,6 @@ const unibrow::byte* String::ReadBlock(String* input,
}
-// This method determines the type of string involved and then gets the UTF8
-// length of the string. It doesn't flatten the string and has log(n) recursion
-// for a string of length n.
-int String::Utf8Length(String* input, int from, int to) {
- if (from == to) return 0;
- int total = 0;
- while (true) {
- if (input->IsAsciiRepresentation()) return total + to - from;
- switch (StringShape(input).representation_tag()) {
- case kConsStringTag: {
- ConsString* str = ConsString::cast(input);
- String* first = str->first();
- String* second = str->second();
- int first_length = first->length();
- if (first_length - from < to - first_length) {
- if (first_length > from) {
- // Left hand side is shorter.
- total += Utf8Length(first, from, first_length);
- input = second;
- from = 0;
- to -= first_length;
- } else {
- // We only need the right hand side.
- input = second;
- from -= first_length;
- to -= first_length;
- }
- } else {
- if (first_length <= to) {
- // Right hand side is shorter.
- total += Utf8Length(second, 0, to - first_length);
- input = first;
- to = first_length;
- } else {
- // We only need the left hand side.
- input = first;
- }
- }
- continue;
- }
- case kExternalStringTag:
- case kSeqStringTag: {
- Vector<const uc16> vector = input->GetFlatContent().ToUC16Vector();
- const uc16* p = vector.start();
- for (int i = from; i < to; i++) {
- total += unibrow::Utf8::Length(p[i]);
- }
- return total;
- }
- case kSlicedStringTag: {
- SlicedString* str = SlicedString::cast(input);
- int offset = str->offset();
- input = str->parent();
- from += offset;
- to += offset;
- continue;
- }
- default:
- break;
- }
- UNREACHABLE();
- return 0;
- }
- return 0;
-}
-
-
void Relocatable::PostGarbageCollectionProcessing() {
Isolate* isolate = Isolate::Current();
Relocatable* current = isolate->relocatable_top();
@@ -6572,13 +6701,13 @@ void String::WriteToFlat(String* src,
switch (StringShape(source).full_representation_tag()) {
case kAsciiStringTag | kExternalStringTag: {
CopyChars(sink,
- ExternalAsciiString::cast(source)->resource()->data() + from,
+ ExternalAsciiString::cast(source)->GetChars() + from,
to - from);
return;
}
case kTwoByteStringTag | kExternalStringTag: {
const uc16* data =
- ExternalTwoByteString::cast(source)->resource()->data();
+ ExternalTwoByteString::cast(source)->GetChars();
CopyChars(sink,
data + from,
to - from);
@@ -6616,10 +6745,21 @@ void String::WriteToFlat(String* src,
// Left hand side is longer. Recurse over right.
if (to > boundary) {
String* second = cons_string->second();
- WriteToFlat(second,
- sink + boundary - from,
- 0,
+ // When repeatedly appending to a string, we get a cons string that
+ // is unbalanced to the left, a list, essentially. We inline the
+ // common case of sequential ascii right child.
+ if (to - boundary == 1) {
+ sink[boundary - from] = static_cast<sinkchar>(second->Get(0));
+ } else if (second->IsSeqAsciiString()) {
+ CopyChars(sink + boundary - from,
+ SeqAsciiString::cast(second)->GetChars(),
to - boundary);
+ } else {
+ WriteToFlat(second,
+ sink + boundary - from,
+ 0,
+ to - boundary);
+ }
to = boundary;
}
source = first;
@@ -6643,8 +6783,10 @@ static inline bool CompareStringContents(IteratorA* ia, IteratorB* ib) {
// General slow case check. We know that the ia and ib iterators
// have the same length.
while (ia->has_more()) {
- uc32 ca = ia->GetNext();
- uc32 cb = ib->GetNext();
+ uint32_t ca = ia->GetNext();
+ uint32_t cb = ib->GetNext();
+ ASSERT(ca <= unibrow::Utf16::kMaxNonSurrogateCharCode);
+ ASSERT(cb <= unibrow::Utf16::kMaxNonSurrogateCharCode);
if (ca != cb)
return false;
}
@@ -6714,71 +6856,6 @@ static inline bool CompareStringContentsPartial(Isolate* isolate,
}
-bool String::SlowEqualsExternal(uc16 *string, int length) {
- int len = this->length();
- if (len != length) return false;
- if (len == 0) return true;
-
- // We know the strings are both non-empty. Compare the first chars
- // before we try to flatten the strings.
- if (this->Get(0) != string[0]) return false;
-
- String* lhs = this->TryFlattenGetString();
-
- if (lhs->IsFlat()) {
- String::FlatContent lhs_content = lhs->GetFlatContent();
- if (lhs->IsAsciiRepresentation()) {
- Vector<const char> vec1 = lhs_content.ToAsciiVector();
- VectorIterator<char> buf1(vec1);
- VectorIterator<uc16> ib(string, length);
- return CompareStringContents(&buf1, &ib);
- } else {
- Vector<const uc16> vec1 = lhs_content.ToUC16Vector();
- Vector<const uc16> vec2(string, length);
- return CompareRawStringContents(vec1, vec2);
- }
- } else {
- Isolate* isolate = GetIsolate();
- isolate->objects_string_compare_buffer_a()->Reset(0, lhs);
- VectorIterator<uc16> ib(string, length);
- return CompareStringContents(isolate->objects_string_compare_buffer_a(), &ib);
- }
-}
-
-
-bool String::SlowEqualsExternal(char *string, int length)
-{
- int len = this->length();
- if (len != length) return false;
- if (len == 0) return true;
-
- // We know the strings are both non-empty. Compare the first chars
- // before we try to flatten the strings.
- if (this->Get(0) != string[0]) return false;
-
- String* lhs = this->TryFlattenGetString();
-
- if (StringShape(lhs).IsSequentialAscii()) {
- const char* str1 = SeqAsciiString::cast(lhs)->GetChars();
- return CompareRawStringContents(Vector<const char>(str1, len),
- Vector<const char>(string, len));
- }
-
- if (lhs->IsFlat()) {
- String::FlatContent lhs_content = lhs->GetFlatContent();
- Vector<const uc16> vec1 = lhs_content.ToUC16Vector();
- VectorIterator<const uc16> buf1(vec1);
- VectorIterator<char> buf2(string, length);
- return CompareStringContents(&buf1, &buf2);
- } else {
- Isolate* isolate = GetIsolate();
- isolate->objects_string_compare_buffer_a()->Reset(0, lhs);
- VectorIterator<char> ib(string, length);
- return CompareStringContents(isolate->objects_string_compare_buffer_a(), &ib);
- }
-}
-
-
bool String::SlowEquals(String* other) {
// Fast check: negative check with lengths.
int len = length();
@@ -6788,6 +6865,20 @@ bool String::SlowEquals(String* other) {
// Fast check: if hash code is computed for both strings
// a fast negative check can be performed.
if (HasHashCode() && other->HasHashCode()) {
+#ifdef DEBUG
+ if (FLAG_enable_slow_asserts) {
+ if (Hash() != other->Hash()) {
+ bool found_difference = false;
+ for (int i = 0; i < len; i++) {
+ if (Get(i) != other->Get(i)) {
+ found_difference = true;
+ break;
+ }
+ }
+ ASSERT(found_difference);
+ }
+ }
+#endif
if (Hash() != other->Hash()) return false;
}
@@ -6878,8 +6969,14 @@ bool String::IsEqualTo(Vector<const char> str) {
decoder->Reset(str.start(), str.length());
int i;
for (i = 0; i < slen && decoder->has_more(); i++) {
- uc32 r = decoder->GetNext();
- if (Get(i) != r) return false;
+ uint32_t r = decoder->GetNext();
+ if (r > unibrow::Utf16::kMaxNonSurrogateCharCode) {
+ if (i > slen - 1) return false;
+ if (Get(i++) != unibrow::Utf16::LeadSurrogate(r)) return false;
+ if (Get(i) != unibrow::Utf16::TrailSurrogate(r)) return false;
+ } else {
+ if (Get(i) != r) return false;
+ }
}
return i == slen && !decoder->has_more();
}
@@ -6923,12 +7020,16 @@ uint32_t String::ComputeAndSetHash() {
// Compute the hash code.
uint32_t field = 0;
if (StringShape(this).IsSequentialAscii()) {
- field = HashSequentialString(SeqAsciiString::cast(this)->GetChars(), len);
+ field = HashSequentialString(SeqAsciiString::cast(this)->GetChars(),
+ len,
+ GetHeap()->HashSeed());
} else if (StringShape(this).IsSequentialTwoByte()) {
- field = HashSequentialString(SeqTwoByteString::cast(this)->GetChars(), len);
+ field = HashSequentialString(SeqTwoByteString::cast(this)->GetChars(),
+ len,
+ GetHeap()->HashSeed());
} else {
StringInputBuffer buffer(this);
- field = ComputeHashField(&buffer, len);
+ field = ComputeHashField(&buffer, len, GetHeap()->HashSeed());
}
// Store the hash code in the object.
@@ -7005,6 +7106,22 @@ uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) {
}
+void StringHasher::AddSurrogatePair(uc32 c) {
+ uint16_t lead = unibrow::Utf16::LeadSurrogate(c);
+ AddCharacter(lead);
+ uint16_t trail = unibrow::Utf16::TrailSurrogate(c);
+ AddCharacter(trail);
+}
+
+
+void StringHasher::AddSurrogatePairNoIndex(uc32 c) {
+ uint16_t lead = unibrow::Utf16::LeadSurrogate(c);
+ AddCharacterNoIndex(lead);
+ uint16_t trail = unibrow::Utf16::TrailSurrogate(c);
+ AddCharacterNoIndex(trail);
+}
+
+
uint32_t StringHasher::GetHashField() {
ASSERT(is_valid());
if (length_ <= String::kMaxHashCalcLength) {
@@ -7019,8 +7136,9 @@ uint32_t StringHasher::GetHashField() {
uint32_t String::ComputeHashField(unibrow::CharacterStream* buffer,
- int length) {
- StringHasher hasher(length);
+ int length,
+ uint32_t seed) {
+ StringHasher hasher(length, seed);
// Very long strings have a trivial hash that doesn't inspect the
// string contents.
@@ -7060,7 +7178,9 @@ void String::PrintOn(FILE* file) {
}
-void Map::CreateOneBackPointer(Map* target) {
+void Map::CreateOneBackPointer(Object* transition_target) {
+ if (!transition_target->IsMap()) return;
+ Map* target = Map::cast(transition_target);
#ifdef DEBUG
// Verify target.
Object* source_prototype = prototype();
@@ -7082,90 +7202,131 @@ void Map::CreateOneBackPointer(Map* target) {
void Map::CreateBackPointers() {
DescriptorArray* descriptors = instance_descriptors();
for (int i = 0; i < descriptors->number_of_descriptors(); i++) {
- if (descriptors->GetType(i) == MAP_TRANSITION ||
- descriptors->GetType(i) == ELEMENTS_TRANSITION ||
- descriptors->GetType(i) == CONSTANT_TRANSITION) {
- Object* object = reinterpret_cast<Object*>(descriptors->GetValue(i));
- if (object->IsMap()) {
- CreateOneBackPointer(reinterpret_cast<Map*>(object));
- } else {
- ASSERT(object->IsFixedArray());
- ASSERT(descriptors->GetType(i) == ELEMENTS_TRANSITION);
- FixedArray* array = reinterpret_cast<FixedArray*>(object);
- for (int i = 0; i < array->length(); ++i) {
- Map* target = reinterpret_cast<Map*>(array->get(i));
- if (!target->IsUndefined()) {
- CreateOneBackPointer(target);
+ switch (descriptors->GetType(i)) {
+ case MAP_TRANSITION:
+ case CONSTANT_TRANSITION:
+ CreateOneBackPointer(descriptors->GetValue(i));
+ break;
+ case ELEMENTS_TRANSITION: {
+ Object* object = descriptors->GetValue(i);
+ if (object->IsMap()) {
+ CreateOneBackPointer(object);
+ } else {
+ FixedArray* array = FixedArray::cast(object);
+ for (int i = 0; i < array->length(); ++i) {
+ CreateOneBackPointer(array->get(i));
}
}
+ break;
}
+ case CALLBACKS: {
+ Object* object = descriptors->GetValue(i);
+ if (object->IsAccessorPair()) {
+ AccessorPair* accessors = AccessorPair::cast(object);
+ CreateOneBackPointer(accessors->getter());
+ CreateOneBackPointer(accessors->setter());
+ }
+ break;
+ }
+ case NORMAL:
+ case FIELD:
+ case CONSTANT_FUNCTION:
+ case HANDLER:
+ case INTERCEPTOR:
+ case NULL_DESCRIPTOR:
+ break;
}
}
}
+bool Map::RestoreOneBackPointer(Object* object,
+ Object* real_prototype,
+ bool* keep_entry) {
+ if (!object->IsMap()) return false;
+ Map* map = Map::cast(object);
+ if (Marking::MarkBitFrom(map).Get()) {
+ *keep_entry = true;
+ return false;
+ }
+ ASSERT(map->prototype() == this || map->prototype() == real_prototype);
+ // Getter prototype() is read-only, set_prototype() has side effects.
+ *RawField(map, Map::kPrototypeOffset) = real_prototype;
+ return true;
+}
+
+
void Map::ClearNonLiveTransitions(Heap* heap, Object* real_prototype) {
- // Live DescriptorArray objects will be marked, so we must use
- // low-level accessors to get and modify their data.
- DescriptorArray* d = reinterpret_cast<DescriptorArray*>(
+ DescriptorArray* d = DescriptorArray::cast(
*RawField(this, Map::kInstanceDescriptorsOrBitField3Offset));
if (d->IsEmpty()) return;
Smi* NullDescriptorDetails =
PropertyDetails(NONE, NULL_DESCRIPTOR).AsSmi();
- FixedArray* contents = reinterpret_cast<FixedArray*>(
+ FixedArray* contents = FixedArray::cast(
d->get(DescriptorArray::kContentArrayIndex));
ASSERT(contents->length() >= 2);
for (int i = 0; i < contents->length(); i += 2) {
- // If the pair (value, details) is a map transition,
- // check if the target is live. If not, null the descriptor.
- // Also drop the back pointer for that map transition, so that this
- // map is not reached again by following a back pointer from a
- // non-live object.
+ // If the pair (value, details) is a map transition, check if the target is
+ // live. If not, null the descriptor. Also drop the back pointer for that
+ // map transition, so that this map is not reached again by following a back
+ // pointer from a non-live object.
+ bool keep_entry = false;
PropertyDetails details(Smi::cast(contents->get(i + 1)));
- if (details.type() == MAP_TRANSITION ||
- details.type() == ELEMENTS_TRANSITION ||
- details.type() == CONSTANT_TRANSITION) {
- Object* object = reinterpret_cast<Object*>(contents->get(i));
- if (object->IsMap()) {
- Map* target = reinterpret_cast<Map*>(object);
- ASSERT(target->IsHeapObject());
- MarkBit map_mark = Marking::MarkBitFrom(target);
- if (!map_mark.Get()) {
- ASSERT(target->IsMap());
- contents->set_unchecked(i + 1, NullDescriptorDetails);
- contents->set_null_unchecked(heap, i);
- ASSERT(target->prototype() == this ||
- target->prototype() == real_prototype);
- // Getter prototype() is read-only, set_prototype() has side effects.
- *RawField(target, Map::kPrototypeOffset) = real_prototype;
- }
- } else {
- ASSERT(object->IsFixedArray());
- ASSERT(details.type() == ELEMENTS_TRANSITION);
- FixedArray* array = reinterpret_cast<FixedArray*>(object);
- bool reachable_map_found = false;
- for (int j = 0; j < array->length(); ++j) {
- Map* target = reinterpret_cast<Map*>(array->get(j));
- ASSERT(target->IsHeapObject());
- MarkBit map_mark = Marking::MarkBitFrom(target);
- if (!map_mark.Get()) {
- ASSERT(target->IsMap());
- array->set_undefined(j);
- ASSERT(target->prototype() == this ||
- target->prototype() == real_prototype);
- // Getter prototype() is read-only, set_prototype() has side
- // effects.
- *RawField(target, Map::kPrototypeOffset) = real_prototype;
- } else if (target->IsMap()) {
- reachable_map_found = true;
+ switch (details.type()) {
+ case MAP_TRANSITION:
+ case CONSTANT_TRANSITION:
+ RestoreOneBackPointer(contents->get(i), real_prototype, &keep_entry);
+ break;
+ case ELEMENTS_TRANSITION: {
+ Object* object = contents->get(i);
+ if (object->IsMap()) {
+ RestoreOneBackPointer(object, real_prototype, &keep_entry);
+ } else {
+ FixedArray* array = FixedArray::cast(object);
+ for (int j = 0; j < array->length(); ++j) {
+ if (RestoreOneBackPointer(array->get(j),
+ real_prototype,
+ &keep_entry)) {
+ array->set_undefined(j);
+ }
}
}
- // If no map was found, make sure the FixedArray also gets collected.
- if (!reachable_map_found) {
- contents->set_unchecked(i + 1, NullDescriptorDetails);
- contents->set_null_unchecked(heap, i);
+ break;
+ }
+ case CALLBACKS: {
+ Object* object = contents->get(i);
+ if (object->IsAccessorPair()) {
+ AccessorPair* accessors = AccessorPair::cast(object);
+ if (RestoreOneBackPointer(accessors->getter(),
+ real_prototype,
+ &keep_entry)) {
+ accessors->set_getter(heap->the_hole_value());
+ }
+ if (RestoreOneBackPointer(accessors->setter(),
+ real_prototype,
+ &keep_entry)) {
+ accessors->set_setter(heap->the_hole_value());
+ }
+ } else {
+ keep_entry = true;
}
+ break;
}
+ case NORMAL:
+ case FIELD:
+ case CONSTANT_FUNCTION:
+ case HANDLER:
+ case INTERCEPTOR:
+ case NULL_DESCRIPTOR:
+ keep_entry = true;
+ break;
+ }
+ // Make sure that an entry containing only dead transitions gets collected.
+ // What we *really* want to do here is removing this entry completely, but
+ // for technical reasons we can't do this, so we zero it out instead.
+ if (!keep_entry) {
+ contents->set_unchecked(i + 1, NullDescriptorDetails);
+ contents->set_null_unchecked(heap, i);
}
}
}
@@ -7288,11 +7449,19 @@ bool JSFunction::IsInlineable() {
}
-Object* JSFunction::SetInstancePrototype(Object* value) {
- ASSERT(value->IsJSObject());
+MaybeObject* JSFunction::SetInstancePrototype(Object* value) {
+ ASSERT(value->IsJSReceiver());
Heap* heap = GetHeap();
if (has_initial_map()) {
- initial_map()->set_prototype(value);
+ // If the function has allocated the initial map
+ // replace it with a copy containing the new prototype.
+ Map* new_map;
+ MaybeObject* maybe_new_map = initial_map()->CopyDropTransitions();
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+ new_map->set_prototype(value);
+ MaybeObject* maybe_object =
+ set_initial_map_and_cache_transitions(new_map);
+ if (maybe_object->IsFailure()) return maybe_object;
} else {
// Put the value in the initial map field until an initial map is
// needed. At that point, a new initial map is created and the
@@ -7308,19 +7477,18 @@ MaybeObject* JSFunction::SetPrototype(Object* value) {
ASSERT(should_have_prototype());
Object* construct_prototype = value;
- // If the value is not a JSObject, store the value in the map's
+ // If the value is not a JSReceiver, store the value in the map's
// constructor field so it can be accessed. Also, set the prototype
// used for constructing objects to the original object prototype.
// See ECMA-262 13.2.2.
- if (!value->IsJSObject()) {
+ if (!value->IsJSReceiver()) {
// Copy the map so this does not affect unrelated functions.
// Remove map transitions because they point to maps with a
// different prototype.
- Object* new_object;
+ Map* new_map;
{ MaybeObject* maybe_new_map = map()->CopyDropTransitions();
- if (!maybe_new_map->ToObject(&new_object)) return maybe_new_map;
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
}
- Map* new_map = Map::cast(new_object);
Heap* heap = new_map->GetHeap();
set_map(new_map);
new_map->set_constructor(value);
@@ -7338,18 +7506,18 @@ MaybeObject* JSFunction::SetPrototype(Object* value) {
Object* JSFunction::RemovePrototype() {
Context* global_context = context()->global_context();
- Map* no_prototype_map = shared()->strict_mode()
- ? global_context->strict_mode_function_without_prototype_map()
- : global_context->function_without_prototype_map();
+ Map* no_prototype_map = shared()->is_classic_mode()
+ ? global_context->function_without_prototype_map()
+ : global_context->strict_mode_function_without_prototype_map();
if (map() == no_prototype_map) {
// Be idempotent.
return this;
}
- ASSERT(!shared()->strict_mode() ||
- map() == global_context->strict_mode_function_map());
- ASSERT(shared()->strict_mode() || map() == global_context->function_map());
+ ASSERT(map() == (shared()->is_classic_mode()
+ ? global_context->function_map()
+ : global_context->strict_mode_function_map()));
set_map(no_prototype_map);
set_prototype_or_initial_map(no_prototype_map->GetHeap()->the_hole_value());
@@ -7377,12 +7545,12 @@ Context* JSFunction::GlobalContextFromLiterals(FixedArray* literals) {
MaybeObject* Oddball::Initialize(const char* to_string,
Object* to_number,
byte kind) {
- Object* symbol;
+ String* symbol;
{ MaybeObject* maybe_symbol =
Isolate::Current()->heap()->LookupAsciiSymbol(to_string);
- if (!maybe_symbol->ToObject(&symbol)) return maybe_symbol;
+ if (!maybe_symbol->To(&symbol)) return maybe_symbol;
}
- set_to_string(String::cast(symbol));
+ set_to_string(symbol);
set_to_number(to_number);
set_kind(kind);
return this;
@@ -7402,13 +7570,10 @@ bool SharedFunctionInfo::HasSourceCode() {
}
-Object* SharedFunctionInfo::GetSourceCode() {
- Isolate* isolate = GetIsolate();
- if (!HasSourceCode()) return isolate->heap()->undefined_value();
- HandleScope scope(isolate);
- Object* source = Script::cast(script())->source();
- return *SubString(Handle<String>(String::cast(source), isolate),
- start_position(), end_position());
+Handle<Object> SharedFunctionInfo::GetSourceCode() {
+ if (!HasSourceCode()) return GetIsolate()->factory()->undefined_value();
+ Handle<String> source(String::cast(Script::cast(script())->source()));
+ return SubString(source, start_position(), end_position());
}
@@ -7459,7 +7624,7 @@ bool SharedFunctionInfo::CanGenerateInlineConstructor(Object* prototype) {
LookupResult result(heap->isolate());
String* name = GetThisPropertyAssignmentName(i);
js_object->LocalLookupRealNamedProperty(name, &result);
- if (result.IsProperty() && result.type() == CALLBACKS) {
+ if (result.IsFound() && result.type() == CALLBACKS) {
return false;
}
}
@@ -7605,7 +7770,7 @@ void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) {
}
-void SharedFunctionInfo::DisableOptimization(JSFunction* function) {
+void SharedFunctionInfo::DisableOptimization() {
// Disable optimization for the shared function info and mark the
// code as non-optimizable. The marker on the shared function info
// is there because we flush non-optimized code thereby loosing the
@@ -7621,17 +7786,12 @@ void SharedFunctionInfo::DisableOptimization(JSFunction* function) {
code()->set_optimizable(false);
}
if (FLAG_trace_opt) {
- PrintF("[disabled optimization for: ");
- function->PrintName();
- PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
+ PrintF("[disabled optimization for %s]\n", *DebugName()->ToCString());
}
}
bool SharedFunctionInfo::VerifyBailoutId(int id) {
- // TODO(srdjan): debugging ARM crashes in hydrogen. OK to disable while
- // we are always bailing out on ARM.
-
ASSERT(id != AstNode::kNoNumber);
Code* unoptimized = code();
DeoptimizationOutputData* data =
@@ -7674,8 +7834,8 @@ void SharedFunctionInfo::DetachInitialMap() {
Map* map = reinterpret_cast<Map*>(initial_map());
// Make the map remember to restore the link if it survives the GC.
- map->set_bit_field3(
- map->bit_field3() | (1 << Map::kAttachedToSharedFunctionInfo));
+ map->set_bit_field2(
+ map->bit_field2() | (1 << Map::kAttachedToSharedFunctionInfo));
// Undo state changes made by StartInobjectTracking (except the
// construction_count). This way if the initial map does not survive the GC
@@ -7695,8 +7855,8 @@ void SharedFunctionInfo::DetachInitialMap() {
// Called from GC, hence reinterpret_cast and unchecked accessors.
void SharedFunctionInfo::AttachInitialMap(Map* map) {
- map->set_bit_field3(
- map->bit_field3() & ~(1 << Map::kAttachedToSharedFunctionInfo));
+ map->set_bit_field2(
+ map->bit_field2() & ~(1 << Map::kAttachedToSharedFunctionInfo));
// Resume inobject slack tracking.
set_initial_map(map);
@@ -7709,6 +7869,22 @@ void SharedFunctionInfo::AttachInitialMap(Map* map) {
}
+void SharedFunctionInfo::ResetForNewContext(int new_ic_age) {
+ code()->ClearInlineCaches();
+ set_ic_age(new_ic_age);
+ if (code()->kind() == Code::FUNCTION) {
+ code()->set_profiler_ticks(0);
+ if (optimization_disabled() &&
+ opt_count() >= Compiler::kDefaultMaxOptCount) {
+ // Re-enable optimizations if they were disabled due to opt_count limit.
+ set_optimization_disabled(false);
+ code()->set_optimizable(true);
+ }
+ set_opt_count(0);
+ }
+}
+
+
static void GetMinInobjectSlack(Map* map, void* data) {
int slack = map->unused_property_fields();
if (*reinterpret_cast<int*>(data) > slack) {
@@ -7752,6 +7928,28 @@ void SharedFunctionInfo::CompleteInobjectSlackTracking() {
}
+void SharedFunctionInfo::SharedFunctionInfoIterateBody(ObjectVisitor* v) {
+ v->VisitSharedFunctionInfo(this);
+ SharedFunctionInfo::BodyDescriptor::IterateBody(this, v);
+}
+
+
+#define DECLARE_TAG(ignore1, name, ignore2) name,
+const char* const VisitorSynchronization::kTags[
+ VisitorSynchronization::kNumberOfSyncTags] = {
+ VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
+};
+#undef DECLARE_TAG
+
+
+#define DECLARE_TAG(ignore1, ignore2, name) name,
+const char* const VisitorSynchronization::kTagNames[
+ VisitorSynchronization::kNumberOfSyncTags] = {
+ VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
+};
+#undef DECLARE_TAG
+
+
void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
@@ -7793,12 +7991,15 @@ void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) {
CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target.
}
-
void ObjectVisitor::VisitEmbeddedPointer(RelocInfo* rinfo) {
ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
VisitPointer(rinfo->target_object_address());
}
+void ObjectVisitor::VisitExternalReference(RelocInfo* rinfo) {
+ Address* p = rinfo->target_reference_address();
+ VisitExternalReferences(p, p + 1);
+}
void Code::InvalidateRelocation() {
set_relocation_info(GetHeap()->empty_byte_array());
@@ -7936,6 +8137,21 @@ Map* Code::FindFirstMap() {
}
+void Code::ClearInlineCaches() {
+ int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
+ RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
+ RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID) |
+ RelocInfo::ModeMask(RelocInfo::CODE_TARGET_CONTEXT);
+ for (RelocIterator it(this, mask); !it.done(); it.next()) {
+ RelocInfo* info = it.rinfo();
+ Code* target(Code::GetCodeFromTargetAddress(info->target_address()));
+ if (target->is_inline_cache_stub()) {
+ IC::Clear(info->pc());
+ }
+ }
+}
+
+
#ifdef ENABLE_DISASSEMBLER
void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
@@ -7944,11 +8160,14 @@ void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
PrintF(out, "Deoptimization Input Data (deopt points = %d)\n", deopt_count);
if (0 == deopt_count) return;
- PrintF(out, "%6s %6s %6s %12s\n", "index", "ast id", "argc",
+ PrintF(out, "%6s %6s %6s %6s %12s\n", "index", "ast id", "argc", "pc",
FLAG_print_code_verbose ? "commands" : "");
for (int i = 0; i < deopt_count; i++) {
- PrintF(out, "%6d %6d %6d",
- i, AstId(i)->value(), ArgumentsStackHeight(i)->value());
+ PrintF(out, "%6d %6d %6d %6d",
+ i,
+ AstId(i)->value(),
+ ArgumentsStackHeight(i)->value(),
+ Pc(i)->value());
if (!FLAG_print_code_verbose) {
PrintF(out, "\n");
@@ -7961,8 +8180,11 @@ void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
static_cast<Translation::Opcode>(iterator.Next());
ASSERT(Translation::BEGIN == opcode);
int frame_count = iterator.Next();
- PrintF(out, " %s {count=%d}\n", Translation::StringFor(opcode),
- frame_count);
+ int jsframe_count = iterator.Next();
+ PrintF(out, " %s {frame count=%d, js frame count=%d}\n",
+ Translation::StringFor(opcode),
+ frame_count,
+ jsframe_count);
while (iterator.HasNext() &&
Translation::BEGIN !=
@@ -7974,7 +8196,7 @@ void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
UNREACHABLE();
break;
- case Translation::FRAME: {
+ case Translation::JS_FRAME: {
int ast_id = iterator.Next();
int function_id = iterator.Next();
JSFunction* function =
@@ -7986,6 +8208,18 @@ void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
break;
}
+ case Translation::ARGUMENTS_ADAPTOR_FRAME:
+ case Translation::CONSTRUCT_STUB_FRAME: {
+ int function_id = iterator.Next();
+ JSFunction* function =
+ JSFunction::cast(LiteralArray()->get(function_id));
+ unsigned height = iterator.Next();
+ PrintF(out, "{function=");
+ function->PrintName(out);
+ PrintF(out, ", height=%u}", height);
+ break;
+ }
+
case Translation::DUPLICATE:
break;
@@ -8109,7 +8343,7 @@ const char* Code::PropertyType2String(PropertyType type) {
case CONSTANT_TRANSITION: return "CONSTANT_TRANSITION";
case NULL_DESCRIPTOR: return "NULL_DESCRIPTOR";
}
- UNREACHABLE();
+ UNREACHABLE(); // keep the compiler happy
return NULL;
}
@@ -8218,29 +8452,6 @@ void Code::Disassemble(const char* name, FILE* out) {
#endif // ENABLE_DISASSEMBLER
-static void CopyFastElementsToFast(FixedArray* source,
- FixedArray* destination,
- WriteBarrierMode mode) {
- uint32_t count = static_cast<uint32_t>(source->length());
- for (uint32_t i = 0; i < count; ++i) {
- destination->set(i, source->get(i), mode);
- }
-}
-
-
-static void CopySlowElementsToFast(NumberDictionary* source,
- FixedArray* destination,
- WriteBarrierMode mode) {
- for (int i = 0; i < source->Capacity(); ++i) {
- Object* key = source->KeyAt(i);
- if (key->IsNumber()) {
- uint32_t entry = static_cast<uint32_t>(key->Number());
- destination->set(entry, source->ValueAt(i), mode);
- }
- }
-}
-
-
MaybeObject* JSObject::SetFastElementsCapacityAndLength(
int capacity,
int length,
@@ -8250,111 +8461,46 @@ MaybeObject* JSObject::SetFastElementsCapacityAndLength(
ASSERT(!HasExternalArrayElements());
// Allocate a new fast elements backing store.
- FixedArray* new_elements = NULL;
- { Object* object;
- MaybeObject* maybe = heap->AllocateFixedArrayWithHoles(capacity);
- if (!maybe->ToObject(&object)) return maybe;
- new_elements = FixedArray::cast(object);
+ FixedArray* new_elements;
+ { MaybeObject* maybe = heap->AllocateFixedArrayWithHoles(capacity);
+ if (!maybe->To(&new_elements)) return maybe;
}
// Find the new map to use for this object if there is a map change.
Map* new_map = NULL;
if (elements()->map() != heap->non_strict_arguments_elements_map()) {
- Object* object;
+ // The resized array has FAST_SMI_ONLY_ELEMENTS if the capacity mode forces
+ // it, or if it's allowed and the old elements array contained only SMIs.
bool has_fast_smi_only_elements =
- (set_capacity_mode == kAllowSmiOnlyElements) &&
- (elements()->map()->has_fast_smi_only_elements() ||
- elements() == heap->empty_fixed_array());
+ (set_capacity_mode == kForceSmiOnlyElements) ||
+ ((set_capacity_mode == kAllowSmiOnlyElements) &&
+ (elements()->map()->has_fast_smi_only_elements() ||
+ elements() == heap->empty_fixed_array()));
ElementsKind elements_kind = has_fast_smi_only_elements
? FAST_SMI_ONLY_ELEMENTS
: FAST_ELEMENTS;
- MaybeObject* maybe = GetElementsTransitionMap(elements_kind);
- if (!maybe->ToObject(&object)) return maybe;
- new_map = Map::cast(object);
+ MaybeObject* maybe = GetElementsTransitionMap(GetIsolate(), elements_kind);
+ if (!maybe->To(&new_map)) return maybe;
}
- FixedArrayBase* old_elements_raw = elements();
+ FixedArrayBase* old_elements = elements();
ElementsKind elements_kind = GetElementsKind();
- switch (elements_kind) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
- AssertNoAllocation no_gc;
- WriteBarrierMode mode(new_elements->GetWriteBarrierMode(no_gc));
- CopyFastElementsToFast(FixedArray::cast(old_elements_raw),
- new_elements, mode);
- set_map(new_map);
- set_elements(new_elements);
- break;
- }
- case DICTIONARY_ELEMENTS: {
- AssertNoAllocation no_gc;
- WriteBarrierMode mode = new_elements->GetWriteBarrierMode(no_gc);
- CopySlowElementsToFast(NumberDictionary::cast(old_elements_raw),
- new_elements,
- mode);
- set_map(new_map);
- set_elements(new_elements);
- break;
- }
- case NON_STRICT_ARGUMENTS_ELEMENTS: {
- AssertNoAllocation no_gc;
- WriteBarrierMode mode = new_elements->GetWriteBarrierMode(no_gc);
- // The object's map and the parameter map are unchanged, the unaliased
- // arguments are copied to the new backing store.
- FixedArray* parameter_map = FixedArray::cast(old_elements_raw);
- FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
- if (arguments->IsDictionary()) {
- CopySlowElementsToFast(NumberDictionary::cast(arguments),
- new_elements,
- mode);
- } else {
- CopyFastElementsToFast(arguments, new_elements, mode);
- }
- parameter_map->set(1, new_elements);
- break;
- }
- case FAST_DOUBLE_ELEMENTS: {
- FixedDoubleArray* old_elements = FixedDoubleArray::cast(old_elements_raw);
- uint32_t old_length = static_cast<uint32_t>(old_elements->length());
- // Fill out the new array with this content and array holes.
- for (uint32_t i = 0; i < old_length; i++) {
- if (!old_elements->is_the_hole(i)) {
- Object* obj;
- // Objects must be allocated in the old object space, since the
- // overall number of HeapNumbers needed for the conversion might
- // exceed the capacity of new space, and we would fail repeatedly
- // trying to convert the FixedDoubleArray.
- MaybeObject* maybe_value_object =
- GetHeap()->AllocateHeapNumber(old_elements->get_scalar(i),
- TENURED);
- if (!maybe_value_object->ToObject(&obj)) return maybe_value_object;
- // Force write barrier. It's not worth trying to exploit
- // elems->GetWriteBarrierMode(), since it requires an
- // AssertNoAllocation stack object that would have to be positioned
- // after the HeapNumber allocation anyway.
- new_elements->set(i, obj, UPDATE_WRITE_BARRIER);
- }
- }
- set_map(new_map);
- set_elements(new_elements);
- break;
- }
- case EXTERNAL_BYTE_ELEMENTS:
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- case EXTERNAL_SHORT_ELEMENTS:
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- case EXTERNAL_INT_ELEMENTS:
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
- case EXTERNAL_FLOAT_ELEMENTS:
- case EXTERNAL_DOUBLE_ELEMENTS:
- case EXTERNAL_PIXEL_ELEMENTS:
- UNREACHABLE();
- break;
+ ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
+ ElementsKind to_kind = (elements_kind == FAST_SMI_ONLY_ELEMENTS)
+ ? FAST_SMI_ONLY_ELEMENTS
+ : FAST_ELEMENTS;
+ // int copy_size = Min(old_elements_raw->length(), new_elements->length());
+ accessor->CopyElements(this, new_elements, to_kind);
+ if (elements_kind != NON_STRICT_ARGUMENTS_ELEMENTS) {
+ set_map_and_elements(new_map, new_elements);
+ } else {
+ FixedArray* parameter_map = FixedArray::cast(old_elements);
+ parameter_map->set(1, new_elements);
}
if (FLAG_trace_elements_transitions) {
- PrintElementsTransition(stdout, elements_kind, old_elements_raw,
- FAST_ELEMENTS, new_elements);
+ PrintElementsTransition(stdout, elements_kind, old_elements,
+ GetElementsKind(), new_elements);
}
// Update the length if necessary.
@@ -8373,39 +8519,27 @@ MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength(
// We should never end in here with a pixel or external array.
ASSERT(!HasExternalArrayElements());
- Object* obj;
+ FixedDoubleArray* elems;
{ MaybeObject* maybe_obj =
heap->AllocateUninitializedFixedDoubleArray(capacity);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+ if (!maybe_obj->To(&elems)) return maybe_obj;
}
- FixedDoubleArray* elems = FixedDoubleArray::cast(obj);
+ Map* new_map;
{ MaybeObject* maybe_obj =
- GetElementsTransitionMap(FAST_DOUBLE_ELEMENTS);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+ GetElementsTransitionMap(heap->isolate(), FAST_DOUBLE_ELEMENTS);
+ if (!maybe_obj->To(&new_map)) return maybe_obj;
}
- Map* new_map = Map::cast(obj);
FixedArrayBase* old_elements = elements();
- ElementsKind elements_kind(GetElementsKind());
- AssertNoAllocation no_gc;
- switch (elements_kind) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
- elems->Initialize(FixedArray::cast(old_elements));
- break;
- }
- case FAST_DOUBLE_ELEMENTS: {
- elems->Initialize(FixedDoubleArray::cast(old_elements));
- break;
- }
- case DICTIONARY_ELEMENTS: {
- elems->Initialize(NumberDictionary::cast(old_elements));
- break;
- }
- default:
- UNREACHABLE();
- break;
+ ElementsKind elements_kind = GetElementsKind();
+ ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
+ accessor->CopyElements(this, elems, FAST_DOUBLE_ELEMENTS);
+ if (elements_kind != NON_STRICT_ARGUMENTS_ELEMENTS) {
+ set_map_and_elements(new_map, elems);
+ } else {
+ FixedArray* parameter_map = FixedArray::cast(old_elements);
+ parameter_map->set(1, elems);
}
if (FLAG_trace_elements_transitions) {
@@ -8413,11 +8547,6 @@ MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength(
FAST_DOUBLE_ELEMENTS, elems);
}
- ASSERT(new_map->has_fast_double_elements());
- set_map(new_map);
- ASSERT(elems->IsFixedDoubleArray());
- set_elements(elems);
-
if (IsJSArray()) {
JSArray::cast(this)->set_length(Smi::FromInt(length));
}
@@ -8426,61 +8555,6 @@ MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength(
}
-MaybeObject* JSObject::SetSlowElements(Object* len) {
- // We should never end in here with a pixel or external array.
- ASSERT(!HasExternalArrayElements());
-
- uint32_t new_length = static_cast<uint32_t>(len->Number());
-
- FixedArrayBase* old_elements = elements();
- ElementsKind elements_kind = GetElementsKind();
- switch (elements_kind) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS:
- case FAST_DOUBLE_ELEMENTS: {
- // Make sure we never try to shrink dense arrays into sparse arrays.
- ASSERT(static_cast<uint32_t>(old_elements->length()) <= new_length);
- MaybeObject* result = NormalizeElements();
- if (result->IsFailure()) return result;
-
- // Update length for JSArrays.
- if (IsJSArray()) JSArray::cast(this)->set_length(len);
- break;
- }
- case DICTIONARY_ELEMENTS: {
- if (IsJSArray()) {
- uint32_t old_length =
- static_cast<uint32_t>(JSArray::cast(this)->length()->Number());
- element_dictionary()->RemoveNumberEntries(new_length, old_length),
- JSArray::cast(this)->set_length(len);
- }
- break;
- }
- case NON_STRICT_ARGUMENTS_ELEMENTS:
- UNIMPLEMENTED();
- break;
- case EXTERNAL_BYTE_ELEMENTS:
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- case EXTERNAL_SHORT_ELEMENTS:
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- case EXTERNAL_INT_ELEMENTS:
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
- case EXTERNAL_FLOAT_ELEMENTS:
- case EXTERNAL_DOUBLE_ELEMENTS:
- case EXTERNAL_PIXEL_ELEMENTS:
- UNREACHABLE();
- break;
- }
-
- if (FLAG_trace_elements_transitions) {
- PrintElementsTransition(stdout, elements_kind, old_elements,
- DICTIONARY_ELEMENTS, elements());
- }
-
- return this;
-}
-
-
MaybeObject* JSArray::Initialize(int capacity) {
Heap* heap = GetHeap();
ASSERT(capacity >= 0);
@@ -8489,11 +8563,8 @@ MaybeObject* JSArray::Initialize(int capacity) {
if (capacity == 0) {
new_elements = heap->empty_fixed_array();
} else {
- Object* obj;
- { MaybeObject* maybe_obj = heap->AllocateFixedArrayWithHoles(capacity);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
- new_elements = FixedArray::cast(obj);
+ MaybeObject* maybe_obj = heap->AllocateFixedArrayWithHoles(capacity);
+ if (!maybe_obj->To(&new_elements)) return maybe_obj;
}
set_elements(new_elements);
return this;
@@ -8501,176 +8572,15 @@ MaybeObject* JSArray::Initialize(int capacity) {
void JSArray::Expand(int required_size) {
- Handle<JSArray> self(this);
- Handle<FixedArray> old_backing(FixedArray::cast(elements()));
- int old_size = old_backing->length();
- int new_size = required_size > old_size ? required_size : old_size;
- Handle<FixedArray> new_backing = FACTORY->NewFixedArray(new_size);
- // Can't use this any more now because we may have had a GC!
- for (int i = 0; i < old_size; i++) new_backing->set(i, old_backing->get(i));
- GetIsolate()->factory()->SetContent(self, new_backing);
-}
-
-
-static Failure* ArrayLengthRangeError(Heap* heap) {
- HandleScope scope(heap->isolate());
- return heap->isolate()->Throw(
- *FACTORY->NewRangeError("invalid_array_length",
- HandleVector<Object>(NULL, 0)));
+ GetIsolate()->factory()->SetElementsCapacityAndLength(
+ Handle<JSArray>(this), required_size, required_size);
}
-MaybeObject* JSObject::SetElementsLength(Object* len) {
+MaybeObject* JSArray::SetElementsLength(Object* len) {
// We should never end in here with a pixel or external array.
ASSERT(AllowsSetElementsLength());
-
- MaybeObject* maybe_smi_length = len->ToSmi();
- Object* smi_length = Smi::FromInt(0);
- if (maybe_smi_length->ToObject(&smi_length) && smi_length->IsSmi()) {
- const int value = Smi::cast(smi_length)->value();
- if (value < 0) return ArrayLengthRangeError(GetHeap());
- ElementsKind elements_kind = GetElementsKind();
- switch (elements_kind) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS:
- case FAST_DOUBLE_ELEMENTS: {
- int old_capacity = FixedArrayBase::cast(elements())->length();
- if (value <= old_capacity) {
- if (IsJSArray()) {
- Object* obj;
- if (elements_kind == FAST_ELEMENTS ||
- elements_kind == FAST_SMI_ONLY_ELEMENTS) {
- MaybeObject* maybe_obj = EnsureWritableFastElements();
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
- if (2 * value <= old_capacity) {
- // If more than half the elements won't be used, trim the array.
- if (value == 0) {
- initialize_elements();
- } else {
- Address filler_start;
- int filler_size;
- if (elements_kind == FAST_ELEMENTS ||
- elements_kind == FAST_SMI_ONLY_ELEMENTS) {
- FixedArray* fast_elements = FixedArray::cast(elements());
- fast_elements->set_length(value);
- filler_start = fast_elements->address() +
- FixedArray::OffsetOfElementAt(value);
- filler_size = (old_capacity - value) * kPointerSize;
- } else {
- ASSERT(GetElementsKind() == FAST_DOUBLE_ELEMENTS);
- FixedDoubleArray* fast_double_elements =
- FixedDoubleArray::cast(elements());
- fast_double_elements->set_length(value);
- filler_start = fast_double_elements->address() +
- FixedDoubleArray::OffsetOfElementAt(value);
- filler_size = (old_capacity - value) * kDoubleSize;
- }
- GetHeap()->CreateFillerObjectAt(filler_start, filler_size);
- }
- } else {
- // Otherwise, fill the unused tail with holes.
- int old_length = FastD2I(JSArray::cast(this)->length()->Number());
- if (elements_kind == FAST_ELEMENTS ||
- elements_kind == FAST_SMI_ONLY_ELEMENTS) {
- FixedArray* fast_elements = FixedArray::cast(elements());
- for (int i = value; i < old_length; i++) {
- fast_elements->set_the_hole(i);
- }
- } else {
- ASSERT(elements_kind == FAST_DOUBLE_ELEMENTS);
- FixedDoubleArray* fast_double_elements =
- FixedDoubleArray::cast(elements());
- for (int i = value; i < old_length; i++) {
- fast_double_elements->set_the_hole(i);
- }
- }
- }
- JSArray::cast(this)->set_length(Smi::cast(smi_length));
- }
- return this;
- }
- int min = NewElementsCapacity(old_capacity);
- int new_capacity = value > min ? value : min;
- if (!ShouldConvertToSlowElements(new_capacity)) {
- MaybeObject* result;
- if (elements_kind == FAST_ELEMENTS ||
- elements_kind == FAST_SMI_ONLY_ELEMENTS) {
- SetFastElementsCapacityMode set_capacity_mode =
- elements_kind == FAST_SMI_ONLY_ELEMENTS
- ? kAllowSmiOnlyElements
- : kDontAllowSmiOnlyElements;
- result = SetFastElementsCapacityAndLength(new_capacity,
- value,
- set_capacity_mode);
- } else {
- ASSERT(elements_kind == FAST_DOUBLE_ELEMENTS);
- result = SetFastDoubleElementsCapacityAndLength(new_capacity,
- value);
- }
- if (result->IsFailure()) return result;
- return this;
- }
- break;
- }
- case DICTIONARY_ELEMENTS: {
- if (IsJSArray()) {
- if (value == 0) {
- // If the length of a slow array is reset to zero, we clear
- // the array and flush backing storage. This has the added
- // benefit that the array returns to fast mode.
- Object* obj;
- { MaybeObject* maybe_obj = ResetElements();
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
- } else {
- // Remove deleted elements.
- uint32_t old_length =
- static_cast<uint32_t>(JSArray::cast(this)->length()->Number());
- element_dictionary()->RemoveNumberEntries(value, old_length);
- }
- JSArray::cast(this)->set_length(Smi::cast(smi_length));
- }
- return this;
- }
- case NON_STRICT_ARGUMENTS_ELEMENTS:
- case EXTERNAL_BYTE_ELEMENTS:
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- case EXTERNAL_SHORT_ELEMENTS:
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- case EXTERNAL_INT_ELEMENTS:
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
- case EXTERNAL_FLOAT_ELEMENTS:
- case EXTERNAL_DOUBLE_ELEMENTS:
- case EXTERNAL_PIXEL_ELEMENTS:
- UNREACHABLE();
- break;
- }
- }
-
- // General slow case.
- if (len->IsNumber()) {
- uint32_t length;
- if (len->ToArrayIndex(&length)) {
- return SetSlowElements(len);
- } else {
- return ArrayLengthRangeError(GetHeap());
- }
- }
-
- // len is not a number so make the array size one and
- // set only element to len.
- Object* obj;
- MaybeObject* maybe_obj = GetHeap()->AllocateFixedArray(1);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- FixedArray::cast(obj)->set(0, len);
-
- maybe_obj = EnsureCanContainElements(&len, 1);
- if (maybe_obj->IsFailure()) return maybe_obj;
-
- if (IsJSArray()) JSArray::cast(this)->set_length(Smi::FromInt(1));
- set_elements(FixedArray::cast(obj));
- return this;
+ return GetElementsAccessor()->SetLength(this, len);
}
@@ -8714,7 +8624,7 @@ MaybeObject* Map::PutPrototypeTransition(Object* prototype, Map* map) {
// Grow array by factor 2 over and above what we need.
{ MaybeObject* maybe_cache =
GetHeap()->AllocateFixedArray(transitions * 2 * step + header);
- if (!maybe_cache->To<FixedArray>(&new_cache)) return maybe_cache;
+ if (!maybe_cache->To(&new_cache)) return maybe_cache;
}
for (int i = 0; i < capacity * step; i++) {
@@ -8817,80 +8727,14 @@ MaybeObject* JSReceiver::SetPrototype(Object* value,
MaybeObject* JSObject::EnsureCanContainElements(Arguments* args,
uint32_t first_arg,
- uint32_t arg_count) {
- return EnsureCanContainElements(args->arguments() - first_arg, arg_count);
-}
-
-
-bool JSObject::HasElementPostInterceptor(JSReceiver* receiver, uint32_t index) {
- switch (GetElementsKind()) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
- uint32_t length = IsJSArray() ?
- static_cast<uint32_t>
- (Smi::cast(JSArray::cast(this)->length())->value()) :
- static_cast<uint32_t>(FixedArray::cast(elements())->length());
- if ((index < length) &&
- !FixedArray::cast(elements())->get(index)->IsTheHole()) {
- return true;
- }
- break;
- }
- case FAST_DOUBLE_ELEMENTS: {
- uint32_t length = IsJSArray() ?
- static_cast<uint32_t>
- (Smi::cast(JSArray::cast(this)->length())->value()) :
- static_cast<uint32_t>(FixedDoubleArray::cast(elements())->length());
- if ((index < length) &&
- !FixedDoubleArray::cast(elements())->is_the_hole(index)) {
- return true;
- }
- break;
- }
- case EXTERNAL_PIXEL_ELEMENTS: {
- ExternalPixelArray* pixels = ExternalPixelArray::cast(elements());
- if (index < static_cast<uint32_t>(pixels->length())) {
- return true;
- }
- break;
- }
- case EXTERNAL_BYTE_ELEMENTS:
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- case EXTERNAL_SHORT_ELEMENTS:
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- case EXTERNAL_INT_ELEMENTS:
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
- case EXTERNAL_FLOAT_ELEMENTS:
- case EXTERNAL_DOUBLE_ELEMENTS: {
- ExternalArray* array = ExternalArray::cast(elements());
- if (index < static_cast<uint32_t>(array->length())) {
- return true;
- }
- break;
- }
- case DICTIONARY_ELEMENTS: {
- if (element_dictionary()->FindEntry(index)
- != NumberDictionary::kNotFound) {
- return true;
- }
- break;
- }
- case NON_STRICT_ARGUMENTS_ELEMENTS:
- UNREACHABLE();
- break;
- }
-
- // Handle [] on String objects.
- if (this->IsStringObjectWithCharacterAt(index)) return true;
-
- Object* pt = GetPrototype();
- if (pt->IsNull()) return false;
- if (pt->IsJSProxy()) {
- // We need to follow the spec and simulate a call to [[GetOwnProperty]].
- return JSProxy::cast(pt)->GetElementAttributeWithHandler(
- receiver, index) != ABSENT;
- }
- return JSObject::cast(pt)->HasElementWithReceiver(receiver, index);
+ uint32_t arg_count,
+ EnsureElementsMode mode) {
+ // Elements in |Arguments| are ordered backwards (because they're on the
+ // stack), but the method that's called here iterates over them in forward
+ // direction.
+ return EnsureCanContainElements(
+ args->arguments() - first_arg - (arg_count - 1),
+ arg_count, mode);
}
@@ -8933,7 +8777,21 @@ bool JSObject::HasElementWithInterceptor(JSReceiver* receiver, uint32_t index) {
}
if (!result.IsEmpty()) return true;
}
- return holder_handle->HasElementPostInterceptor(*receiver_handle, index);
+
+ if (holder_handle->GetElementsAccessor()->HasElement(
+ *receiver_handle, *holder_handle, index)) {
+ return true;
+ }
+
+ if (holder_handle->IsStringObjectWithCharacterAt(index)) return true;
+ Object* pt = holder_handle->GetPrototype();
+ if (pt->IsJSProxy()) {
+ // We need to follow the spec and simulate a call to [[GetOwnProperty]].
+ return JSProxy::cast(pt)->GetElementAttributeWithHandler(
+ receiver, index) != ABSENT;
+ }
+ if (pt->IsNull()) return false;
+ return JSObject::cast(pt)->HasElementWithReceiver(*receiver_handle, index);
}
@@ -9008,7 +8866,7 @@ JSObject::LocalElementType JSObject::HasLocalElement(uint32_t index) {
}
case DICTIONARY_ELEMENTS: {
if (element_dictionary()->FindEntry(index) !=
- NumberDictionary::kNotFound) {
+ SeededNumberDictionary::kNotFound) {
return DICTIONARY_ELEMENT;
}
break;
@@ -9025,8 +8883,9 @@ JSObject::LocalElementType JSObject::HasLocalElement(uint32_t index) {
// If not aliased, check the arguments.
FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
if (arguments->IsDictionary()) {
- NumberDictionary* dictionary = NumberDictionary::cast(arguments);
- if (dictionary->FindEntry(index) != NumberDictionary::kNotFound) {
+ SeededNumberDictionary* dictionary =
+ SeededNumberDictionary::cast(arguments);
+ if (dictionary->FindEntry(index) != SeededNumberDictionary::kNotFound) {
return DICTIONARY_ELEMENT;
}
} else {
@@ -9042,28 +8901,6 @@ JSObject::LocalElementType JSObject::HasLocalElement(uint32_t index) {
}
-bool JSObject::HasElementInElements(FixedArray* elements,
- ElementsKind kind,
- uint32_t index) {
- ASSERT(kind == FAST_ELEMENTS || kind == DICTIONARY_ELEMENTS);
- if (kind == FAST_ELEMENTS) {
- int length = IsJSArray()
- ? Smi::cast(JSArray::cast(this)->length())->value()
- : elements->length();
- if (index < static_cast<uint32_t>(length) &&
- !elements->get(index)->IsTheHole()) {
- return true;
- }
- } else {
- if (NumberDictionary::cast(elements)->FindEntry(index) !=
- NumberDictionary::kNotFound) {
- return true;
- }
- }
- return false;
-}
-
-
bool JSObject::HasElementWithReceiver(JSReceiver* receiver, uint32_t index) {
// Check access rights if needed.
if (IsAccessCheckNeeded()) {
@@ -9079,68 +8916,9 @@ bool JSObject::HasElementWithReceiver(JSReceiver* receiver, uint32_t index) {
return HasElementWithInterceptor(receiver, index);
}
- ElementsKind kind = GetElementsKind();
- switch (kind) {
- case FAST_SMI_ONLY_ELEMENTS:
- case FAST_ELEMENTS: {
- uint32_t length = IsJSArray() ?
- static_cast<uint32_t>
- (Smi::cast(JSArray::cast(this)->length())->value()) :
- static_cast<uint32_t>(FixedArray::cast(elements())->length());
- if ((index < length) &&
- !FixedArray::cast(elements())->get(index)->IsTheHole()) return true;
- break;
- }
- case FAST_DOUBLE_ELEMENTS: {
- uint32_t length = IsJSArray() ?
- static_cast<uint32_t>
- (Smi::cast(JSArray::cast(this)->length())->value()) :
- static_cast<uint32_t>(FixedDoubleArray::cast(elements())->length());
- if ((index < length) &&
- !FixedDoubleArray::cast(elements())->is_the_hole(index)) return true;
- break;
- }
- case EXTERNAL_PIXEL_ELEMENTS: {
- ExternalPixelArray* pixels = ExternalPixelArray::cast(elements());
- if (index < static_cast<uint32_t>(pixels->length())) {
- return true;
- }
- break;
- }
- case EXTERNAL_BYTE_ELEMENTS:
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- case EXTERNAL_SHORT_ELEMENTS:
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- case EXTERNAL_INT_ELEMENTS:
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
- case EXTERNAL_FLOAT_ELEMENTS:
- case EXTERNAL_DOUBLE_ELEMENTS: {
- ExternalArray* array = ExternalArray::cast(elements());
- if (index < static_cast<uint32_t>(array->length())) {
- return true;
- }
- break;
- }
- case DICTIONARY_ELEMENTS: {
- if (element_dictionary()->FindEntry(index)
- != NumberDictionary::kNotFound) {
- return true;
- }
- break;
- }
- case NON_STRICT_ARGUMENTS_ELEMENTS: {
- FixedArray* parameter_map = FixedArray::cast(elements());
- uint32_t length = parameter_map->length();
- Object* probe =
- (index < length - 2) ? parameter_map->get(index + 2) : NULL;
- if (probe != NULL && !probe->IsTheHole()) return true;
-
- // Not a mapped parameter, check the arguments.
- FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
- kind = arguments->IsDictionary() ? DICTIONARY_ELEMENTS : FAST_ELEMENTS;
- if (HasElementInElements(arguments, kind, index)) return true;
- break;
- }
+ ElementsAccessor* accessor = GetElementsAccessor();
+ if (accessor->HasElement(receiver, this, index)) {
+ return true;
}
// Handle [] on String objects.
@@ -9159,8 +8937,10 @@ bool JSObject::HasElementWithReceiver(JSReceiver* receiver, uint32_t index) {
MaybeObject* JSObject::SetElementWithInterceptor(uint32_t index,
Object* value,
+ PropertyAttributes attributes,
StrictModeFlag strict_mode,
- bool check_prototype) {
+ bool check_prototype,
+ SetPropertyMode set_mode) {
Isolate* isolate = GetIsolate();
// Make sure that the top context does not change when doing
// callbacks or interceptor calls.
@@ -9188,8 +8968,10 @@ MaybeObject* JSObject::SetElementWithInterceptor(uint32_t index,
MaybeObject* raw_result =
this_handle->SetElementWithoutInterceptor(index,
*value_handle,
+ attributes,
strict_mode,
- check_prototype);
+ check_prototype,
+ set_mode);
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
return raw_result;
}
@@ -9227,8 +9009,8 @@ MaybeObject* JSObject::GetElementWithCallback(Object* receiver,
}
// __defineGetter__ callback
- if (structure->IsFixedArray()) {
- Object* getter = FixedArray::cast(structure)->get(kGetterIndex);
+ if (structure->IsAccessorPair()) {
+ Object* getter = AccessorPair::cast(structure)->getter();
if (getter->IsSpecFunction()) {
// TODO(rossberg): nicer would be to cast to some JSCallable here...
return GetPropertyWithDefinedGetter(receiver, JSReceiver::cast(getter));
@@ -9284,8 +9066,8 @@ MaybeObject* JSObject::SetElementWithCallback(Object* structure,
return *value_handle;
}
- if (structure->IsFixedArray()) {
- Handle<Object> setter(FixedArray::cast(structure)->get(kSetterIndex));
+ if (structure->IsAccessorPair()) {
+ Handle<Object> setter(AccessorPair::cast(structure)->setter());
if (setter->IsSpecFunction()) {
// TODO(rossberg): nicer would be to cast to some JSCallable here...
return SetPropertyWithDefinedSetter(JSReceiver::cast(*setter), value);
@@ -9345,10 +9127,8 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
if (backing_store->map() == GetHeap()->non_strict_arguments_elements_map()) {
backing_store = FixedArray::cast(backing_store->get(1));
} else {
- Object* writable;
MaybeObject* maybe = EnsureWritableFastElements();
- if (!maybe->ToObject(&writable)) return maybe;
- backing_store = FixedArray::cast(writable);
+ if (!maybe->To(&backing_store)) return maybe;
}
uint32_t capacity = static_cast<uint32_t>(backing_store->length());
@@ -9387,7 +9167,8 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
if (convert_to_slow) {
MaybeObject* result = NormalizeElements();
if (result->IsFailure()) return result;
- return SetDictionaryElement(index, value, strict_mode, check_prototype);
+ return SetDictionaryElement(index, value, NONE, strict_mode,
+ check_prototype);
}
}
// Convert to fast double elements if appropriate.
@@ -9400,9 +9181,11 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
}
// Change elements kind from SMI_ONLY to generic FAST if necessary.
if (HasFastSmiOnlyElements() && !value->IsSmi()) {
- MaybeObject* maybe_new_map = GetElementsTransitionMap(FAST_ELEMENTS);
Map* new_map;
- if (!maybe_new_map->To<Map>(&new_map)) return maybe_new_map;
+ { MaybeObject* maybe_new_map = GetElementsTransitionMap(GetIsolate(),
+ FAST_ELEMENTS);
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+ }
set_map(new_map);
if (FLAG_trace_elements_transitions) {
PrintElementsTransition(stdout, FAST_SMI_ONLY_ELEMENTS, elements(),
@@ -9411,17 +9194,18 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
}
// Increase backing store capacity if that's been decided previously.
if (new_capacity != capacity) {
- Object* new_elements;
+ FixedArray* new_elements;
SetFastElementsCapacityMode set_capacity_mode =
value->IsSmi() && HasFastSmiOnlyElements()
? kAllowSmiOnlyElements
: kDontAllowSmiOnlyElements;
- MaybeObject* maybe =
- SetFastElementsCapacityAndLength(new_capacity,
- array_length,
- set_capacity_mode);
- if (!maybe->ToObject(&new_elements)) return maybe;
- FixedArray::cast(new_elements)->set(index, value);
+ { MaybeObject* maybe =
+ SetFastElementsCapacityAndLength(new_capacity,
+ array_length,
+ set_capacity_mode);
+ if (!maybe->To(&new_elements)) return maybe;
+ }
+ new_elements->set(index, value);
return value;
}
// Finally, set the new element and length.
@@ -9436,8 +9220,10 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
MaybeObject* JSObject::SetDictionaryElement(uint32_t index,
Object* value,
+ PropertyAttributes attributes,
StrictModeFlag strict_mode,
- bool check_prototype) {
+ bool check_prototype,
+ SetPropertyMode set_mode) {
ASSERT(HasDictionaryElements() || HasDictionaryArgumentsElements());
Isolate* isolate = GetIsolate();
Heap* heap = isolate->heap();
@@ -9446,31 +9232,51 @@ MaybeObject* JSObject::SetDictionaryElement(uint32_t index,
FixedArray* elements = FixedArray::cast(this->elements());
bool is_arguments =
(elements->map() == heap->non_strict_arguments_elements_map());
- NumberDictionary* dictionary = NULL;
+ SeededNumberDictionary* dictionary = NULL;
if (is_arguments) {
- dictionary = NumberDictionary::cast(elements->get(1));
+ dictionary = SeededNumberDictionary::cast(elements->get(1));
} else {
- dictionary = NumberDictionary::cast(elements);
+ dictionary = SeededNumberDictionary::cast(elements);
}
int entry = dictionary->FindEntry(index);
- if (entry != NumberDictionary::kNotFound) {
+ if (entry != SeededNumberDictionary::kNotFound) {
Object* element = dictionary->ValueAt(entry);
PropertyDetails details = dictionary->DetailsAt(entry);
- if (details.type() == CALLBACKS) {
+ if (details.type() == CALLBACKS && set_mode == SET_PROPERTY) {
return SetElementWithCallback(element, index, value, this, strict_mode);
} else {
dictionary->UpdateMaxNumberKey(index);
- // If put fails in strict mode, throw an exception.
- if (!dictionary->ValueAtPut(entry, value) && strict_mode == kStrictMode) {
- Handle<Object> holder(this);
- Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
- Handle<Object> args[2] = { number, holder };
- Handle<Object> error =
- isolate->factory()->NewTypeError("strict_read_only_property",
- HandleVector(args, 2));
- return isolate->Throw(*error);
+ // If a value has not been initialized we allow writing to it even if it
+ // is read-only (a declared const that has not been initialized). If a
+ // value is being defined we skip attribute checks completely.
+ if (set_mode == DEFINE_PROPERTY) {
+ details = PropertyDetails(attributes, NORMAL, details.index());
+ dictionary->DetailsAtPut(entry, details);
+ } else if (details.IsReadOnly() && !element->IsTheHole()) {
+ if (strict_mode == kNonStrictMode) {
+ return isolate->heap()->undefined_value();
+ } else {
+ Handle<Object> holder(this);
+ Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
+ Handle<Object> args[2] = { number, holder };
+ Handle<Object> error =
+ isolate->factory()->NewTypeError("strict_read_only_property",
+ HandleVector(args, 2));
+ return isolate->Throw(*error);
+ }
}
+ // Elements of the arguments object in slow mode might be slow aliases.
+ if (is_arguments && element->IsAliasedArgumentsEntry()) {
+ AliasedArgumentsEntry* entry = AliasedArgumentsEntry::cast(element);
+ Context* context = Context::cast(elements->get(0));
+ int context_index = entry->aliased_context_slot();
+ ASSERT(!context->get(context_index)->IsTheHole());
+ context->set(context_index, value);
+ // For elements that are still writable we keep slow aliasing.
+ if (!details.IsReadOnly()) value = element;
+ }
+ dictionary->ValueAtPut(entry, value);
}
} else {
// Index not already used. Look for an accessor in the prototype chain.
@@ -9497,15 +9303,16 @@ MaybeObject* JSObject::SetDictionaryElement(uint32_t index,
}
}
FixedArrayBase* new_dictionary;
- MaybeObject* maybe = dictionary->AtNumberPut(index, value);
- if (!maybe->To<FixedArrayBase>(&new_dictionary)) return maybe;
- if (dictionary != NumberDictionary::cast(new_dictionary)) {
+ PropertyDetails details = PropertyDetails(attributes, NORMAL);
+ MaybeObject* maybe = dictionary->AddNumberEntry(index, value, details);
+ if (!maybe->To(&new_dictionary)) return maybe;
+ if (dictionary != SeededNumberDictionary::cast(new_dictionary)) {
if (is_arguments) {
elements->set(1, new_dictionary);
} else {
set_elements(new_dictionary);
}
- dictionary = NumberDictionary::cast(new_dictionary);
+ dictionary = SeededNumberDictionary::cast(new_dictionary);
}
}
@@ -9524,11 +9331,20 @@ MaybeObject* JSObject::SetDictionaryElement(uint32_t index,
} else {
new_length = dictionary->max_number_key() + 1;
}
- MaybeObject* result = CanConvertToFastDoubleElements()
+ SetFastElementsCapacityMode set_capacity_mode = FLAG_smi_only_arrays
+ ? kAllowSmiOnlyElements
+ : kDontAllowSmiOnlyElements;
+ bool has_smi_only_elements = false;
+ bool should_convert_to_fast_double_elements =
+ ShouldConvertToFastDoubleElements(&has_smi_only_elements);
+ if (has_smi_only_elements) {
+ set_capacity_mode = kForceSmiOnlyElements;
+ }
+ MaybeObject* result = should_convert_to_fast_double_elements
? SetFastDoubleElementsCapacityAndLength(new_length, new_length)
: SetFastElementsCapacityAndLength(new_length,
new_length,
- kDontAllowSmiOnlyElements);
+ set_capacity_mode);
if (result->IsFailure()) return result;
#ifdef DEBUG
if (FLAG_trace_normalization) {
@@ -9548,13 +9364,14 @@ MUST_USE_RESULT MaybeObject* JSObject::SetFastDoubleElement(
bool check_prototype) {
ASSERT(HasFastDoubleElements());
- FixedDoubleArray* elms = FixedDoubleArray::cast(elements());
- uint32_t elms_length = static_cast<uint32_t>(elms->length());
+ FixedArrayBase* base_elms = FixedArrayBase::cast(elements());
+ uint32_t elms_length = static_cast<uint32_t>(base_elms->length());
// If storing to an element that isn't in the array, pass the store request
// up the prototype chain before storing in the receiver's elements.
if (check_prototype &&
- (index >= elms_length || elms->is_the_hole(index))) {
+ (index >= elms_length ||
+ FixedDoubleArray::cast(base_elms)->is_the_hole(index))) {
bool found;
MaybeObject* result = SetElementWithCallbackSetterInPrototypes(index,
value,
@@ -9589,6 +9406,7 @@ MUST_USE_RESULT MaybeObject* JSObject::SetFastDoubleElement(
// Check whether there is extra space in the fixed array.
if (index < elms_length) {
+ FixedDoubleArray* elms = FixedDoubleArray::cast(elements());
elms->set(index, double_value);
if (IsJSArray()) {
// Update the length of the array if needed.
@@ -9627,25 +9445,64 @@ MUST_USE_RESULT MaybeObject* JSObject::SetFastDoubleElement(
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
ASSERT(HasDictionaryElements());
- return SetElement(index, value, strict_mode, check_prototype);
+ return SetElement(index, value, NONE, strict_mode, check_prototype);
}
MaybeObject* JSReceiver::SetElement(uint32_t index,
Object* value,
+ PropertyAttributes attributes,
StrictModeFlag strict_mode,
bool check_proto) {
- return IsJSProxy()
- ? JSProxy::cast(this)->SetElementWithHandler(index, value, strict_mode)
- : JSObject::cast(this)->SetElement(index, value, strict_mode, check_proto)
- ;
+ if (IsJSProxy()) {
+ return JSProxy::cast(this)->SetElementWithHandler(
+ index, value, strict_mode);
+ } else {
+ return JSObject::cast(this)->SetElement(
+ index, value, attributes, strict_mode, check_proto);
+ }
+}
+
+
+Handle<Object> JSObject::SetOwnElement(Handle<JSObject> object,
+ uint32_t index,
+ Handle<Object> value,
+ StrictModeFlag strict_mode) {
+ ASSERT(!object->HasExternalArrayElements());
+ CALL_HEAP_FUNCTION(
+ object->GetIsolate(),
+ object->SetElement(index, *value, NONE, strict_mode, false),
+ Object);
+}
+
+
+Handle<Object> JSObject::SetElement(Handle<JSObject> object,
+ uint32_t index,
+ Handle<Object> value,
+ PropertyAttributes attr,
+ StrictModeFlag strict_mode,
+ SetPropertyMode set_mode) {
+ if (object->HasExternalArrayElements()) {
+ if (!value->IsSmi() && !value->IsHeapNumber() && !value->IsUndefined()) {
+ bool has_exception;
+ Handle<Object> number = Execution::ToNumber(value, &has_exception);
+ if (has_exception) return Handle<Object>();
+ value = number;
+ }
+ }
+ CALL_HEAP_FUNCTION(
+ object->GetIsolate(),
+ object->SetElement(index, *value, attr, strict_mode, true, set_mode),
+ Object);
}
MaybeObject* JSObject::SetElement(uint32_t index,
Object* value,
+ PropertyAttributes attributes,
StrictModeFlag strict_mode,
- bool check_prototype) {
+ bool check_prototype,
+ SetPropertyMode set_mode) {
// Check access rights if needed.
if (IsAccessCheckNeeded()) {
Heap* heap = GetHeap();
@@ -9663,29 +9520,59 @@ MaybeObject* JSObject::SetElement(uint32_t index,
ASSERT(proto->IsJSGlobalObject());
return JSObject::cast(proto)->SetElement(index,
value,
+ attributes,
strict_mode,
- check_prototype);
+ check_prototype,
+ set_mode);
+ }
+
+ // Don't allow element properties to be redefined for external arrays.
+ if (HasExternalArrayElements() && set_mode == DEFINE_PROPERTY) {
+ Isolate* isolate = GetHeap()->isolate();
+ Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
+ Handle<Object> args[] = { Handle<Object>(this), number };
+ Handle<Object> error = isolate->factory()->NewTypeError(
+ "redef_external_array_element", HandleVector(args, ARRAY_SIZE(args)));
+ return isolate->Throw(*error);
+ }
+
+ // Normalize the elements to enable attributes on the property.
+ if ((attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0) {
+ SeededNumberDictionary* dictionary;
+ MaybeObject* maybe_object = NormalizeElements();
+ if (!maybe_object->To(&dictionary)) return maybe_object;
+ // Make sure that we never go back to fast case.
+ dictionary->set_requires_slow_elements();
}
// Check for lookup interceptor
if (HasIndexedInterceptor()) {
return SetElementWithInterceptor(index,
value,
+ attributes,
strict_mode,
- check_prototype);
+ check_prototype,
+ set_mode);
}
return SetElementWithoutInterceptor(index,
value,
+ attributes,
strict_mode,
- check_prototype);
+ check_prototype,
+ set_mode);
}
MaybeObject* JSObject::SetElementWithoutInterceptor(uint32_t index,
Object* value,
+ PropertyAttributes attr,
StrictModeFlag strict_mode,
- bool check_prototype) {
+ bool check_prototype,
+ SetPropertyMode set_mode) {
+ ASSERT(HasDictionaryElements() ||
+ HasDictionaryArgumentsElements() ||
+ (attr & (DONT_DELETE | DONT_ENUM | READ_ONLY)) == 0);
Isolate* isolate = GetIsolate();
switch (GetElementsKind()) {
case FAST_SMI_ONLY_ELEMENTS:
@@ -9733,7 +9620,8 @@ MaybeObject* JSObject::SetElementWithoutInterceptor(uint32_t index,
return array->SetValue(index, value);
}
case DICTIONARY_ELEMENTS:
- return SetDictionaryElement(index, value, strict_mode, check_prototype);
+ return SetDictionaryElement(index, value, attr, strict_mode,
+ check_prototype, set_mode);
case NON_STRICT_ARGUMENTS_ELEMENTS: {
FixedArray* parameter_map = FixedArray::cast(elements());
uint32_t length = parameter_map->length();
@@ -9744,17 +9632,23 @@ MaybeObject* JSObject::SetElementWithoutInterceptor(uint32_t index,
int context_index = Smi::cast(probe)->value();
ASSERT(!context->get(context_index)->IsTheHole());
context->set(context_index, value);
- return value;
- } else {
- // Object is not mapped, defer to the arguments.
- FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
- if (arguments->IsDictionary()) {
- return SetDictionaryElement(index, value, strict_mode,
- check_prototype);
- } else {
- return SetFastElement(index, value, strict_mode, check_prototype);
+ // Redefining attributes of an aliased element destroys fast aliasing.
+ if (set_mode == SET_PROPERTY || attr == NONE) return value;
+ parameter_map->set_the_hole(index + 2);
+ // For elements that are still writable we re-establish slow aliasing.
+ if ((attr & READ_ONLY) == 0) {
+ MaybeObject* maybe_entry =
+ isolate->heap()->AllocateAliasedArgumentsEntry(context_index);
+ if (!maybe_entry->ToObject(&value)) return maybe_entry;
}
}
+ FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
+ if (arguments->IsDictionary()) {
+ return SetDictionaryElement(index, value, attr, strict_mode,
+ check_prototype, set_mode);
+ } else {
+ return SetFastElement(index, value, strict_mode, check_prototype);
+ }
}
}
// All possible cases have been handled above. Add a return to avoid the
@@ -9764,34 +9658,62 @@ MaybeObject* JSObject::SetElementWithoutInterceptor(uint32_t index,
}
-MUST_USE_RESULT MaybeObject* JSObject::TransitionElementsKind(
- ElementsKind to_kind) {
+Handle<Object> JSObject::TransitionElementsKind(Handle<JSObject> object,
+ ElementsKind to_kind) {
+ CALL_HEAP_FUNCTION(object->GetIsolate(),
+ object->TransitionElementsKind(to_kind),
+ Object);
+}
+
+
+MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) {
ElementsKind from_kind = map()->elements_kind();
+
+ Isolate* isolate = GetIsolate();
+ if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
+ (to_kind == FAST_ELEMENTS ||
+ elements() == isolate->heap()->empty_fixed_array())) {
+ MaybeObject* maybe_new_map = GetElementsTransitionMap(isolate, to_kind);
+ Map* new_map;
+ if (!maybe_new_map->To(&new_map)) return maybe_new_map;
+ set_map(new_map);
+ if (FLAG_trace_elements_transitions) {
+ FixedArrayBase* elms = FixedArrayBase::cast(elements());
+ PrintElementsTransition(stdout, from_kind, elms, to_kind, elms);
+ }
+ return this;
+ }
+
FixedArrayBase* elms = FixedArrayBase::cast(elements());
uint32_t capacity = static_cast<uint32_t>(elms->length());
uint32_t length = capacity;
+
if (IsJSArray()) {
- CHECK(JSArray::cast(this)->length()->ToArrayIndex(&length));
- }
- if (from_kind == FAST_SMI_ONLY_ELEMENTS) {
- if (to_kind == FAST_DOUBLE_ELEMENTS) {
- MaybeObject* maybe_result =
- SetFastDoubleElementsCapacityAndLength(capacity, length);
- if (maybe_result->IsFailure()) return maybe_result;
- return this;
- } else if (to_kind == FAST_ELEMENTS) {
- MaybeObject* maybe_new_map = GetElementsTransitionMap(FAST_ELEMENTS);
- Map* new_map;
- if (!maybe_new_map->To(&new_map)) return maybe_new_map;
- set_map(new_map);
- return this;
+ Object* raw_length = JSArray::cast(this)->length();
+ if (raw_length->IsUndefined()) {
+ // If length is undefined, then JSArray is being initialized and has no
+ // elements, assume a length of zero.
+ length = 0;
+ } else {
+ CHECK(JSArray::cast(this)->length()->ToArrayIndex(&length));
}
- } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
+ }
+
+ if (from_kind == FAST_SMI_ONLY_ELEMENTS &&
+ to_kind == FAST_DOUBLE_ELEMENTS) {
+ MaybeObject* maybe_result =
+ SetFastDoubleElementsCapacityAndLength(capacity, length);
+ if (maybe_result->IsFailure()) return maybe_result;
+ return this;
+ }
+
+ if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) {
MaybeObject* maybe_result = SetFastElementsCapacityAndLength(
capacity, length, kDontAllowSmiOnlyElements);
if (maybe_result->IsFailure()) return maybe_result;
return this;
}
+
// This method should never be called for any other case than the ones
// handled above.
UNREACHABLE();
@@ -9856,10 +9778,9 @@ MaybeObject* JSObject::GetElementWithInterceptor(Object* receiver,
Heap* heap = holder_handle->GetHeap();
ElementsAccessor* handler = holder_handle->GetElementsAccessor();
- MaybeObject* raw_result = handler->Get(holder_handle->elements(),
- index,
+ MaybeObject* raw_result = handler->Get(*this_handle,
*holder_handle,
- *this_handle);
+ index);
if (raw_result != heap->the_hole_value()) return raw_result;
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
@@ -9890,7 +9811,8 @@ void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
FixedArray::cast(FixedArray::cast(backing_store_base)->get(1));
backing_store = FixedArray::cast(backing_store_base);
if (backing_store->IsDictionary()) {
- NumberDictionary* dictionary = NumberDictionary::cast(backing_store);
+ SeededNumberDictionary* dictionary =
+ SeededNumberDictionary::cast(backing_store);
*capacity = dictionary->Capacity();
*used = dictionary->NumberOfElements();
break;
@@ -9905,8 +9827,8 @@ void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
}
break;
case DICTIONARY_ELEMENTS: {
- NumberDictionary* dictionary =
- NumberDictionary::cast(FixedArray::cast(elements()));
+ SeededNumberDictionary* dictionary =
+ SeededNumberDictionary::cast(FixedArray::cast(elements()));
*capacity = dictionary->Capacity();
*used = dictionary->NumberOfElements();
break;
@@ -9951,8 +9873,8 @@ bool JSObject::ShouldConvertToSlowElements(int new_capacity) {
int old_capacity = 0;
int used_elements = 0;
GetElementsCapacityAndUsage(&old_capacity, &used_elements);
- int dictionary_size = NumberDictionary::ComputeCapacity(used_elements) *
- NumberDictionary::kEntrySize;
+ int dictionary_size = SeededNumberDictionary::ComputeCapacity(used_elements) *
+ SeededNumberDictionary::kEntrySize;
return 3 * dictionary_size <= new_capacity;
}
@@ -9966,11 +9888,11 @@ bool JSObject::ShouldConvertToFastElements() {
if (IsAccessCheckNeeded()) return false;
FixedArray* elements = FixedArray::cast(this->elements());
- NumberDictionary* dictionary = NULL;
+ SeededNumberDictionary* dictionary = NULL;
if (elements->map() == GetHeap()->non_strict_arguments_elements_map()) {
- dictionary = NumberDictionary::cast(elements->get(1));
+ dictionary = SeededNumberDictionary::cast(elements->get(1));
} else {
- dictionary = NumberDictionary::cast(elements);
+ dictionary = SeededNumberDictionary::cast(elements);
}
// If an element has been added at a very high index in the elements
// dictionary, we cannot go back to fast case.
@@ -9985,22 +9907,31 @@ bool JSObject::ShouldConvertToFastElements() {
array_size = dictionary->max_number_key();
}
uint32_t dictionary_size = static_cast<uint32_t>(dictionary->Capacity()) *
- NumberDictionary::kEntrySize;
+ SeededNumberDictionary::kEntrySize;
return 2 * dictionary_size >= array_size;
}
-bool JSObject::CanConvertToFastDoubleElements() {
+bool JSObject::ShouldConvertToFastDoubleElements(
+ bool* has_smi_only_elements) {
+ *has_smi_only_elements = false;
if (FLAG_unbox_double_arrays) {
ASSERT(HasDictionaryElements());
- NumberDictionary* dictionary = NumberDictionary::cast(elements());
+ SeededNumberDictionary* dictionary =
+ SeededNumberDictionary::cast(elements());
+ bool found_double = false;
for (int i = 0; i < dictionary->Capacity(); i++) {
Object* key = dictionary->KeyAt(i);
if (key->IsNumber()) {
- if (!dictionary->ValueAt(i)->IsNumber()) return false;
+ Object* value = dictionary->ValueAt(i);
+ if (!value->IsNumber()) return false;
+ if (!value->IsSmi()) {
+ found_double = true;
+ }
}
}
- return true;
+ *has_smi_only_elements = !found_double;
+ return found_double;
} else {
return false;
}
@@ -10207,7 +10138,7 @@ bool JSObject::HasRealElementProperty(uint32_t index) {
}
case DICTIONARY_ELEMENTS: {
return element_dictionary()->FindEntry(index)
- != NumberDictionary::kNotFound;
+ != SeededNumberDictionary::kNotFound;
}
case NON_STRICT_ARGUMENTS_ELEMENTS:
UNIMPLEMENTED();
@@ -10231,29 +10162,14 @@ bool JSObject::HasRealNamedCallbackProperty(String* key) {
LookupResult result(isolate);
LocalLookupRealNamedProperty(key, &result);
- return result.IsProperty() && (result.type() == CALLBACKS);
+ return result.IsFound() && (result.type() == CALLBACKS);
}
int JSObject::NumberOfLocalProperties(PropertyAttributes filter) {
- if (HasFastProperties()) {
- DescriptorArray* descs = map()->instance_descriptors();
- int result = 0;
- for (int i = 0; i < descs->number_of_descriptors(); i++) {
- PropertyDetails details(descs->GetDetails(i));
- if (details.IsProperty() && (details.attributes() & filter) == 0) {
- result++;
- }
- }
- return result;
- } else {
- return property_dictionary()->NumberOfElementsFilterAttributes(filter);
- }
-}
-
-
-int JSObject::NumberOfEnumProperties() {
- return NumberOfLocalProperties(static_cast<PropertyAttributes>(DONT_ENUM));
+ return HasFastProperties() ?
+ map()->NumberOfDescribedProperties(filter) :
+ property_dictionary()->NumberOfElementsFilterAttributes(filter);
}
@@ -10263,8 +10179,8 @@ void FixedArray::SwapPairs(FixedArray* numbers, int i, int j) {
set(j, temp);
if (this != numbers) {
temp = numbers->get(i);
- numbers->set(i, numbers->get(j));
- numbers->set(j, temp);
+ numbers->set(i, Smi::cast(numbers->get(j)));
+ numbers->set(j, Smi::cast(temp));
}
}
@@ -10374,7 +10290,7 @@ void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) {
// purpose of this function is to provide reflection information for the object
// mirrors.
void JSObject::GetLocalPropertyNames(FixedArray* storage, int index) {
- ASSERT(storage->length() >= (NumberOfLocalProperties(NONE) - index));
+ ASSERT(storage->length() >= (NumberOfLocalProperties() - index));
if (HasFastProperties()) {
DescriptorArray* descs = map()->instance_descriptors();
for (int i = 0; i < descs->number_of_descriptors(); i++) {
@@ -10476,7 +10392,7 @@ int JSObject::GetLocalElementKeys(FixedArray* storage,
if (storage != NULL) {
element_dictionary()->CopyKeysTo(storage,
filter,
- NumberDictionary::SORTED);
+ SeededNumberDictionary::SORTED);
}
counter += element_dictionary()->NumberOfElementsFilterAttributes(filter);
break;
@@ -10488,9 +10404,11 @@ int JSObject::GetLocalElementKeys(FixedArray* storage,
if (arguments->IsDictionary()) {
// Copy the keys from arguments first, because Dictionary::CopyKeysTo
// will insert in storage starting at index 0.
- NumberDictionary* dictionary = NumberDictionary::cast(arguments);
+ SeededNumberDictionary* dictionary =
+ SeededNumberDictionary::cast(arguments);
if (storage != NULL) {
- dictionary->CopyKeysTo(storage, filter, NumberDictionary::UNSORTED);
+ dictionary->CopyKeysTo(
+ storage, filter, SeededNumberDictionary::UNSORTED);
}
counter += dictionary->NumberOfElementsFilterAttributes(filter);
for (int i = 0; i < mapped_length; ++i) {
@@ -10577,74 +10495,87 @@ class StringSharedKey : public HashTableKey {
public:
StringSharedKey(String* source,
SharedFunctionInfo* shared,
- StrictModeFlag strict_mode)
+ LanguageMode language_mode,
+ int scope_position)
: source_(source),
shared_(shared),
- strict_mode_(strict_mode) { }
+ language_mode_(language_mode),
+ scope_position_(scope_position) { }
bool IsMatch(Object* other) {
if (!other->IsFixedArray()) return false;
- FixedArray* pair = FixedArray::cast(other);
- SharedFunctionInfo* shared = SharedFunctionInfo::cast(pair->get(0));
+ FixedArray* other_array = FixedArray::cast(other);
+ SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
if (shared != shared_) return false;
- int strict_unchecked = Smi::cast(pair->get(2))->value();
- ASSERT(strict_unchecked == kStrictMode ||
- strict_unchecked == kNonStrictMode);
- StrictModeFlag strict_mode = static_cast<StrictModeFlag>(strict_unchecked);
- if (strict_mode != strict_mode_) return false;
- String* source = String::cast(pair->get(1));
+ int language_unchecked = Smi::cast(other_array->get(2))->value();
+ ASSERT(language_unchecked == CLASSIC_MODE ||
+ language_unchecked == STRICT_MODE ||
+ language_unchecked == EXTENDED_MODE);
+ LanguageMode language_mode = static_cast<LanguageMode>(language_unchecked);
+ if (language_mode != language_mode_) return false;
+ int scope_position = Smi::cast(other_array->get(3))->value();
+ if (scope_position != scope_position_) return false;
+ String* source = String::cast(other_array->get(1));
return source->Equals(source_);
}
static uint32_t StringSharedHashHelper(String* source,
SharedFunctionInfo* shared,
- StrictModeFlag strict_mode) {
+ LanguageMode language_mode,
+ int scope_position) {
uint32_t hash = source->Hash();
if (shared->HasSourceCode()) {
// Instead of using the SharedFunctionInfo pointer in the hash
// code computation, we use a combination of the hash of the
- // script source code and the start and end positions. We do
- // this to ensure that the cache entries can survive garbage
+ // script source code and the start position of the calling scope.
+ // We do this to ensure that the cache entries can survive garbage
// collection.
Script* script = Script::cast(shared->script());
hash ^= String::cast(script->source())->Hash();
- if (strict_mode == kStrictMode) hash ^= 0x8000;
- hash += shared->start_position();
+ if (language_mode == STRICT_MODE) hash ^= 0x8000;
+ if (language_mode == EXTENDED_MODE) hash ^= 0x0080;
+ hash += scope_position;
}
return hash;
}
uint32_t Hash() {
- return StringSharedHashHelper(source_, shared_, strict_mode_);
+ return StringSharedHashHelper(
+ source_, shared_, language_mode_, scope_position_);
}
uint32_t HashForObject(Object* obj) {
- FixedArray* pair = FixedArray::cast(obj);
- SharedFunctionInfo* shared = SharedFunctionInfo::cast(pair->get(0));
- String* source = String::cast(pair->get(1));
- int strict_unchecked = Smi::cast(pair->get(2))->value();
- ASSERT(strict_unchecked == kStrictMode ||
- strict_unchecked == kNonStrictMode);
- StrictModeFlag strict_mode = static_cast<StrictModeFlag>(strict_unchecked);
- return StringSharedHashHelper(source, shared, strict_mode);
+ FixedArray* other_array = FixedArray::cast(obj);
+ SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
+ String* source = String::cast(other_array->get(1));
+ int language_unchecked = Smi::cast(other_array->get(2))->value();
+ ASSERT(language_unchecked == CLASSIC_MODE ||
+ language_unchecked == STRICT_MODE ||
+ language_unchecked == EXTENDED_MODE);
+ LanguageMode language_mode = static_cast<LanguageMode>(language_unchecked);
+ int scope_position = Smi::cast(other_array->get(3))->value();
+ return StringSharedHashHelper(
+ source, shared, language_mode, scope_position);
}
MUST_USE_RESULT MaybeObject* AsObject() {
Object* obj;
- { MaybeObject* maybe_obj = source_->GetHeap()->AllocateFixedArray(3);
+ { MaybeObject* maybe_obj = source_->GetHeap()->AllocateFixedArray(4);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
- FixedArray* pair = FixedArray::cast(obj);
- pair->set(0, shared_);
- pair->set(1, source_);
- pair->set(2, Smi::FromInt(strict_mode_));
- return pair;
+ FixedArray* other_array = FixedArray::cast(obj);
+ other_array->set(0, shared_);
+ other_array->set(1, source_);
+ other_array->set(2, Smi::FromInt(language_mode_));
+ other_array->set(3, Smi::FromInt(scope_position_));
+ return other_array;
}
private:
String* source_;
SharedFunctionInfo* shared_;
- StrictModeFlag strict_mode_;
+ LanguageMode language_mode_;
+ int scope_position_;
};
@@ -10691,8 +10622,8 @@ class RegExpKey : public HashTableKey {
// Utf8SymbolKey carries a vector of chars as key.
class Utf8SymbolKey : public HashTableKey {
public:
- explicit Utf8SymbolKey(Vector<const char> string)
- : string_(string), hash_field_(0) { }
+ explicit Utf8SymbolKey(Vector<const char> string, uint32_t seed)
+ : string_(string), hash_field_(0), seed_(seed) { }
bool IsMatch(Object* string) {
return String::cast(string)->IsEqualTo(string_);
@@ -10702,8 +10633,8 @@ class Utf8SymbolKey : public HashTableKey {
if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
unibrow::Utf8InputBuffer<> buffer(string_.start(),
static_cast<unsigned>(string_.length()));
- chars_ = buffer.Length();
- hash_field_ = String::ComputeHashField(&buffer, chars_);
+ chars_ = buffer.Utf16Length();
+ hash_field_ = String::ComputeHashField(&buffer, chars_, seed_);
uint32_t result = hash_field_ >> String::kHashShift;
ASSERT(result != 0); // Ensure that the hash value of 0 is never computed.
return result;
@@ -10722,17 +10653,18 @@ class Utf8SymbolKey : public HashTableKey {
Vector<const char> string_;
uint32_t hash_field_;
int chars_; // Caches the number of characters when computing the hash code.
+ uint32_t seed_;
};
template <typename Char>
class SequentialSymbolKey : public HashTableKey {
public:
- explicit SequentialSymbolKey(Vector<const Char> string)
- : string_(string), hash_field_(0) { }
+ explicit SequentialSymbolKey(Vector<const Char> string, uint32_t seed)
+ : string_(string), hash_field_(0), seed_(seed) { }
uint32_t Hash() {
- StringHasher hasher(string_.length());
+ StringHasher hasher(string_.length(), seed_);
// Very long strings have a trivial hash that doesn't inspect the
// string contents.
@@ -10768,14 +10700,15 @@ class SequentialSymbolKey : public HashTableKey {
Vector<const Char> string_;
uint32_t hash_field_;
+ uint32_t seed_;
};
class AsciiSymbolKey : public SequentialSymbolKey<char> {
public:
- explicit AsciiSymbolKey(Vector<const char> str)
- : SequentialSymbolKey<char>(str) { }
+ AsciiSymbolKey(Vector<const char> str, uint32_t seed)
+ : SequentialSymbolKey<char>(str, seed) { }
bool IsMatch(Object* string) {
return String::cast(string)->IsAsciiEqualTo(string_);
@@ -10783,37 +10716,23 @@ class AsciiSymbolKey : public SequentialSymbolKey<char> {
MaybeObject* AsObject() {
if (hash_field_ == 0) Hash();
- MaybeObject *result = HEAP->AllocateAsciiSymbol(string_, hash_field_);
- if (!result->IsFailure() && result->ToObjectUnchecked()->IsSeqString()) {
- while (true) {
- Atomic32 my_symbol_id = next_symbol_id;
- if (my_symbol_id > Smi::kMaxValue)
- break;
- if (my_symbol_id == NoBarrier_CompareAndSwap(&next_symbol_id, my_symbol_id, my_symbol_id + 1)) {
- SeqString::cast(result->ToObjectUnchecked())->set_symbol_id(my_symbol_id);
- break;
- }
- }
- }
- return result;
+ return HEAP->AllocateAsciiSymbol(string_, hash_field_);
}
-
- static Atomic32 next_symbol_id;
};
-Atomic32 AsciiSymbolKey::next_symbol_id = 1;
class SubStringAsciiSymbolKey : public HashTableKey {
public:
explicit SubStringAsciiSymbolKey(Handle<SeqAsciiString> string,
int from,
- int length)
- : string_(string), from_(from), length_(length) { }
+ int length,
+ uint32_t seed)
+ : string_(string), from_(from), length_(length), seed_(seed) { }
uint32_t Hash() {
ASSERT(length_ >= 0);
ASSERT(from_ + length_ <= string_->length());
- StringHasher hasher(length_);
+ StringHasher hasher(length_, string_->GetHeap()->HashSeed());
// Very long strings have a trivial hash that doesn't inspect the
// string contents.
@@ -10865,13 +10784,14 @@ class SubStringAsciiSymbolKey : public HashTableKey {
int from_;
int length_;
uint32_t hash_field_;
+ uint32_t seed_;
};
class TwoByteSymbolKey : public SequentialSymbolKey<uc16> {
public:
- explicit TwoByteSymbolKey(Vector<const uc16> str)
- : SequentialSymbolKey<uc16>(str) { }
+ explicit TwoByteSymbolKey(Vector<const uc16> str, uint32_t seed)
+ : SequentialSymbolKey<uc16>(str, seed) { }
bool IsMatch(Object* string) {
return String::cast(string)->IsTwoByteEqualTo(string_);
@@ -10908,7 +10828,7 @@ class SymbolKey : public HashTableKey {
// Transform string to symbol if possible.
Map* map = heap->SymbolMapForString(string_);
if (map != NULL) {
- string_->set_map(map);
+ string_->set_map_no_write_barrier(map);
ASSERT(string_->IsSymbol());
return string_;
}
@@ -10969,7 +10889,7 @@ int StringDictionary::FindEntry(String* key) {
// Optimized for symbol key. Knowledge of the key type allows:
// 1. Move the check if the key is a symbol out of the loop.
- // 2. Avoid comparing hash codes in symbol to symbol comparision.
+ // 2. Avoid comparing hash codes in symbol to symbol comparison.
// 3. Detect a case when a dictionary key is not a symbol but the key is.
// In case of positive result the dictionary key may be replaced by
// the symbol with minimal performance penalty. It gives a chance to
@@ -10987,20 +10907,45 @@ int StringDictionary::FindEntry(String* key) {
if (element->IsUndefined()) break; // Empty entry.
if (key == element) return entry;
if (!element->IsSymbol() &&
- !element->IsNull() &&
+ !element->IsTheHole() &&
String::cast(element)->Equals(key)) {
// Replace a non-symbol key by the equivalent symbol for faster further
// lookups.
set(index, key);
return entry;
}
- ASSERT(element->IsNull() || !String::cast(element)->Equals(key));
+ ASSERT(element->IsTheHole() || !String::cast(element)->Equals(key));
entry = NextProbe(entry, count++, capacity);
}
return kNotFound;
}
+bool StringDictionary::ContainsTransition(int entry) {
+ switch (DetailsAt(entry).type()) {
+ case MAP_TRANSITION:
+ case CONSTANT_TRANSITION:
+ case ELEMENTS_TRANSITION:
+ return true;
+ case CALLBACKS: {
+ Object* value = ValueAt(entry);
+ if (!value->IsAccessorPair()) return false;
+ AccessorPair* accessors = AccessorPair::cast(value);
+ return accessors->getter()->IsMap() || accessors->setter()->IsMap();
+ }
+ case NORMAL:
+ case FIELD:
+ case CONSTANT_FUNCTION:
+ case HANDLER:
+ case INTERCEPTOR:
+ case NULL_DESCRIPTOR:
+ return false;
+ }
+ UNREACHABLE(); // Keep the compiler happy.
+ return false;
+}
+
+
template<typename Shape, typename Key>
MaybeObject* HashTable<Shape, Key>::Rehash(HashTable* new_table, Key key) {
ASSERT(NumberOfElements() < new_table->Capacity());
@@ -11021,7 +10966,7 @@ MaybeObject* HashTable<Shape, Key>::Rehash(HashTable* new_table, Key key) {
uint32_t from_index = EntryToIndex(i);
Object* k = get(from_index);
if (IsKey(k)) {
- uint32_t hash = Shape::HashForObject(key, k);
+ uint32_t hash = HashTable<Shape, Key>::HashForObject(key, k);
uint32_t insertion_index =
EntryToIndex(new_table->FindInsertionEntry(hash));
for (int j = 0; j < Shape::kEntrySize; j++) {
@@ -11098,7 +11043,7 @@ uint32_t HashTable<Shape, Key>::FindInsertionEntry(uint32_t hash) {
// EnsureCapacity will guarantee the hash table is never full.
while (true) {
Object* element = KeyAt(entry);
- if (element->IsUndefined() || element->IsNull()) break;
+ if (element->IsUndefined() || element->IsTheHole()) break;
entry = NextProbe(entry, count++, capacity);
}
return entry;
@@ -11119,39 +11064,49 @@ template class HashTable<ObjectHashTableShape<2>, Object*>;
template class Dictionary<StringDictionaryShape, String*>;
-template class Dictionary<NumberDictionaryShape, uint32_t>;
+template class Dictionary<SeededNumberDictionaryShape, uint32_t>;
-#ifndef __INTEL_COMPILER
-template MaybeObject* Dictionary<NumberDictionaryShape, uint32_t>::Allocate(
- int);
+template class Dictionary<UnseededNumberDictionaryShape, uint32_t>;
+
+template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::
+ Allocate(int at_least_space_for);
+
+template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
+ Allocate(int at_least_space_for);
template MaybeObject* Dictionary<StringDictionaryShape, String*>::Allocate(
int);
-template MaybeObject* Dictionary<NumberDictionaryShape, uint32_t>::AtPut(
+template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::AtPut(
uint32_t, Object*);
-template Object* Dictionary<NumberDictionaryShape, uint32_t>::SlowReverseLookup(
- Object*);
+template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
+ AtPut(uint32_t, Object*);
+
+template Object* Dictionary<SeededNumberDictionaryShape, uint32_t>::
+ SlowReverseLookup(Object* value);
+
+template Object* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
+ SlowReverseLookup(Object* value);
template Object* Dictionary<StringDictionaryShape, String*>::SlowReverseLookup(
Object*);
-template void Dictionary<NumberDictionaryShape, uint32_t>::CopyKeysTo(
+template void Dictionary<SeededNumberDictionaryShape, uint32_t>::CopyKeysTo(
FixedArray*,
PropertyAttributes,
- Dictionary<NumberDictionaryShape, uint32_t>::SortMode);
+ Dictionary<SeededNumberDictionaryShape, uint32_t>::SortMode);
template Object* Dictionary<StringDictionaryShape, String*>::DeleteProperty(
int, JSObject::DeleteMode);
-template Object* Dictionary<NumberDictionaryShape, uint32_t>::DeleteProperty(
- int, JSObject::DeleteMode);
+template Object* Dictionary<SeededNumberDictionaryShape, uint32_t>::
+ DeleteProperty(int, JSObject::DeleteMode);
template MaybeObject* Dictionary<StringDictionaryShape, String*>::Shrink(
String*);
-template MaybeObject* Dictionary<NumberDictionaryShape, uint32_t>::Shrink(
+template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::Shrink(
uint32_t);
template void Dictionary<StringDictionaryShape, String*>::CopyKeysTo(
@@ -11170,33 +11125,42 @@ template MaybeObject*
Dictionary<StringDictionaryShape, String*>::GenerateNewEnumerationIndices();
template int
-Dictionary<NumberDictionaryShape, uint32_t>::NumberOfElementsFilterAttributes(
- PropertyAttributes);
+Dictionary<SeededNumberDictionaryShape, uint32_t>::
+ NumberOfElementsFilterAttributes(PropertyAttributes);
+
+template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::Add(
+ uint32_t, Object*, PropertyDetails);
-template MaybeObject* Dictionary<NumberDictionaryShape, uint32_t>::Add(
+template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::Add(
uint32_t, Object*, PropertyDetails);
-template MaybeObject* Dictionary<NumberDictionaryShape, uint32_t>::
+template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::
+ EnsureCapacity(int, uint32_t);
+
+template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
EnsureCapacity(int, uint32_t);
template MaybeObject* Dictionary<StringDictionaryShape, String*>::
EnsureCapacity(int, String*);
-template MaybeObject* Dictionary<NumberDictionaryShape, uint32_t>::AddEntry(
- uint32_t, Object*, PropertyDetails, uint32_t);
+template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::
+ AddEntry(uint32_t, Object*, PropertyDetails, uint32_t);
+
+template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
+ AddEntry(uint32_t, Object*, PropertyDetails, uint32_t);
template MaybeObject* Dictionary<StringDictionaryShape, String*>::AddEntry(
String*, Object*, PropertyDetails, uint32_t);
template
-int Dictionary<NumberDictionaryShape, uint32_t>::NumberOfEnumElements();
+int Dictionary<SeededNumberDictionaryShape, uint32_t>::NumberOfEnumElements();
template
int Dictionary<StringDictionaryShape, String*>::NumberOfEnumElements();
template
-int HashTable<NumberDictionaryShape, uint32_t>::FindEntry(uint32_t);
-#endif
+int HashTable<SeededNumberDictionaryShape, uint32_t>::FindEntry(uint32_t);
+
// Collates undefined and unexisting elements below limit from position
// zero of the elements. The object stays in Dictionary mode.
@@ -11205,7 +11169,7 @@ MaybeObject* JSObject::PrepareSlowElementsForSort(uint32_t limit) {
// Must stay in dictionary mode, either because of requires_slow_elements,
// or because we are not going to sort (and therefore compact) all of the
// elements.
- NumberDictionary* dict = element_dictionary();
+ SeededNumberDictionary* dict = element_dictionary();
HeapNumber* result_double = NULL;
if (limit > static_cast<uint32_t>(Smi::kMaxValue)) {
// Allocate space for result before we start mutating the object.
@@ -11218,10 +11182,10 @@ MaybeObject* JSObject::PrepareSlowElementsForSort(uint32_t limit) {
Object* obj;
{ MaybeObject* maybe_obj =
- NumberDictionary::Allocate(dict->NumberOfElements());
+ SeededNumberDictionary::Allocate(dict->NumberOfElements());
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
- NumberDictionary* new_dict = NumberDictionary::cast(obj);
+ SeededNumberDictionary* new_dict = SeededNumberDictionary::cast(obj);
AssertNoAllocation no_alloc;
@@ -11305,7 +11269,7 @@ MaybeObject* JSObject::PrepareElementsForSort(uint32_t limit) {
if (HasDictionaryElements()) {
// Convert to fast elements containing only the existing properties.
// Ordering is irrelevant, since we are going to sort anyway.
- NumberDictionary* dict = element_dictionary();
+ SeededNumberDictionary* dict = element_dictionary();
if (IsJSArray() || dict->requires_slow_elements() ||
dict->max_number_key() >= limit) {
return PrepareSlowElementsForSort(limit);
@@ -11313,7 +11277,8 @@ MaybeObject* JSObject::PrepareElementsForSort(uint32_t limit) {
// Convert to fast elements.
Object* obj;
- { MaybeObject* maybe_obj = GetElementsTransitionMap(FAST_ELEMENTS);
+ { MaybeObject* maybe_obj = GetElementsTransitionMap(GetIsolate(),
+ FAST_ELEMENTS);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
Map* new_map = Map::cast(obj);
@@ -11560,7 +11525,7 @@ MaybeObject* ExternalUnsignedIntArray::SetValue(uint32_t index, Object* value) {
MaybeObject* ExternalFloatArray::SetValue(uint32_t index, Object* value) {
- float cast_value = 0;
+ float cast_value = static_cast<float>(OS::nan_value());
Heap* heap = GetHeap();
if (index < static_cast<uint32_t>(length())) {
if (value->IsSmi()) {
@@ -11570,7 +11535,7 @@ MaybeObject* ExternalFloatArray::SetValue(uint32_t index, Object* value) {
double double_value = HeapNumber::cast(value)->value();
cast_value = static_cast<float>(double_value);
} else {
- // Clamp undefined to zero (default). All other types have been
+ // Clamp undefined to NaN (default). All other types have been
// converted to a number type further up in the call chain.
ASSERT(value->IsUndefined());
}
@@ -11581,7 +11546,7 @@ MaybeObject* ExternalFloatArray::SetValue(uint32_t index, Object* value) {
MaybeObject* ExternalDoubleArray::SetValue(uint32_t index, Object* value) {
- double double_value = 0;
+ double double_value = OS::nan_value();
Heap* heap = GetHeap();
if (index < static_cast<uint32_t>(length())) {
if (value->IsSmi()) {
@@ -11590,7 +11555,7 @@ MaybeObject* ExternalDoubleArray::SetValue(uint32_t index, Object* value) {
} else if (value->IsHeapNumber()) {
double_value = HeapNumber::cast(value)->value();
} else {
- // Clamp undefined to zero (default). All other types have been
+ // Clamp undefined to NaN (default). All other types have been
// converted to a number type further up in the call chain.
ASSERT(value->IsUndefined());
}
@@ -11657,10 +11622,12 @@ MaybeObject* SymbolTable::LookupString(String* string, Object** s) {
// algorithm.
class TwoCharHashTableKey : public HashTableKey {
public:
- TwoCharHashTableKey(uint32_t c1, uint32_t c2)
+ TwoCharHashTableKey(uint32_t c1, uint32_t c2, uint32_t seed)
: c1_(c1), c2_(c2) {
// Char 1.
- uint32_t hash = c1 + (c1 << 10);
+ uint32_t hash = seed;
+ hash += c1;
+ hash += hash << 10;
hash ^= hash >> 6;
// Char 2.
hash += c2;
@@ -11670,9 +11637,9 @@ class TwoCharHashTableKey : public HashTableKey {
hash += hash << 3;
hash ^= hash >> 11;
hash += hash << 15;
- if (hash == 0) hash = 27;
+ if ((hash & String::kHashBitMask) == 0) hash = String::kZeroHash;
#ifdef DEBUG
- StringHasher hasher(2);
+ StringHasher hasher(2, seed);
hasher.AddCharacter(c1);
hasher.AddCharacter(c2);
// If this assert fails then we failed to reproduce the two-character
@@ -11729,7 +11696,7 @@ bool SymbolTable::LookupSymbolIfExists(String* string, String** symbol) {
bool SymbolTable::LookupTwoCharsSymbolIfExists(uint32_t c1,
uint32_t c2,
String** symbol) {
- TwoCharHashTableKey key(c1, c2);
+ TwoCharHashTableKey key(c1, c2, GetHeap()->HashSeed());
int entry = FindEntry(&key);
if (entry == kNotFound) {
return false;
@@ -11742,15 +11709,16 @@ bool SymbolTable::LookupTwoCharsSymbolIfExists(uint32_t c1,
}
-MaybeObject* SymbolTable::LookupSymbol(Vector<const char> str, Object** s) {
- Utf8SymbolKey key(str);
+MaybeObject* SymbolTable::LookupSymbol(Vector<const char> str,
+ Object** s) {
+ Utf8SymbolKey key(str, GetHeap()->HashSeed());
return LookupKey(&key, s);
}
MaybeObject* SymbolTable::LookupAsciiSymbol(Vector<const char> str,
Object** s) {
- AsciiSymbolKey key(str);
+ AsciiSymbolKey key(str, GetHeap()->HashSeed());
return LookupKey(&key, s);
}
@@ -11759,14 +11727,14 @@ MaybeObject* SymbolTable::LookupSubStringAsciiSymbol(Handle<SeqAsciiString> str,
int from,
int length,
Object** s) {
- SubStringAsciiSymbolKey key(str, from, length);
+ SubStringAsciiSymbolKey key(str, from, length, GetHeap()->HashSeed());
return LookupKey(&key, s);
}
MaybeObject* SymbolTable::LookupTwoByteSymbol(Vector<const uc16> str,
Object** s) {
- TwoByteSymbolKey key(str);
+ TwoByteSymbolKey key(str, GetHeap()->HashSeed());
return LookupKey(&key, s);
}
@@ -11815,8 +11783,12 @@ Object* CompilationCacheTable::Lookup(String* src) {
Object* CompilationCacheTable::LookupEval(String* src,
Context* context,
- StrictModeFlag strict_mode) {
- StringSharedKey key(src, context->closure()->shared(), strict_mode);
+ LanguageMode language_mode,
+ int scope_position) {
+ StringSharedKey key(src,
+ context->closure()->shared(),
+ language_mode,
+ scope_position);
int entry = FindEntry(&key);
if (entry == kNotFound) return GetHeap()->undefined_value();
return get(EntryToIndex(entry) + 1);
@@ -11851,10 +11823,12 @@ MaybeObject* CompilationCacheTable::Put(String* src, Object* value) {
MaybeObject* CompilationCacheTable::PutEval(String* src,
Context* context,
- SharedFunctionInfo* value) {
+ SharedFunctionInfo* value,
+ int scope_position) {
StringSharedKey key(src,
context->closure()->shared(),
- value->strict_mode_flag());
+ value->language_mode(),
+ scope_position);
Object* obj;
{ MaybeObject* maybe_obj = EnsureCapacity(1, &key);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
@@ -11898,13 +11872,13 @@ MaybeObject* CompilationCacheTable::PutRegExp(String* src,
void CompilationCacheTable::Remove(Object* value) {
- Object* null_value = GetHeap()->null_value();
+ Object* the_hole_value = GetHeap()->the_hole_value();
for (int entry = 0, size = Capacity(); entry < size; entry++) {
int entry_index = EntryToIndex(entry);
int value_index = entry_index + 1;
if (get(value_index) == value) {
- NoWriteBarrierSet(this, entry_index, null_value);
- NoWriteBarrierSet(this, value_index, null_value);
+ NoWriteBarrierSet(this, entry_index, the_hole_value);
+ NoWriteBarrierSet(this, value_index, the_hole_value);
ElementRemoved();
}
}
@@ -12057,30 +12031,6 @@ MaybeObject* Dictionary<Shape, Key>::EnsureCapacity(int n, Key key) {
}
-void NumberDictionary::RemoveNumberEntries(uint32_t from, uint32_t to) {
- // Do nothing if the interval [from, to) is empty.
- if (from >= to) return;
-
- Heap* heap = GetHeap();
- int removed_entries = 0;
- Object* sentinel = heap->null_value();
- int capacity = Capacity();
- for (int i = 0; i < capacity; i++) {
- Object* key = KeyAt(i);
- if (key->IsNumber()) {
- uint32_t number = static_cast<uint32_t>(key->Number());
- if (from <= number && number < to) {
- SetEntry(i, sentinel, sentinel);
- removed_entries++;
- }
- }
- }
-
- // Update the number of elements.
- ElementsRemoved(removed_entries);
-}
-
-
template<typename Shape, typename Key>
Object* Dictionary<Shape, Key>::DeleteProperty(int entry,
JSReceiver::DeleteMode mode) {
@@ -12090,7 +12040,7 @@ Object* Dictionary<Shape, Key>::DeleteProperty(int entry,
if (details.IsDontDelete() && mode != JSReceiver::FORCE_DELETION) {
return heap->false_value();
}
- SetEntry(entry, heap->null_value(), heap->null_value());
+ SetEntry(entry, heap->the_hole_value(), heap->the_hole_value());
HashTable<Shape, Key>::ElementRemoved();
return heap->true_value();
}
@@ -12123,8 +12073,9 @@ MaybeObject* Dictionary<Shape, Key>::AtPut(Key key, Object* value) {
if (!maybe_k->ToObject(&k)) return maybe_k;
}
PropertyDetails details = PropertyDetails(NONE, NORMAL);
- return Dictionary<Shape, Key>::cast(obj)->
- AddEntry(key, value, details, Shape::Hash(key));
+
+ return Dictionary<Shape, Key>::cast(obj)->AddEntry(key, value, details,
+ Dictionary<Shape, Key>::Hash(key));
}
@@ -12139,8 +12090,9 @@ MaybeObject* Dictionary<Shape, Key>::Add(Key key,
{ MaybeObject* maybe_obj = EnsureCapacity(1, key);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
- return Dictionary<Shape, Key>::cast(obj)->
- AddEntry(key, value, details, Shape::Hash(key));
+
+ return Dictionary<Shape, Key>::cast(obj)->AddEntry(key, value, details,
+ Dictionary<Shape, Key>::Hash(key));
}
@@ -12173,7 +12125,7 @@ MaybeObject* Dictionary<Shape, Key>::AddEntry(Key key,
}
-void NumberDictionary::UpdateMaxNumberKey(uint32_t key) {
+void SeededNumberDictionary::UpdateMaxNumberKey(uint32_t key) {
// If the dictionary requires slow elements an element has already
// been added at a high index.
if (requires_slow_elements()) return;
@@ -12192,31 +12144,65 @@ void NumberDictionary::UpdateMaxNumberKey(uint32_t key) {
}
-MaybeObject* NumberDictionary::AddNumberEntry(uint32_t key,
- Object* value,
- PropertyDetails details) {
+MaybeObject* SeededNumberDictionary::AddNumberEntry(uint32_t key,
+ Object* value,
+ PropertyDetails details) {
UpdateMaxNumberKey(key);
SLOW_ASSERT(this->FindEntry(key) == kNotFound);
return Add(key, value, details);
}
-MaybeObject* NumberDictionary::AtNumberPut(uint32_t key, Object* value) {
+MaybeObject* UnseededNumberDictionary::AddNumberEntry(uint32_t key,
+ Object* value) {
+ SLOW_ASSERT(this->FindEntry(key) == kNotFound);
+ return Add(key, value, PropertyDetails(NONE, NORMAL));
+}
+
+
+MaybeObject* SeededNumberDictionary::AtNumberPut(uint32_t key, Object* value) {
UpdateMaxNumberKey(key);
return AtPut(key, value);
}
-MaybeObject* NumberDictionary::Set(uint32_t key,
- Object* value,
- PropertyDetails details) {
+MaybeObject* UnseededNumberDictionary::AtNumberPut(uint32_t key,
+ Object* value) {
+ return AtPut(key, value);
+}
+
+
+Handle<SeededNumberDictionary> SeededNumberDictionary::Set(
+ Handle<SeededNumberDictionary> dictionary,
+ uint32_t index,
+ Handle<Object> value,
+ PropertyDetails details) {
+ CALL_HEAP_FUNCTION(dictionary->GetIsolate(),
+ dictionary->Set(index, *value, details),
+ SeededNumberDictionary);
+}
+
+
+Handle<UnseededNumberDictionary> UnseededNumberDictionary::Set(
+ Handle<UnseededNumberDictionary> dictionary,
+ uint32_t index,
+ Handle<Object> value) {
+ CALL_HEAP_FUNCTION(dictionary->GetIsolate(),
+ dictionary->Set(index, *value),
+ UnseededNumberDictionary);
+}
+
+
+MaybeObject* SeededNumberDictionary::Set(uint32_t key,
+ Object* value,
+ PropertyDetails details) {
int entry = FindEntry(key);
if (entry == kNotFound) return AddNumberEntry(key, value, details);
// Preserve enumeration index.
details = PropertyDetails(details.attributes(),
details.type(),
DetailsAt(entry).index());
- MaybeObject* maybe_object_key = NumberDictionaryShape::AsObject(key);
+ MaybeObject* maybe_object_key = SeededNumberDictionaryShape::AsObject(key);
Object* object_key;
if (!maybe_object_key->ToObject(&object_key)) return maybe_object_key;
SetEntry(entry, object_key, value, details);
@@ -12224,6 +12210,18 @@ MaybeObject* NumberDictionary::Set(uint32_t key,
}
+MaybeObject* UnseededNumberDictionary::Set(uint32_t key,
+ Object* value) {
+ int entry = FindEntry(key);
+ if (entry == kNotFound) return AddNumberEntry(key, value);
+ MaybeObject* maybe_object_key = UnseededNumberDictionaryShape::AsObject(key);
+ Object* object_key;
+ if (!maybe_object_key->ToObject(&object_key)) return maybe_object_key;
+ SetEntry(entry, object_key, value);
+ return this;
+}
+
+
template<typename Shape, typename Key>
int Dictionary<Shape, Key>::NumberOfElementsFilterAttributes(
@@ -12438,6 +12436,11 @@ MaybeObject* StringDictionary::TransformPropertiesToFastFor(
details.index());
descriptors->Set(next_descriptor++, &d, witness);
} else if (type == CALLBACKS) {
+ if (value->IsAccessorPair()) {
+ MaybeObject* maybe_copy =
+ AccessorPair::cast(value)->CopyWithoutTransitions();
+ if (!maybe_copy->To(&value)) return maybe_copy;
+ }
CallbacksDescriptor d(String::cast(key),
value,
details.attributes(),
@@ -12474,6 +12477,8 @@ MaybeObject* StringDictionary::TransformPropertiesToFastFor(
bool ObjectHashSet::Contains(Object* key) {
+ ASSERT(IsKey(key));
+
// If the object does not have an identity hash, it was never used as a key.
{ MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
if (maybe_hash->ToObjectUnchecked()->IsUndefined()) return false;
@@ -12483,6 +12488,8 @@ bool ObjectHashSet::Contains(Object* key) {
MaybeObject* ObjectHashSet::Add(Object* key) {
+ ASSERT(IsKey(key));
+
// Make sure the key object has an identity hash code.
int hash;
{ MaybeObject* maybe_hash = key->GetHash(ALLOW_CREATION);
@@ -12508,6 +12515,8 @@ MaybeObject* ObjectHashSet::Add(Object* key) {
MaybeObject* ObjectHashSet::Remove(Object* key) {
+ ASSERT(IsKey(key));
+
// If the object does not have an identity hash, it was never used as a key.
{ MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
if (maybe_hash->ToObjectUnchecked()->IsUndefined()) return this;
@@ -12518,13 +12527,15 @@ MaybeObject* ObjectHashSet::Remove(Object* key) {
if (entry == kNotFound) return this;
// Remove entry and try to shrink this hash set.
- set_null(EntryToIndex(entry));
+ set_the_hole(EntryToIndex(entry));
ElementRemoved();
return Shrink(key);
}
Object* ObjectHashTable::Lookup(Object* key) {
+ ASSERT(IsKey(key));
+
// If the object does not have an identity hash, it was never used as a key.
{ MaybeObject* maybe_hash = key->GetHash(OMIT_CREATION);
if (maybe_hash->ToObjectUnchecked()->IsUndefined()) {
@@ -12538,6 +12549,8 @@ Object* ObjectHashTable::Lookup(Object* key) {
MaybeObject* ObjectHashTable::Put(Object* key, Object* value) {
+ ASSERT(IsKey(key));
+
// Make sure the key object has an identity hash code.
int hash;
{ MaybeObject* maybe_hash = key->GetHash(ALLOW_CREATION);
@@ -12577,9 +12590,9 @@ void ObjectHashTable::AddEntry(int entry, Object* key, Object* value) {
}
-void ObjectHashTable::RemoveEntry(int entry, Heap* heap) {
- set_null(heap, EntryToIndex(entry));
- set_null(heap, EntryToIndex(entry) + 1);
+void ObjectHashTable::RemoveEntry(int entry) {
+ set_the_hole(EntryToIndex(entry));
+ set_the_hole(EntryToIndex(entry) + 1);
ElementRemoved();
}
@@ -12837,4 +12850,133 @@ int BreakPointInfo::GetBreakPointCount() {
#endif // ENABLE_DEBUGGER_SUPPORT
+MaybeObject* JSDate::GetField(Object* object, Smi* index) {
+ return JSDate::cast(object)->DoGetField(
+ static_cast<FieldIndex>(index->value()));
+}
+
+
+Object* JSDate::DoGetField(FieldIndex index) {
+ ASSERT(index != kDateValue);
+
+ DateCache* date_cache = GetIsolate()->date_cache();
+
+ if (index < kFirstUncachedField) {
+ Object* stamp = cache_stamp();
+ if (stamp != date_cache->stamp() && stamp->IsSmi()) {
+ // Since the stamp is not NaN, the value is also not NaN.
+ int64_t local_time_ms =
+ date_cache->ToLocal(static_cast<int64_t>(value()->Number()));
+ SetLocalFields(local_time_ms, date_cache);
+ }
+ switch (index) {
+ case kYear: return year();
+ case kMonth: return month();
+ case kDay: return day();
+ case kWeekday: return weekday();
+ case kHour: return hour();
+ case kMinute: return min();
+ case kSecond: return sec();
+ default: UNREACHABLE();
+ }
+ }
+
+ if (index >= kFirstUTCField) {
+ return GetUTCField(index, value()->Number(), date_cache);
+ }
+
+ double time = value()->Number();
+ if (isnan(time)) return GetIsolate()->heap()->nan_value();
+
+ int64_t local_time_ms = date_cache->ToLocal(static_cast<int64_t>(time));
+ int days = DateCache::DaysFromTime(local_time_ms);
+
+ if (index == kDays) return Smi::FromInt(days);
+
+ int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
+ if (index == kMillisecond) return Smi::FromInt(time_in_day_ms % 1000);
+ ASSERT(index == kTimeInDay);
+ return Smi::FromInt(time_in_day_ms);
+}
+
+
+Object* JSDate::GetUTCField(FieldIndex index,
+ double value,
+ DateCache* date_cache) {
+ ASSERT(index >= kFirstUTCField);
+
+ if (isnan(value)) return GetIsolate()->heap()->nan_value();
+
+ int64_t time_ms = static_cast<int64_t>(value);
+
+ if (index == kTimezoneOffset) {
+ return Smi::FromInt(date_cache->TimezoneOffset(time_ms));
+ }
+
+ int days = DateCache::DaysFromTime(time_ms);
+
+ if (index == kWeekdayUTC) return Smi::FromInt(date_cache->Weekday(days));
+
+ if (index <= kDayUTC) {
+ int year, month, day;
+ date_cache->YearMonthDayFromDays(days, &year, &month, &day);
+ if (index == kYearUTC) return Smi::FromInt(year);
+ if (index == kMonthUTC) return Smi::FromInt(month);
+ ASSERT(index == kDayUTC);
+ return Smi::FromInt(day);
+ }
+
+ int time_in_day_ms = DateCache::TimeInDay(time_ms, days);
+ switch (index) {
+ case kHourUTC: return Smi::FromInt(time_in_day_ms / (60 * 60 * 1000));
+ case kMinuteUTC: return Smi::FromInt((time_in_day_ms / (60 * 1000)) % 60);
+ case kSecondUTC: return Smi::FromInt((time_in_day_ms / 1000) % 60);
+ case kMillisecondUTC: return Smi::FromInt(time_in_day_ms % 1000);
+ case kDaysUTC: return Smi::FromInt(days);
+ case kTimeInDayUTC: return Smi::FromInt(time_in_day_ms);
+ default: UNREACHABLE();
+ }
+
+ UNREACHABLE();
+ return NULL;
+}
+
+
+void JSDate::SetValue(Object* value, bool is_value_nan) {
+ set_value(value);
+ if (is_value_nan) {
+ HeapNumber* nan = GetIsolate()->heap()->nan_value();
+ set_cache_stamp(nan, SKIP_WRITE_BARRIER);
+ set_year(nan, SKIP_WRITE_BARRIER);
+ set_month(nan, SKIP_WRITE_BARRIER);
+ set_day(nan, SKIP_WRITE_BARRIER);
+ set_hour(nan, SKIP_WRITE_BARRIER);
+ set_min(nan, SKIP_WRITE_BARRIER);
+ set_sec(nan, SKIP_WRITE_BARRIER);
+ set_weekday(nan, SKIP_WRITE_BARRIER);
+ } else {
+ set_cache_stamp(Smi::FromInt(DateCache::kInvalidStamp), SKIP_WRITE_BARRIER);
+ }
+}
+
+
+void JSDate::SetLocalFields(int64_t local_time_ms, DateCache* date_cache) {
+ int days = DateCache::DaysFromTime(local_time_ms);
+ int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
+ int year, month, day;
+ date_cache->YearMonthDayFromDays(days, &year, &month, &day);
+ int weekday = date_cache->Weekday(days);
+ int hour = time_in_day_ms / (60 * 60 * 1000);
+ int min = (time_in_day_ms / (60 * 1000)) % 60;
+ int sec = (time_in_day_ms / 1000) % 60;
+ set_cache_stamp(date_cache->stamp());
+ set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER);
+ set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER);
+ set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER);
+ set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER);
+ set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER);
+ set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER);
+ set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER);
+}
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/objects.h b/src/3rdparty/v8/src/objects.h
index 0c0dd8f..76d8351 100644
--- a/src/3rdparty/v8/src/objects.h
+++ b/src/3rdparty/v8/src/objects.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -31,6 +31,7 @@
#include "allocation.h"
#include "builtins.h"
#include "list.h"
+#include "property-details.h"
#include "smart-array-pointer.h"
#include "unicode-inl.h"
#if V8_TARGET_ARCH_ARM
@@ -63,6 +64,7 @@
// - JSBuiltinsObject
// - JSGlobalProxy
// - JSValue
+// - JSDate
// - JSMessageObject
// - JSProxy
// - JSFunctionProxy
@@ -78,7 +80,7 @@
// - MapCache
// - Context
// - JSFunctionResultCache
-// - SerializedScopeInfo
+// - ScopeInfo
// - FixedDoubleArray
// - ExternalArray
// - ExternalPixelArray
@@ -106,6 +108,7 @@
// - SharedFunctionInfo
// - Struct
// - AccessorInfo
+// - AccessorPair
// - AccessCheckInfo
// - InterceptorInfo
// - CallHandlerInfo
@@ -124,18 +127,6 @@
// HeapObject: [32 bit direct pointer] (4 byte aligned) | 01
// Failure: [30 bit signed int] 11
-// Ecma-262 3rd 8.6.1
-enum PropertyAttributes {
- NONE = v8::None,
- READ_ONLY = v8::ReadOnly,
- DONT_ENUM = v8::DontEnum,
- DONT_DELETE = v8::DontDelete,
- ABSENT = 16 // Used in runtime to indicate a property is absent.
- // ABSENT can never be stored in or returned from a descriptor's attributes
- // bitfield. It is only used as a return value meaning the attributes of
- // a non-existent property.
-};
-
namespace v8 {
namespace internal {
@@ -173,76 +164,22 @@ enum ElementsKind {
LAST_ELEMENTS_KIND = EXTERNAL_PIXEL_ELEMENTS
};
-static const int kElementsKindCount =
- LAST_ELEMENTS_KIND - FIRST_ELEMENTS_KIND + 1;
-
-void PrintElementsKind(FILE* out, ElementsKind kind);
-
-// PropertyDetails captures type and attributes for a property.
-// They are used both in property dictionaries and instance descriptors.
-class PropertyDetails BASE_EMBEDDED {
- public:
- PropertyDetails(PropertyAttributes attributes,
- PropertyType type,
- int index = 0) {
- ASSERT(TypeField::is_valid(type));
- ASSERT(AttributesField::is_valid(attributes));
- ASSERT(StorageField::is_valid(index));
-
- value_ = TypeField::encode(type)
- | AttributesField::encode(attributes)
- | StorageField::encode(index);
-
- ASSERT(type == this->type());
- ASSERT(attributes == this->attributes());
- ASSERT(index == this->index());
- }
-
- // Conversion for storing details as Object*.
- explicit inline PropertyDetails(Smi* smi);
- inline Smi* AsSmi();
-
- PropertyType type() { return TypeField::decode(value_); }
-
- bool IsTransition() {
- PropertyType t = type();
- ASSERT(t != INTERCEPTOR);
- return t == MAP_TRANSITION || t == CONSTANT_TRANSITION ||
- t == ELEMENTS_TRANSITION;
- }
-
- bool IsProperty() {
- return type() < FIRST_PHANTOM_PROPERTY_TYPE;
- }
-
- PropertyAttributes attributes() { return AttributesField::decode(value_); }
-
- int index() { return StorageField::decode(value_); }
-
- inline PropertyDetails AsDeleted();
-
- static bool IsValidIndex(int index) {
- return StorageField::is_valid(index);
- }
-
- bool IsReadOnly() { return (attributes() & READ_ONLY) != 0; }
- bool IsDontDelete() { return (attributes() & DONT_DELETE) != 0; }
- bool IsDontEnum() { return (attributes() & DONT_ENUM) != 0; }
- bool IsDeleted() { return DeletedField::decode(value_) != 0;}
+enum CompareMapMode {
+ REQUIRE_EXACT_MAP,
+ ALLOW_ELEMENT_TRANSITION_MAPS
+};
- // Bit fields in value_ (type, shift, size). Must be public so the
- // constants can be embedded in generated code.
- class TypeField: public BitField<PropertyType, 0, 4> {};
- class AttributesField: public BitField<PropertyAttributes, 4, 3> {};
- class DeletedField: public BitField<uint32_t, 7, 1> {};
- class StorageField: public BitField<uint32_t, 8, 32-8> {};
+enum KeyedAccessGrowMode {
+ DO_NOT_ALLOW_JSARRAY_GROWTH,
+ ALLOW_JSARRAY_GROWTH
+};
- static const int kInitialIndex = 1;
+const int kElementsKindCount = LAST_ELEMENTS_KIND - FIRST_ELEMENTS_KIND + 1;
- private:
- uint32_t value_;
-};
+void PrintElementsKind(FILE* out, ElementsKind kind);
+inline bool IsMoreGeneralElementsKindTransition(ElementsKind from_kind,
+ ElementsKind to_kind);
// Setter that skips the write barrier if mode is SKIP_WRITE_BARRIER.
enum WriteBarrierMode { SKIP_WRITE_BARRIER, UPDATE_WRITE_BARRIER };
@@ -272,7 +209,7 @@ enum CreationFlag {
// Instance size sentinel for objects of variable size.
-static const int kVariableSizeSentinel = 0;
+const int kVariableSizeSentinel = 0;
// All Maps have a field instance_type containing a InstanceType.
@@ -286,7 +223,7 @@ static const int kVariableSizeSentinel = 0;
// encoding is considered TWO_BYTE. It is not mentioned in the name. ASCII
// encoding is mentioned explicitly in the name. Likewise, the default
// representation is considered sequential. It is not mentioned in the
-// name. The other representations (eg, CONS, EXTERNAL) are explicitly
+// name. The other representations (e.g. CONS, EXTERNAL) are explicitly
// mentioned. Finally, the string is either a SYMBOL_TYPE (if it is a
// symbol) or a STRING_TYPE (if it is not a symbol).
//
@@ -306,6 +243,9 @@ static const int kVariableSizeSentinel = 0;
V(EXTERNAL_SYMBOL_TYPE) \
V(EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE) \
V(EXTERNAL_ASCII_SYMBOL_TYPE) \
+ V(SHORT_EXTERNAL_SYMBOL_TYPE) \
+ V(SHORT_EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE) \
+ V(SHORT_EXTERNAL_ASCII_SYMBOL_TYPE) \
V(STRING_TYPE) \
V(ASCII_STRING_TYPE) \
V(CONS_STRING_TYPE) \
@@ -314,6 +254,9 @@ static const int kVariableSizeSentinel = 0;
V(EXTERNAL_STRING_TYPE) \
V(EXTERNAL_STRING_WITH_ASCII_DATA_TYPE) \
V(EXTERNAL_ASCII_STRING_TYPE) \
+ V(SHORT_EXTERNAL_STRING_TYPE) \
+ V(SHORT_EXTERNAL_STRING_WITH_ASCII_DATA_TYPE) \
+ V(SHORT_EXTERNAL_ASCII_STRING_TYPE) \
V(PRIVATE_EXTERNAL_ASCII_STRING_TYPE) \
\
V(MAP_TYPE) \
@@ -339,6 +282,7 @@ static const int kVariableSizeSentinel = 0;
V(FILLER_TYPE) \
\
V(ACCESSOR_INFO_TYPE) \
+ V(ACCESSOR_PAIR_TYPE) \
V(ACCESS_CHECK_INFO_TYPE) \
V(INTERCEPTOR_INFO_TYPE) \
V(CALL_HANDLER_INFO_TYPE) \
@@ -349,6 +293,8 @@ static const int kVariableSizeSentinel = 0;
V(SCRIPT_TYPE) \
V(CODE_CACHE_TYPE) \
V(POLYMORPHIC_CODE_CACHE_TYPE) \
+ V(TYPE_FEEDBACK_INFO_TYPE) \
+ V(ALIASED_ARGUMENTS_ENTRY_TYPE) \
\
V(FIXED_ARRAY_TYPE) \
V(FIXED_DOUBLE_ARRAY_TYPE) \
@@ -357,6 +303,7 @@ static const int kVariableSizeSentinel = 0;
V(JS_MESSAGE_OBJECT_TYPE) \
\
V(JS_VALUE_TYPE) \
+ V(JS_DATE_TYPE) \
V(JS_OBJECT_TYPE) \
V(JS_CONTEXT_EXTENSION_OBJECT_TYPE) \
V(JS_GLOBAL_OBJECT_TYPE) \
@@ -414,6 +361,18 @@ static const int kVariableSizeSentinel = 0;
ExternalAsciiString::kSize, \
external_ascii_symbol, \
ExternalAsciiSymbol) \
+ V(SHORT_EXTERNAL_SYMBOL_TYPE, \
+ ExternalTwoByteString::kShortSize, \
+ short_external_symbol, \
+ ShortExternalSymbol) \
+ V(SHORT_EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE, \
+ ExternalTwoByteString::kShortSize, \
+ short_external_symbol_with_ascii_data, \
+ ShortExternalSymbolWithAsciiData) \
+ V(SHORT_EXTERNAL_ASCII_SYMBOL_TYPE, \
+ ExternalAsciiString::kShortSize, \
+ short_external_ascii_symbol, \
+ ShortExternalAsciiSymbol) \
V(STRING_TYPE, \
kVariableSizeSentinel, \
string, \
@@ -449,7 +408,19 @@ static const int kVariableSizeSentinel = 0;
V(EXTERNAL_ASCII_STRING_TYPE, \
ExternalAsciiString::kSize, \
external_ascii_string, \
- ExternalAsciiString)
+ ExternalAsciiString) \
+ V(SHORT_EXTERNAL_STRING_TYPE, \
+ ExternalTwoByteString::kShortSize, \
+ short_external_string, \
+ ShortExternalString) \
+ V(SHORT_EXTERNAL_STRING_WITH_ASCII_DATA_TYPE, \
+ ExternalTwoByteString::kShortSize, \
+ short_external_string_with_ascii_data, \
+ ShortExternalStringWithAsciiData) \
+ V(SHORT_EXTERNAL_ASCII_STRING_TYPE, \
+ ExternalAsciiString::kShortSize, \
+ short_external_ascii_string, \
+ ShortExternalAsciiString)
// A struct is a simple object a set of object-valued fields. Including an
// object type in this causes the compiler to generate most of the boilerplate
@@ -462,6 +433,7 @@ static const int kVariableSizeSentinel = 0;
// manually.
#define STRUCT_LIST_ALL(V) \
V(ACCESSOR_INFO, AccessorInfo, accessor_info) \
+ V(ACCESSOR_PAIR, AccessorPair, accessor_pair) \
V(ACCESS_CHECK_INFO, AccessCheckInfo, access_check_info) \
V(INTERCEPTOR_INFO, InterceptorInfo, interceptor_info) \
V(CALL_HANDLER_INFO, CallHandlerInfo, call_handler_info) \
@@ -471,7 +443,9 @@ static const int kVariableSizeSentinel = 0;
V(TYPE_SWITCH_INFO, TypeSwitchInfo, type_switch_info) \
V(SCRIPT, Script, script) \
V(CODE_CACHE, CodeCache, code_cache) \
- V(POLYMORPHIC_CODE_CACHE, PolymorphicCodeCache, polymorphic_code_cache)
+ V(POLYMORPHIC_CODE_CACHE, PolymorphicCodeCache, polymorphic_code_cache) \
+ V(TYPE_FEEDBACK_INFO, TypeFeedbackInfo, type_feedback_info) \
+ V(ALIASED_ARGUMENTS_ENTRY, AliasedArgumentsEntry, aliased_arguments_entry)
#ifdef ENABLE_DEBUGGER_SUPPORT
#define STRUCT_LIST_DEBUGGER(V) \
@@ -529,10 +503,15 @@ const uint32_t kSlicedNotConsMask = kSlicedStringTag & ~kConsStringTag;
STATIC_ASSERT(IS_POWER_OF_TWO(kSlicedNotConsMask) && kSlicedNotConsMask != 0);
// If bit 7 is clear, then bit 3 indicates whether this two-byte
-// string actually contains ascii data.
+// string actually contains ASCII data.
const uint32_t kAsciiDataHintMask = 0x08;
const uint32_t kAsciiDataHintTag = 0x08;
+// If bit 7 is clear and string representation indicates an external string,
+// then bit 4 indicates whether the data pointer is cached.
+const uint32_t kShortExternalStringMask = 0x10;
+const uint32_t kShortExternalStringTag = 0x10;
+
// A ConsString with an empty string as the right side is a candidate
// for being shortcut by the garbage collector unless it is a
@@ -552,6 +531,13 @@ enum InstanceType {
ASCII_SYMBOL_TYPE = kAsciiStringTag | kSymbolTag | kSeqStringTag,
CONS_SYMBOL_TYPE = kTwoByteStringTag | kSymbolTag | kConsStringTag,
CONS_ASCII_SYMBOL_TYPE = kAsciiStringTag | kSymbolTag | kConsStringTag,
+ SHORT_EXTERNAL_SYMBOL_TYPE = kTwoByteStringTag | kSymbolTag |
+ kExternalStringTag | kShortExternalStringTag,
+ SHORT_EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE =
+ kTwoByteStringTag | kSymbolTag | kExternalStringTag |
+ kAsciiDataHintTag | kShortExternalStringTag,
+ SHORT_EXTERNAL_ASCII_SYMBOL_TYPE = kAsciiStringTag | kExternalStringTag |
+ kSymbolTag | kShortExternalStringTag,
EXTERNAL_SYMBOL_TYPE = kTwoByteStringTag | kSymbolTag | kExternalStringTag,
EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE =
kTwoByteStringTag | kSymbolTag | kExternalStringTag | kAsciiDataHintTag,
@@ -563,6 +549,13 @@ enum InstanceType {
CONS_ASCII_STRING_TYPE = kAsciiStringTag | kConsStringTag,
SLICED_STRING_TYPE = kTwoByteStringTag | kSlicedStringTag,
SLICED_ASCII_STRING_TYPE = kAsciiStringTag | kSlicedStringTag,
+ SHORT_EXTERNAL_STRING_TYPE =
+ kTwoByteStringTag | kExternalStringTag | kShortExternalStringTag,
+ SHORT_EXTERNAL_STRING_WITH_ASCII_DATA_TYPE =
+ kTwoByteStringTag | kExternalStringTag |
+ kAsciiDataHintTag | kShortExternalStringTag,
+ SHORT_EXTERNAL_ASCII_STRING_TYPE =
+ kAsciiStringTag | kExternalStringTag | kShortExternalStringTag,
EXTERNAL_STRING_TYPE = kTwoByteStringTag | kExternalStringTag,
EXTERNAL_STRING_WITH_ASCII_DATA_TYPE =
kTwoByteStringTag | kExternalStringTag | kAsciiDataHintTag,
@@ -596,6 +589,7 @@ enum InstanceType {
// Structs.
ACCESSOR_INFO_TYPE,
+ ACCESSOR_PAIR_TYPE,
ACCESS_CHECK_INFO_TYPE,
INTERCEPTOR_INFO_TYPE,
CALL_HANDLER_INFO_TYPE,
@@ -606,6 +600,8 @@ enum InstanceType {
SCRIPT_TYPE,
CODE_CACHE_TYPE,
POLYMORPHIC_CODE_CACHE_TYPE,
+ TYPE_FEEDBACK_INFO_TYPE,
+ ALIASED_ARGUMENTS_ENTRY_TYPE,
// The following two instance types are only used when ENABLE_DEBUGGER_SUPPORT
// is defined. However as include/v8.h contain some of the instance type
// constants always having them avoids them getting different numbers
@@ -627,6 +623,7 @@ enum InstanceType {
JS_PROXY_TYPE, // LAST_JS_PROXY_TYPE
JS_VALUE_TYPE, // FIRST_JS_OBJECT_TYPE
+ JS_DATE_TYPE,
JS_OBJECT_TYPE,
JS_CONTEXT_EXTENSION_OBJECT_TYPE,
JS_GLOBAL_OBJECT_TYPE,
@@ -675,8 +672,8 @@ enum InstanceType {
NUM_OF_CALLABLE_SPEC_OBJECT_TYPES = 2
};
-static const int kExternalArrayTypeCount = LAST_EXTERNAL_ARRAY_TYPE -
- FIRST_EXTERNAL_ARRAY_TYPE + 1;
+const int kExternalArrayTypeCount =
+ LAST_EXTERNAL_ARRAY_TYPE - FIRST_EXTERNAL_ARRAY_TYPE + 1;
STATIC_CHECK(JS_OBJECT_TYPE == Internals::kJSObjectType);
STATIC_CHECK(FIRST_NONSTRING_TYPE == Internals::kFirstNonstringType);
@@ -810,16 +807,18 @@ class MaybeObject BASE_EMBEDDED {
V(DescriptorArray) \
V(DeoptimizationInputData) \
V(DeoptimizationOutputData) \
+ V(TypeFeedbackCells) \
V(FixedArray) \
V(FixedDoubleArray) \
V(Context) \
V(GlobalContext) \
- V(SerializedScopeInfo) \
+ V(ScopeInfo) \
V(JSFunction) \
V(Code) \
V(Oddball) \
V(SharedFunctionInfo) \
V(JSValue) \
+ V(JSDate) \
V(JSMessageObject) \
V(StringWrapper) \
V(Foreign) \
@@ -861,6 +860,8 @@ class JSReceiver;
class Object : public MaybeObject {
public:
// Type testing.
+ bool IsObject() { return true; }
+
#define IS_TYPE_FUNCTION_DECL(type_) inline bool Is##type_();
OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
@@ -894,6 +895,7 @@ class Object : public MaybeObject {
// Extract the number.
inline double Number();
+ inline bool IsNaN();
// Returns true if the object is of the correct type to be used as a
// implementation of a JSObject's elements.
@@ -939,10 +941,11 @@ class Object : public MaybeObject {
JSReceiver* getter);
static Handle<Object> GetElement(Handle<Object> object, uint32_t index);
- inline MaybeObject* GetElement(uint32_t index);
+ MUST_USE_RESULT inline MaybeObject* GetElement(uint32_t index);
// For use when we know that no exception can be thrown.
inline Object* GetElementNoExceptionThrown(uint32_t index);
- MaybeObject* GetElementWithReceiver(Object* receiver, uint32_t index);
+ MUST_USE_RESULT MaybeObject* GetElementWithReceiver(Object* receiver,
+ uint32_t index);
// Return the object's prototype (might be Heap::null_value()).
Object* GetPrototype();
@@ -1022,7 +1025,8 @@ class Smi: public Object {
void SmiVerify();
#endif
- static const int kMinValue = (-1 << (kSmiValueSize - 1));
+ static const int kMinValue =
+ (static_cast<unsigned int>(-1)) << (kSmiValueSize - 1);
static const int kMaxValue = -(kMinValue + 1);
private:
@@ -1103,7 +1107,7 @@ class Failure: public MaybeObject {
// Heap objects typically have a map pointer in their first word. However,
-// during GC other data (eg, mark bits, forwarding addresses) is sometimes
+// during GC other data (e.g. mark bits, forwarding addresses) is sometimes
// encoded in the first word. The class MapWord is an abstraction of the
// value in a heap object's first word.
class MapWord BASE_EMBEDDED {
@@ -1122,7 +1126,7 @@ class MapWord BASE_EMBEDDED {
// True if this map word is a forwarding address for a scavenge
// collection. Only valid during a scavenge collection (specifically,
- // when all map words are heap object pointers, ie. not during a full GC).
+ // when all map words are heap object pointers, i.e. not during a full GC).
inline bool IsForwardingAddress();
// Create a map word from a forwarding address.
@@ -1157,7 +1161,10 @@ class HeapObject: public Object {
// information.
inline Map* map();
inline void set_map(Map* value);
- inline void set_map_unsafe(Map* value);
+ // The no-write-barrier version. This is OK if the object is white and in
+ // new space, or if the value is an immortal immutable object, like the maps
+ // of primitive (non-JS) objects like strings, heap numbers etc.
+ inline void set_map_no_write_barrier(Map* value);
// During garbage collection, the map word of a heap object does not
// necessarily contain a map pointer.
@@ -1345,6 +1352,30 @@ class HeapNumber: public HeapObject {
};
+enum EnsureElementsMode {
+ DONT_ALLOW_DOUBLE_ELEMENTS,
+ ALLOW_COPIED_DOUBLE_ELEMENTS,
+ ALLOW_CONVERTED_DOUBLE_ELEMENTS
+};
+
+
+// Indicates whether a property should be set or (re)defined. Setting of a
+// property causes attributes to remain unchanged, writability to be checked
+// and callbacks to be called. Defining of a property causes attributes to
+// be updated and callbacks to be overridden.
+enum SetPropertyMode {
+ SET_PROPERTY,
+ DEFINE_PROPERTY
+};
+
+
+// Indicator for one component of an AccessorPair.
+enum AccessorComponent {
+ ACCESSOR_GETTER,
+ ACCESSOR_SETTER
+};
+
+
// JSReceiver includes types on which properties can be defined, i.e.,
// JSObject and JSProxy.
class JSReceiver: public HeapObject {
@@ -1358,12 +1389,16 @@ class JSReceiver: public HeapObject {
// Casting.
static inline JSReceiver* cast(Object* obj);
+ static Handle<Object> SetProperty(Handle<JSReceiver> object,
+ Handle<String> key,
+ Handle<Object> value,
+ PropertyAttributes attributes,
+ StrictModeFlag strict_mode);
// Can cause GC.
MUST_USE_RESULT MaybeObject* SetProperty(String* key,
Object* value,
PropertyAttributes attributes,
- StrictModeFlag strict_mode,
- bool skip_fallback_interceptor = false);
+ StrictModeFlag strict_mode);
MUST_USE_RESULT MaybeObject* SetProperty(LookupResult* result,
String* key,
Object* value,
@@ -1379,6 +1414,7 @@ class JSReceiver: public HeapObject {
// Can cause GC, or return failure if GC is required.
MUST_USE_RESULT MaybeObject* SetElement(uint32_t index,
Object* value,
+ PropertyAttributes attributes,
StrictModeFlag strict_mode,
bool check_prototype);
@@ -1415,8 +1451,8 @@ class JSReceiver: public HeapObject {
// Lookup a property. If found, the result is valid and has
// detailed information.
- void LocalLookup(String* name, LookupResult* result, bool skip_fallback_interceptor = false);
- void Lookup(String* name, LookupResult* result, bool skip_fallback_interceptor = false);
+ void LocalLookup(String* name, LookupResult* result);
+ void Lookup(String* name, LookupResult* result);
protected:
Smi* GenerateIdentityHash();
@@ -1488,8 +1524,12 @@ class JSObject: public JSReceiver {
inline bool HasExternalDoubleElements();
bool HasFastArgumentsElements();
bool HasDictionaryArgumentsElements();
- inline bool AllowsSetElementsLength();
- inline NumberDictionary* element_dictionary(); // Gets slow elements.
+ inline SeededNumberDictionary* element_dictionary(); // Gets slow elements.
+
+ inline void set_map_and_elements(
+ Map* map,
+ FixedArrayBase* value,
+ WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
// Requires: HasFastElements().
MUST_USE_RESULT inline MaybeObject* EnsureWritableFastElements();
@@ -1534,6 +1574,14 @@ class JSObject: public JSReceiver {
Object* value,
PropertyAttributes attributes,
StrictModeFlag strict_mode);
+
+ static Handle<Object> SetLocalPropertyIgnoreAttributes(
+ Handle<JSObject> object,
+ Handle<String> key,
+ Handle<Object> value,
+ PropertyAttributes attributes);
+
+ // Can cause GC.
MUST_USE_RESULT MaybeObject* SetLocalPropertyIgnoreAttributes(
String* key,
Object* value,
@@ -1549,6 +1597,11 @@ class JSObject: public JSReceiver {
// Sets the property value in a normalized object given (key, value, details).
// Handles the special representation of JS global objects.
+ static Handle<Object> SetNormalizedProperty(Handle<JSObject> object,
+ Handle<String> key,
+ Handle<Object> value,
+ PropertyDetails details);
+
MUST_USE_RESULT MaybeObject* SetNormalizedProperty(String* name,
Object* value,
PropertyDetails details);
@@ -1574,31 +1627,37 @@ class JSObject: public JSReceiver {
String* name,
bool continue_search);
+ static void DefineAccessor(Handle<JSObject> object,
+ Handle<String> name,
+ Handle<Object> getter,
+ Handle<Object> setter,
+ PropertyAttributes attributes);
MUST_USE_RESULT MaybeObject* DefineAccessor(String* name,
- bool is_getter,
- Object* fun,
+ Object* getter,
+ Object* setter,
PropertyAttributes attributes);
- Object* LookupAccessor(String* name, bool is_getter);
+ Object* LookupAccessor(String* name, AccessorComponent component);
MUST_USE_RESULT MaybeObject* DefineAccessor(AccessorInfo* info);
// Used from Object::GetProperty().
- MaybeObject* GetPropertyWithFailedAccessCheck(
+ MUST_USE_RESULT MaybeObject* GetPropertyWithFailedAccessCheck(
Object* receiver,
LookupResult* result,
String* name,
PropertyAttributes* attributes);
- MaybeObject* GetPropertyWithInterceptor(
+ MUST_USE_RESULT MaybeObject* GetPropertyWithInterceptor(
JSReceiver* receiver,
String* name,
PropertyAttributes* attributes);
- MaybeObject* GetPropertyPostInterceptor(
+ MUST_USE_RESULT MaybeObject* GetPropertyPostInterceptor(
+ JSReceiver* receiver,
+ String* name,
+ PropertyAttributes* attributes);
+ MUST_USE_RESULT MaybeObject* GetLocalPropertyPostInterceptor(
JSReceiver* receiver,
String* name,
PropertyAttributes* attributes);
- MaybeObject* GetLocalPropertyPostInterceptor(JSReceiver* receiver,
- String* name,
- PropertyAttributes* attributes);
// Returns true if this is an instance of an api function and has
// been modified since it was created. May give false positives.
@@ -1618,9 +1677,12 @@ class JSObject: public JSReceiver {
// hidden properties.
// Sets a hidden property on this object. Returns this object if successful,
- // undefined if called on a detached proxy, and a failure if a GC
- // is required
- MaybeObject* SetHiddenProperty(String* key, Object* value);
+ // undefined if called on a detached proxy.
+ static Handle<Object> SetHiddenProperty(Handle<JSObject> obj,
+ Handle<String> key,
+ Handle<Object> value);
+ // Returns a failure if a GC is required.
+ MUST_USE_RESULT MaybeObject* SetHiddenProperty(String* key, Object* value);
// Gets the value of a hidden property with the given key. Returns undefined
// if the property doesn't exist (or if called on a detached proxy),
// otherwise returns the value set for the key.
@@ -1631,24 +1693,35 @@ class JSObject: public JSReceiver {
// Returns true if the object has a property with the hidden symbol as name.
bool HasHiddenProperties();
+ static int GetIdentityHash(Handle<JSObject> obj);
MUST_USE_RESULT MaybeObject* GetIdentityHash(CreationFlag flag);
MUST_USE_RESULT MaybeObject* SetIdentityHash(Object* hash, CreationFlag flag);
+ static Handle<Object> DeleteProperty(Handle<JSObject> obj,
+ Handle<String> name);
MUST_USE_RESULT MaybeObject* DeleteProperty(String* name, DeleteMode mode);
+
+ static Handle<Object> DeleteElement(Handle<JSObject> obj, uint32_t index);
MUST_USE_RESULT MaybeObject* DeleteElement(uint32_t index, DeleteMode mode);
inline void ValidateSmiOnlyElements();
- // Makes sure that this object can contain non-smi Object as elements.
- inline MaybeObject* EnsureCanContainNonSmiElements();
+ // Makes sure that this object can contain HeapObject as elements.
+ MUST_USE_RESULT inline MaybeObject* EnsureCanContainHeapObjectElements();
// Makes sure that this object can contain the specified elements.
- inline MaybeObject* EnsureCanContainElements(Object** elements,
- uint32_t count);
- inline MaybeObject* EnsureCanContainElements(FixedArray* elements);
- MaybeObject* EnsureCanContainElements(Arguments* arguments,
- uint32_t first_arg,
- uint32_t arg_count);
+ MUST_USE_RESULT inline MaybeObject* EnsureCanContainElements(
+ Object** elements,
+ uint32_t count,
+ EnsureElementsMode mode);
+ MUST_USE_RESULT inline MaybeObject* EnsureCanContainElements(
+ FixedArrayBase* elements,
+ EnsureElementsMode mode);
+ MUST_USE_RESULT MaybeObject* EnsureCanContainElements(
+ Arguments* arguments,
+ uint32_t first_arg,
+ uint32_t arg_count,
+ EnsureElementsMode mode);
// Do we want to keep the elements in fast case when increasing the
// capacity?
@@ -1659,8 +1732,9 @@ class JSObject: public JSReceiver {
// elements.
bool ShouldConvertToFastElements();
// Returns true if the elements of JSObject contains only values that can be
- // represented in a FixedDoubleArray.
- bool CanConvertToFastDoubleElements();
+ // represented in a FixedDoubleArray and has at least one value that can only
+ // be represented as a double and not a Smi.
+ bool ShouldConvertToFastDoubleElements(bool* has_smi_only_elements);
// Tells whether the index'th element is present.
bool HasElementWithReceiver(JSReceiver* receiver, uint32_t index);
@@ -1692,17 +1766,19 @@ class JSObject: public JSReceiver {
LocalElementType HasLocalElement(uint32_t index);
bool HasElementWithInterceptor(JSReceiver* receiver, uint32_t index);
- bool HasElementPostInterceptor(JSReceiver* receiver, uint32_t index);
MUST_USE_RESULT MaybeObject* SetFastElement(uint32_t index,
Object* value,
StrictModeFlag strict_mode,
bool check_prototype);
- MUST_USE_RESULT MaybeObject* SetDictionaryElement(uint32_t index,
- Object* value,
- StrictModeFlag strict_mode,
- bool check_prototype);
+ MUST_USE_RESULT MaybeObject* SetDictionaryElement(
+ uint32_t index,
+ Object* value,
+ PropertyAttributes attributes,
+ StrictModeFlag strict_mode,
+ bool check_prototype,
+ SetPropertyMode set_mode = SET_PROPERTY);
MUST_USE_RESULT MaybeObject* SetFastDoubleElement(
uint32_t index,
@@ -1710,19 +1786,37 @@ class JSObject: public JSReceiver {
StrictModeFlag strict_mode,
bool check_prototype = true);
- // Set the index'th array element.
+ static Handle<Object> SetOwnElement(Handle<JSObject> object,
+ uint32_t index,
+ Handle<Object> value,
+ StrictModeFlag strict_mode);
+
+ // Empty handle is returned if the element cannot be set to the given value.
+ static MUST_USE_RESULT Handle<Object> SetElement(
+ Handle<JSObject> object,
+ uint32_t index,
+ Handle<Object> value,
+ PropertyAttributes attr,
+ StrictModeFlag strict_mode,
+ SetPropertyMode set_mode = SET_PROPERTY);
+
// A Failure object is returned if GC is needed.
- MUST_USE_RESULT MaybeObject* SetElement(uint32_t index,
- Object* value,
- StrictModeFlag strict_mode,
- bool check_prototype);
+ MUST_USE_RESULT MaybeObject* SetElement(
+ uint32_t index,
+ Object* value,
+ PropertyAttributes attributes,
+ StrictModeFlag strict_mode,
+ bool check_prototype = true,
+ SetPropertyMode set_mode = SET_PROPERTY);
// Returns the index'th element.
// The undefined object if index is out of bounds.
- MaybeObject* GetElementWithInterceptor(Object* receiver, uint32_t index);
+ MUST_USE_RESULT MaybeObject* GetElementWithInterceptor(Object* receiver,
+ uint32_t index);
enum SetFastElementsCapacityMode {
kAllowSmiOnlyElements,
+ kForceSmiOnlyElements,
kDontAllowSmiOnlyElements
};
@@ -1736,7 +1830,6 @@ class JSObject: public JSReceiver {
MUST_USE_RESULT MaybeObject* SetFastDoubleElementsCapacityAndLength(
int capacity,
int length);
- MUST_USE_RESULT MaybeObject* SetSlowElements(Object* length);
// Lookup interceptors are used for handling properties controlled by host
// objects.
@@ -1748,9 +1841,6 @@ class JSObject: public JSReceiver {
bool HasRealElementProperty(uint32_t index);
bool HasRealNamedCallbackProperty(String* key);
- // Initializes the array to a certain length
- MUST_USE_RESULT MaybeObject* SetElementsLength(Object* length);
-
// Get the header size for a JSObject. Used to compute the index of
// internal fields as well as the number of internal fields.
inline int GetHeaderSize();
@@ -1759,9 +1849,7 @@ class JSObject: public JSReceiver {
inline int GetInternalFieldOffset(int index);
inline Object* GetInternalField(int index);
inline void SetInternalField(int index, Object* value);
-
- inline void SetExternalResourceObject(Object *);
- inline Object *GetExternalResourceObject();
+ inline void SetInternalField(int index, Smi* value);
// The following lookup functions skip interceptors.
void LocalLookupRealNamedProperty(String* name, LookupResult* result);
@@ -1774,9 +1862,7 @@ class JSObject: public JSReceiver {
// Returns the number of properties on this object filtering out properties
// with the specified attributes (ignoring interceptors).
- int NumberOfLocalProperties(PropertyAttributes filter);
- // Returns the number of enumerable properties (ignoring interceptors).
- int NumberOfEnumProperties();
+ int NumberOfLocalProperties(PropertyAttributes filter = NONE);
// Fill in details for properties into storage starting at the specified
// index.
void GetLocalPropertyNames(FixedArray* storage, int index);
@@ -1822,8 +1908,14 @@ class JSObject: public JSReceiver {
// map and the ElementsKind set.
static Handle<Map> GetElementsTransitionMap(Handle<JSObject> object,
ElementsKind to_kind);
- MUST_USE_RESULT MaybeObject* GetElementsTransitionMap(
+ inline MUST_USE_RESULT MaybeObject* GetElementsTransitionMap(
+ Isolate* isolate,
ElementsKind elements_kind);
+ MUST_USE_RESULT MaybeObject* GetElementsTransitionMapSlow(
+ ElementsKind elements_kind);
+
+ static Handle<Object> TransitionElementsKind(Handle<JSObject> object,
+ ElementsKind to_kind);
MUST_USE_RESULT MaybeObject* TransitionElementsKind(ElementsKind to_kind);
@@ -1865,12 +1957,19 @@ class JSObject: public JSReceiver {
// representation. If the object is expected to have additional properties
// added this number can be indicated to have the backing store allocated to
// an initial capacity for holding these properties.
+ static void NormalizeProperties(Handle<JSObject> object,
+ PropertyNormalizationMode mode,
+ int expected_additional_properties);
+
MUST_USE_RESULT MaybeObject* NormalizeProperties(
PropertyNormalizationMode mode,
int expected_additional_properties);
- // Convert and update the elements backing store to be a NumberDictionary
- // dictionary. Returns the backing after conversion.
+ // Convert and update the elements backing store to be a
+ // SeededNumberDictionary dictionary. Returns the backing after conversion.
+ static Handle<SeededNumberDictionary> NormalizeElements(
+ Handle<JSObject> object);
+
MUST_USE_RESULT MaybeObject* NormalizeElements();
static void UpdateMapCodeCache(Handle<JSObject> object,
@@ -1881,6 +1980,9 @@ class JSObject: public JSReceiver {
// Transform slow named properties to fast variants.
// Returns failure if allocation failed.
+ static void TransformToFastProperties(Handle<JSObject> object,
+ int unused_property_fields);
+
MUST_USE_RESULT MaybeObject* TransformToFastProperties(
int unused_property_fields);
@@ -1912,6 +2014,7 @@ class JSObject: public JSReceiver {
static inline JSObject* cast(Object* obj);
// Disalow further properties to be added to the object.
+ static Handle<Object> PreventExtensions(Handle<JSObject> object);
MUST_USE_RESULT MaybeObject* PreventExtensions();
@@ -2016,21 +2119,26 @@ class JSObject: public JSReceiver {
Object* structure,
uint32_t index,
Object* holder);
- MaybeObject* SetElementWithCallback(Object* structure,
- uint32_t index,
- Object* value,
- JSObject* holder,
- StrictModeFlag strict_mode);
+ MUST_USE_RESULT MaybeObject* SetElementWithCallback(
+ Object* structure,
+ uint32_t index,
+ Object* value,
+ JSObject* holder,
+ StrictModeFlag strict_mode);
MUST_USE_RESULT MaybeObject* SetElementWithInterceptor(
uint32_t index,
Object* value,
+ PropertyAttributes attributes,
StrictModeFlag strict_mode,
- bool check_prototype);
+ bool check_prototype,
+ SetPropertyMode set_mode);
MUST_USE_RESULT MaybeObject* SetElementWithoutInterceptor(
uint32_t index,
Object* value,
+ PropertyAttributes attributes,
StrictModeFlag strict_mode,
- bool check_prototype);
+ bool check_prototype,
+ SetPropertyMode set_mode);
// Searches the prototype chain for a callback setter and sets the property
// with the setter if it finds one. The '*found' flag indicates whether
@@ -2057,9 +2165,6 @@ class JSObject: public JSReceiver {
bool ReferencesObjectFromElements(FixedArray* elements,
ElementsKind kind,
Object* object);
- bool HasElementInElements(FixedArray* elements,
- ElementsKind kind,
- uint32_t index);
// Returns true if most of the elements backing storage is used.
bool HasDenseElements();
@@ -2076,10 +2181,16 @@ class JSObject: public JSReceiver {
String* name,
Object* structure,
PropertyAttributes attributes);
- MUST_USE_RESULT MaybeObject* DefineGetterSetter(
+ MUST_USE_RESULT MaybeObject* DefineElementAccessor(
+ uint32_t index,
+ Object* getter,
+ Object* setter,
+ PropertyAttributes attributes);
+ MUST_USE_RESULT MaybeObject* DefinePropertyAccessor(
String* name,
+ Object* getter,
+ Object* setter,
PropertyAttributes attributes);
-
void LookupInDescriptor(String* name, LookupResult* result);
// Returns the hidden properties backing store object, currently
@@ -2087,9 +2198,11 @@ class JSObject: public JSReceiver {
// If no hidden properties object has been put on this object,
// return undefined, unless create_if_absent is true, in which case
// a new dictionary is created, added to this object, and returned.
- MaybeObject* GetHiddenPropertiesDictionary(bool create_if_absent);
+ MUST_USE_RESULT MaybeObject* GetHiddenPropertiesDictionary(
+ bool create_if_absent);
// Updates the existing hidden properties dictionary.
- MaybeObject* SetHiddenPropertiesDictionary(StringDictionary* dictionary);
+ MUST_USE_RESULT MaybeObject* SetHiddenPropertiesDictionary(
+ StringDictionary* dictionary);
DISALLOW_IMPLICIT_CONSTRUCTORS(JSObject);
};
@@ -2146,6 +2259,9 @@ class FixedArray: public FixedArrayBase {
// Gives access to raw memory which stores the array's data.
inline Object** data_start();
+ inline Object** GetFirstElementAddress();
+ inline bool ContainsOnlySmisOrHoles();
+
// Copy operations.
MUST_USE_RESULT inline MaybeObject* Copy();
MUST_USE_RESULT MaybeObject* CopySize(int new_length);
@@ -2171,7 +2287,7 @@ class FixedArray: public FixedArrayBase {
// Maximal allowed size, in bytes, of a single FixedArray.
// Prevents overflowing size computations, as well as extreme memory
// consumption.
- static const int kMaxSize = 512 * MB;
+ static const int kMaxSize = 128 * MB * kPointerSize;
// Maximally allowed length of a FixedArray.
static const int kMaxLength = (kMaxSize - kHeaderSize) / kPointerSize;
@@ -2212,6 +2328,13 @@ class FixedArray: public FixedArrayBase {
int index,
Object* value);
+ // Set operation on FixedArray without incremental write barrier. Can
+ // only be used if the object is guaranteed to be white (whiteness witness
+ // is present).
+ static inline void NoIncrementalWriteBarrierSet(FixedArray* array,
+ int index,
+ Object* value);
+
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(FixedArray);
};
@@ -2220,13 +2343,10 @@ class FixedArray: public FixedArrayBase {
// FixedDoubleArray describes fixed-sized arrays with element type double.
class FixedDoubleArray: public FixedArrayBase {
public:
- inline void Initialize(FixedArray* from);
- inline void Initialize(FixedDoubleArray* from);
- inline void Initialize(NumberDictionary* from);
-
// Setter and getter for elements.
inline double get_scalar(int index);
- inline MaybeObject* get(int index);
+ inline int64_t get_representation(int index);
+ MUST_USE_RESULT inline MaybeObject* get(int index);
inline void set(int index, double value);
inline void set_the_hole(int index);
@@ -2340,7 +2460,9 @@ class DescriptorArray: public FixedArray {
// Initialize or change the enum cache,
// using the supplied storage for the small "bridge".
- void SetEnumCache(FixedArray* bridge_storage, FixedArray* new_cache);
+ void SetEnumCache(FixedArray* bridge_storage,
+ FixedArray* new_cache,
+ Object* new_index_cache);
// Accessors for fetching instance descriptor at descriptor number.
inline String* GetKey(int descriptor_number);
@@ -2352,7 +2474,7 @@ class DescriptorArray: public FixedArray {
inline Object* GetCallbacksObject(int descriptor_number);
inline AccessorDescriptor* GetCallbacks(int descriptor_number);
inline bool IsProperty(int descriptor_number);
- inline bool IsTransition(int descriptor_number);
+ inline bool IsTransitionOnly(int descriptor_number);
inline bool IsNullDescriptor(int descriptor_number);
inline bool IsDontEnum(int descriptor_number);
@@ -2371,12 +2493,20 @@ class DescriptorArray: public FixedArray {
Descriptor* desc,
const WhitenessWitness&);
- // Transfer complete descriptor from another descriptor array to
- // this one.
- inline void CopyFrom(int index,
- DescriptorArray* src,
+ // Transfer a complete descriptor from the src descriptor array to the dst
+ // one, dropping map transitions in CALLBACKS.
+ static void CopyFrom(Handle<DescriptorArray> dst,
+ int dst_index,
+ Handle<DescriptorArray> src,
int src_index,
- const WhitenessWitness&);
+ const WhitenessWitness& witness);
+
+ // Transfer a complete descriptor from the src descriptor array to this
+ // descriptor array, dropping map transitions in CALLBACKS.
+ MUST_USE_RESULT MaybeObject* CopyFrom(int dst_index,
+ DescriptorArray* src,
+ int src_index,
+ const WhitenessWitness&);
// Copy the descriptor array, insert a new descriptor and optionally
// remove map transitions. If the descriptor is already present, it is
@@ -2387,8 +2517,8 @@ class DescriptorArray: public FixedArray {
MUST_USE_RESULT MaybeObject* CopyInsert(Descriptor* descriptor,
TransitionFlag transition_flag);
- // Remove all transitions. Return a copy of the array with all transitions
- // removed, or a Failure object if the new array could not be allocated.
+ // Return a copy of the array with all transitions and null descriptors
+ // removed. Return a Failure object in case of an allocation failure.
MUST_USE_RESULT MaybeObject* RemoveTransitions();
// Sort the instance descriptors by the hash codes of their keys.
@@ -2436,9 +2566,10 @@ class DescriptorArray: public FixedArray {
static const int kFirstIndex = 3;
// The length of the "bridge" to the enum cache.
- static const int kEnumCacheBridgeLength = 2;
+ static const int kEnumCacheBridgeLength = 3;
static const int kEnumCacheBridgeEnumIndex = 0;
static const int kEnumCacheBridgeCacheIndex = 1;
+ static const int kEnumCacheBridgeIndicesCacheIndex = 2;
// Layout description.
static const int kBitField3StorageOffset = FixedArray::kHeaderSize;
@@ -2472,6 +2603,20 @@ class DescriptorArray: public FixedArray {
static const int kMaxNumberOfDescriptors = 1024 + 512;
private:
+ // An entry in a DescriptorArray, represented as an (array, index) pair.
+ class Entry {
+ public:
+ inline explicit Entry(DescriptorArray* descs, int index) :
+ descs_(descs), index_(index) { }
+
+ inline PropertyType type() { return descs_->GetType(index_); }
+ inline Object* GetCallbackObject() { return descs_->GetValue(index_); }
+
+ private:
+ DescriptorArray* descs_;
+ int index_;
+ };
+
// Conversion from descriptor number to array indices.
static int ToKeyIndex(int descriptor_number) {
return descriptor_number+kFirstIndex;
@@ -2490,12 +2635,12 @@ class DescriptorArray: public FixedArray {
NULL_DESCRIPTOR;
}
// Swap operation on FixedArray without using write barriers.
- static inline void NoWriteBarrierSwap(FixedArray* array,
- int first,
- int second);
+ static inline void NoIncrementalWriteBarrierSwap(
+ FixedArray* array, int first, int second);
// Swap descriptor first and second.
- inline void NoWriteBarrierSwapDescriptors(int first, int second);
+ inline void NoIncrementalWriteBarrierSwapDescriptors(
+ int first, int second);
FixedArray* GetContentArray() {
return FixedArray::cast(get(kContentArrayIndex));
@@ -2513,7 +2658,7 @@ class DescriptorArray: public FixedArray {
// encountered and stops when unused elements are encountered.
//
// - Elements with key == undefined have not been used yet.
-// - Elements with key == null have been deleted.
+// - Elements with key == the_hole have been deleted.
//
// The hash table class is parameterized with a Shape and a Key.
// Shape must be a class with the following interface:
@@ -2537,9 +2682,44 @@ class DescriptorArray: public FixedArray {
// beginning of the backing storage that can be used for non-element
// information by subclasses.
+template<typename Key>
+class BaseShape {
+ public:
+ static const bool UsesSeed = false;
+ static uint32_t Hash(Key key) { return 0; }
+ static uint32_t SeededHash(Key key, uint32_t seed) {
+ ASSERT(UsesSeed);
+ return Hash(key);
+ }
+ static uint32_t HashForObject(Key key, Object* object) { return 0; }
+ static uint32_t SeededHashForObject(Key key, uint32_t seed, Object* object) {
+ ASSERT(UsesSeed);
+ return HashForObject(key, object);
+ }
+};
+
template<typename Shape, typename Key>
class HashTable: public FixedArray {
public:
+ // Wrapper methods
+ inline uint32_t Hash(Key key) {
+ if (Shape::UsesSeed) {
+ return Shape::SeededHash(key,
+ GetHeap()->HashSeed());
+ } else {
+ return Shape::Hash(key);
+ }
+ }
+
+ inline uint32_t HashForObject(Key key, Object* object) {
+ if (Shape::UsesSeed) {
+ return Shape::SeededHashForObject(key,
+ GetHeap()->HashSeed(), object);
+ } else {
+ return Shape::HashForObject(key, object);
+ }
+ }
+
// Returns the number of elements in the hash table.
int NumberOfElements() {
return Smi::cast(get(kNumberOfElementsIndex))->value();
@@ -2582,10 +2762,10 @@ class HashTable: public FixedArray {
// Returns the key at entry.
Object* KeyAt(int entry) { return get(EntryToIndex(entry)); }
- // Tells whether k is a real key. Null and undefined are not allowed
+ // Tells whether k is a real key. The hole and undefined are not allowed
// as keys and can be used to indicate missing or deleted elements.
bool IsKey(Object* k) {
- return !k->IsNull() && !k->IsUndefined();
+ return !k->IsTheHole() && !k->IsUndefined();
}
// Garbage collection support.
@@ -2681,7 +2861,6 @@ class HashTable: public FixedArray {
};
-
// HashTableKey is an abstract superclass for virtual key behavior.
class HashTableKey {
public:
@@ -2698,7 +2877,8 @@ class HashTableKey {
virtual ~HashTableKey() {}
};
-class SymbolTableShape {
+
+class SymbolTableShape : public BaseShape<HashTableKey*> {
public:
static inline bool IsMatch(HashTableKey* key, Object* value) {
return key->IsMatch(value);
@@ -2757,7 +2937,7 @@ class SymbolTable: public HashTable<SymbolTableShape, HashTableKey*> {
};
-class MapCacheShape {
+class MapCacheShape : public BaseShape<HashTableKey*> {
public:
static inline bool IsMatch(HashTableKey* key, Object* value) {
return key->IsMatch(value);
@@ -2804,22 +2984,12 @@ class Dictionary: public HashTable<Shape, Key> {
// Returns the value at entry.
Object* ValueAt(int entry) {
- return this->get(HashTable<Shape, Key>::EntryToIndex(entry)+1);
+ return this->get(HashTable<Shape, Key>::EntryToIndex(entry) + 1);
}
// Set the value for entry.
- // Returns false if the put wasn't performed due to property being read only.
- // Returns true on successful put.
- bool ValueAtPut(int entry, Object* value) {
- // Check that this value can actually be written.
- PropertyDetails details = DetailsAt(entry);
- // If a value has not been initilized we allow writing to it even if
- // it is read only (a declared const that has not been initialized).
- if (details.IsReadOnly() && !ValueAt(entry)->IsTheHole()) {
- return false;
- }
+ void ValueAtPut(int entry, Object* value) {
this->set(HashTable<Shape, Key>::EntryToIndex(entry) + 1, value);
- return true;
}
// Returns the property details for the property at entry.
@@ -2913,7 +3083,7 @@ class Dictionary: public HashTable<Shape, Key> {
};
-class StringDictionaryShape {
+class StringDictionaryShape : public BaseShape<String*> {
public:
static inline bool IsMatch(String* key, Object* other);
static inline uint32_t Hash(String* key);
@@ -2943,26 +3113,47 @@ class StringDictionary: public Dictionary<StringDictionaryShape, String*> {
// Find entry for key, otherwise return kNotFound. Optimized version of
// HashTable::FindEntry.
int FindEntry(String* key);
+
+ bool ContainsTransition(int entry);
};
-class NumberDictionaryShape {
+class NumberDictionaryShape : public BaseShape<uint32_t> {
public:
static inline bool IsMatch(uint32_t key, Object* other);
- static inline uint32_t Hash(uint32_t key);
- static inline uint32_t HashForObject(uint32_t key, Object* object);
MUST_USE_RESULT static inline MaybeObject* AsObject(uint32_t key);
- static const int kPrefixSize = 2;
static const int kEntrySize = 3;
static const bool kIsEnumerable = false;
};
-class NumberDictionary: public Dictionary<NumberDictionaryShape, uint32_t> {
+class SeededNumberDictionaryShape : public NumberDictionaryShape {
+ public:
+ static const bool UsesSeed = true;
+ static const int kPrefixSize = 2;
+
+ static inline uint32_t SeededHash(uint32_t key, uint32_t seed);
+ static inline uint32_t SeededHashForObject(uint32_t key,
+ uint32_t seed,
+ Object* object);
+};
+
+
+class UnseededNumberDictionaryShape : public NumberDictionaryShape {
public:
- static NumberDictionary* cast(Object* obj) {
+ static const int kPrefixSize = 0;
+
+ static inline uint32_t Hash(uint32_t key);
+ static inline uint32_t HashForObject(uint32_t key, Object* object);
+};
+
+
+class SeededNumberDictionary
+ : public Dictionary<SeededNumberDictionaryShape, uint32_t> {
+ public:
+ static SeededNumberDictionary* cast(Object* obj) {
ASSERT(obj->IsDictionary());
- return reinterpret_cast<NumberDictionary*>(obj);
+ return reinterpret_cast<SeededNumberDictionary*>(obj);
}
// Type specific at put (default NONE attributes is used when adding).
@@ -2972,6 +3163,13 @@ class NumberDictionary: public Dictionary<NumberDictionaryShape, uint32_t> {
PropertyDetails details);
// Set an existing entry or add a new one if needed.
+ // Return the updated dictionary.
+ MUST_USE_RESULT static Handle<SeededNumberDictionary> Set(
+ Handle<SeededNumberDictionary> dictionary,
+ uint32_t index,
+ Handle<Object> value,
+ PropertyDetails details);
+
MUST_USE_RESULT MaybeObject* Set(uint32_t key,
Object* value,
PropertyDetails details);
@@ -2991,9 +3189,6 @@ class NumberDictionary: public Dictionary<NumberDictionaryShape, uint32_t> {
// requires_slow_elements returns false.
inline uint32_t max_number_key();
- // Remove all entries were key is a number and (from <= key && key < to).
- void RemoveNumberEntries(uint32_t from, uint32_t to);
-
// Bit masks.
static const int kRequiresSlowElementsMask = 1;
static const int kRequiresSlowElementsTagSize = 1;
@@ -3001,8 +3196,31 @@ class NumberDictionary: public Dictionary<NumberDictionaryShape, uint32_t> {
};
+class UnseededNumberDictionary
+ : public Dictionary<UnseededNumberDictionaryShape, uint32_t> {
+ public:
+ static UnseededNumberDictionary* cast(Object* obj) {
+ ASSERT(obj->IsDictionary());
+ return reinterpret_cast<UnseededNumberDictionary*>(obj);
+ }
+
+ // Type specific at put (default NONE attributes is used when adding).
+ MUST_USE_RESULT MaybeObject* AtNumberPut(uint32_t key, Object* value);
+ MUST_USE_RESULT MaybeObject* AddNumberEntry(uint32_t key, Object* value);
+
+ // Set an existing entry or add a new one if needed.
+ // Return the updated dictionary.
+ MUST_USE_RESULT static Handle<UnseededNumberDictionary> Set(
+ Handle<UnseededNumberDictionary> dictionary,
+ uint32_t index,
+ Handle<Object> value);
+
+ MUST_USE_RESULT MaybeObject* Set(uint32_t key, Object* value);
+};
+
+
template <int entrysize>
-class ObjectHashTableShape {
+class ObjectHashTableShape : public BaseShape<Object*> {
public:
static inline bool IsMatch(Object* key, Object* other);
static inline uint32_t Hash(Object* key);
@@ -3054,8 +3272,7 @@ class ObjectHashTable: public HashTable<ObjectHashTableShape<2>, Object*> {
friend class MarkCompactCollector;
void AddEntry(int entry, Object* key, Object* value);
- void RemoveEntry(int entry, Heap* heap);
- inline void RemoveEntry(int entry);
+ void RemoveEntry(int entry);
// Returns the index to the value of an entry.
static inline int EntryToValueIndex(int entry) {
@@ -3102,14 +3319,16 @@ class JSFunctionResultCache: public FixedArray {
};
+// ScopeInfo represents information about different scopes of a source
+// program and the allocation of the scope's variables. Scope information
+// is stored in a compressed form in ScopeInfo objects and is used
+// at runtime (stack dumps, deoptimization, etc.).
+
// This object provides quick access to scope info details for runtime
-// routines w/o the need to explicitly create a ScopeInfo object.
-class SerializedScopeInfo : public FixedArray {
- public :
- static SerializedScopeInfo* cast(Object* object) {
- ASSERT(object->IsSerializedScopeInfo());
- return reinterpret_cast<SerializedScopeInfo*>(object);
- }
+// routines.
+class ScopeInfo : public FixedArray {
+ public:
+ static inline ScopeInfo* cast(Object* object);
// Return the type of this scope.
ScopeType Type();
@@ -3117,24 +3336,63 @@ class SerializedScopeInfo : public FixedArray {
// Does this scope call eval?
bool CallsEval();
- // Is this scope a strict mode scope?
- bool IsStrictMode();
+ // Return the language mode of this scope.
+ LanguageMode language_mode();
- // Is this scope a qml mode scope?
- bool IsQmlMode();
+ // Does this scope make a non-strict eval call?
+ bool CallsNonStrictEval() {
+ return CallsEval() && (language_mode() == CLASSIC_MODE);
+ }
- // Return the number of stack slots for code.
- int NumberOfStackSlots();
+ // Return the total number of locals allocated on the stack and in the
+ // context. This includes the parameters that are allocated in the context.
+ int LocalCount();
- // Return the number of context slots for code.
- int NumberOfContextSlots();
+ // Return the number of stack slots for code. This number consists of two
+ // parts:
+ // 1. One stack slot per stack allocated local.
+ // 2. One stack slot for the function name if it is stack allocated.
+ int StackSlotCount();
- // Return if this has context slots besides MIN_CONTEXT_SLOTS;
+ // Return the number of context slots for code if a context is allocated. This
+ // number consists of three parts:
+ // 1. Size of fixed header for every context: Context::MIN_CONTEXT_SLOTS
+ // 2. One context slot per context allocated local.
+ // 3. One context slot for the function name if it is context allocated.
+ // Parameters allocated in the context count as context allocated locals. If
+ // no contexts are allocated for this scope ContextLength returns 0.
+ int ContextLength();
+
+ // Is this scope the scope of a named function expression?
+ bool HasFunctionName();
+
+ // Return if this has context allocated locals.
bool HasHeapAllocatedLocals();
// Return if contexts are allocated for this scope.
bool HasContext();
+ // Return the function_name if present.
+ String* FunctionName();
+
+ // Return the name of the given parameter.
+ String* ParameterName(int var);
+
+ // Return the name of the given local.
+ String* LocalName(int var);
+
+ // Return the name of the given stack local.
+ String* StackLocalName(int var);
+
+ // Return the name of the given context local.
+ String* ContextLocalName(int var);
+
+ // Return the mode of the given context local.
+ VariableMode ContextLocalMode(int var);
+
+ // Return the initialization flag of the given context local.
+ InitializationFlag ContextLocalInitFlag(int var);
+
// Lookup support for serialized scope info. Returns the
// the stack slot index for a given slot name if the slot is
// present; otherwise returns a value < 0. The name must be a symbol
@@ -3146,7 +3404,9 @@ class SerializedScopeInfo : public FixedArray {
// returns a value < 0. The name must be a symbol (canonicalized).
// If the slot is present and mode != NULL, sets *mode to the corresponding
// mode for that variable.
- int ContextSlotIndex(String* name, VariableMode* mode);
+ int ContextSlotIndex(String* name,
+ VariableMode* mode,
+ InitializationFlag* init_flag);
// Lookup support for serialized scope info. Returns the
// parameter index for a given parameter name if the parameter is present;
@@ -3159,17 +3419,104 @@ class SerializedScopeInfo : public FixedArray {
// must be a symbol (canonicalized).
int FunctionContextSlotIndex(String* name, VariableMode* mode);
- static Handle<SerializedScopeInfo> Create(Scope* scope);
+ static Handle<ScopeInfo> Create(Scope* scope);
// Serializes empty scope info.
- static SerializedScopeInfo* Empty();
+ static ScopeInfo* Empty();
+
+#ifdef DEBUG
+ void Print();
+#endif
+
+ // The layout of the static part of a ScopeInfo is as follows. Each entry is
+ // numeric and occupies one array slot.
+ // 1. A set of properties of the scope
+ // 2. The number of parameters. This only applies to function scopes. For
+ // non-function scopes this is 0.
+ // 3. The number of non-parameter variables allocated on the stack.
+ // 4. The number of non-parameter and parameter variables allocated in the
+ // context.
+#define FOR_EACH_NUMERIC_FIELD(V) \
+ V(Flags) \
+ V(ParameterCount) \
+ V(StackLocalCount) \
+ V(ContextLocalCount)
+
+#define FIELD_ACCESSORS(name) \
+ void Set##name(int value) { \
+ set(k##name, Smi::FromInt(value)); \
+ } \
+ int name() { \
+ if (length() > 0) { \
+ return Smi::cast(get(k##name))->value(); \
+ } else { \
+ return 0; \
+ } \
+ }
+ FOR_EACH_NUMERIC_FIELD(FIELD_ACCESSORS)
+#undef FIELD_ACCESSORS
private:
- Object** ContextEntriesAddr();
+ enum {
+#define DECL_INDEX(name) k##name,
+ FOR_EACH_NUMERIC_FIELD(DECL_INDEX)
+#undef DECL_INDEX
+#undef FOR_EACH_NUMERIC_FIELD
+ kVariablePartIndex
+ };
- Object** ParameterEntriesAddr();
+ // The layout of the variable part of a ScopeInfo is as follows:
+ // 1. ParameterEntries:
+ // This part stores the names of the parameters for function scopes. One
+ // slot is used per parameter, so in total this part occupies
+ // ParameterCount() slots in the array. For other scopes than function
+ // scopes ParameterCount() is 0.
+ // 2. StackLocalEntries:
+ // Contains the names of local variables that are allocated on the stack,
+ // in increasing order of the stack slot index. One slot is used per stack
+ // local, so in total this part occupies StackLocalCount() slots in the
+ // array.
+ // 3. ContextLocalNameEntries:
+ // Contains the names of local variables and parameters that are allocated
+ // in the context. They are stored in increasing order of the context slot
+ // index starting with Context::MIN_CONTEXT_SLOTS. One slot is used per
+ // context local, so in total this part occupies ContextLocalCount() slots
+ // in the array.
+ // 4. ContextLocalInfoEntries:
+ // Contains the variable modes and initialization flags corresponding to
+ // the context locals in ContextLocalNameEntries. One slot is used per
+ // context local, so in total this part occupies ContextLocalCount()
+ // slots in the array.
+ // 5. FunctionNameEntryIndex:
+ // If the scope belongs to a named function expression this part contains
+ // information about the function variable. It always occupies two array
+ // slots: a. The name of the function variable.
+ // b. The context or stack slot index for the variable.
+ int ParameterEntriesIndex();
+ int StackLocalEntriesIndex();
+ int ContextLocalNameEntriesIndex();
+ int ContextLocalInfoEntriesIndex();
+ int FunctionNameEntryIndex();
+
+ // Location of the function variable for named function expressions.
+ enum FunctionVariableInfo {
+ NONE, // No function name present.
+ STACK, // Function
+ CONTEXT,
+ UNUSED
+ };
- Object** StackSlotEntriesAddr();
+ // Properties of scopes.
+ class TypeField: public BitField<ScopeType, 0, 3> {};
+ class CallsEvalField: public BitField<bool, 3, 1> {};
+ class LanguageModeField: public BitField<LanguageMode, 4, 2> {};
+ class FunctionVariableField: public BitField<FunctionVariableInfo, 6, 2> {};
+ class FunctionVariableMode: public BitField<VariableMode, 8, 3> {};
+
+ // BitFields representing the encoded information for context locals in the
+ // ContextLocalInfoEntries part.
+ class ContextLocalMode: public BitField<VariableMode, 0, 3> {};
+ class ContextLocalInitFlag: public BitField<InitializationFlag, 3, 1> {};
};
@@ -3286,9 +3633,6 @@ class FreeSpace: public HeapObject {
static const int kAlignedSize = OBJECT_POINTER_ALIGN(kHeaderSize);
- // Maximal size of a single FreeSpace.
- static const int kMaxSize = 512 * MB;
-
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(FreeSpace);
};
@@ -3344,7 +3688,7 @@ class ExternalPixelArray: public ExternalArray {
// Setter and getter.
inline uint8_t get_scalar(int index);
- inline MaybeObject* get(int index);
+ MUST_USE_RESULT inline MaybeObject* get(int index);
inline void set(int index, uint8_t value);
// This accessor applies the correct conversion from Smi, HeapNumber and
@@ -3373,12 +3717,12 @@ class ExternalByteArray: public ExternalArray {
public:
// Setter and getter.
inline int8_t get_scalar(int index);
- inline MaybeObject* get(int index);
+ MUST_USE_RESULT inline MaybeObject* get(int index);
inline void set(int index, int8_t value);
// This accessor applies the correct conversion from Smi, HeapNumber
// and undefined.
- MaybeObject* SetValue(uint32_t index, Object* value);
+ MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value);
// Casting.
static inline ExternalByteArray* cast(Object* obj);
@@ -3402,12 +3746,12 @@ class ExternalUnsignedByteArray: public ExternalArray {
public:
// Setter and getter.
inline uint8_t get_scalar(int index);
- inline MaybeObject* get(int index);
+ MUST_USE_RESULT inline MaybeObject* get(int index);
inline void set(int index, uint8_t value);
// This accessor applies the correct conversion from Smi, HeapNumber
// and undefined.
- MaybeObject* SetValue(uint32_t index, Object* value);
+ MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value);
// Casting.
static inline ExternalUnsignedByteArray* cast(Object* obj);
@@ -3431,12 +3775,12 @@ class ExternalShortArray: public ExternalArray {
public:
// Setter and getter.
inline int16_t get_scalar(int index);
- inline MaybeObject* get(int index);
+ MUST_USE_RESULT inline MaybeObject* get(int index);
inline void set(int index, int16_t value);
// This accessor applies the correct conversion from Smi, HeapNumber
// and undefined.
- MaybeObject* SetValue(uint32_t index, Object* value);
+ MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value);
// Casting.
static inline ExternalShortArray* cast(Object* obj);
@@ -3460,12 +3804,12 @@ class ExternalUnsignedShortArray: public ExternalArray {
public:
// Setter and getter.
inline uint16_t get_scalar(int index);
- inline MaybeObject* get(int index);
+ MUST_USE_RESULT inline MaybeObject* get(int index);
inline void set(int index, uint16_t value);
// This accessor applies the correct conversion from Smi, HeapNumber
// and undefined.
- MaybeObject* SetValue(uint32_t index, Object* value);
+ MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value);
// Casting.
static inline ExternalUnsignedShortArray* cast(Object* obj);
@@ -3489,12 +3833,12 @@ class ExternalIntArray: public ExternalArray {
public:
// Setter and getter.
inline int32_t get_scalar(int index);
- inline MaybeObject* get(int index);
+ MUST_USE_RESULT inline MaybeObject* get(int index);
inline void set(int index, int32_t value);
// This accessor applies the correct conversion from Smi, HeapNumber
// and undefined.
- MaybeObject* SetValue(uint32_t index, Object* value);
+ MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value);
// Casting.
static inline ExternalIntArray* cast(Object* obj);
@@ -3518,12 +3862,12 @@ class ExternalUnsignedIntArray: public ExternalArray {
public:
// Setter and getter.
inline uint32_t get_scalar(int index);
- inline MaybeObject* get(int index);
+ MUST_USE_RESULT inline MaybeObject* get(int index);
inline void set(int index, uint32_t value);
// This accessor applies the correct conversion from Smi, HeapNumber
// and undefined.
- MaybeObject* SetValue(uint32_t index, Object* value);
+ MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value);
// Casting.
static inline ExternalUnsignedIntArray* cast(Object* obj);
@@ -3547,12 +3891,12 @@ class ExternalFloatArray: public ExternalArray {
public:
// Setter and getter.
inline float get_scalar(int index);
- inline MaybeObject* get(int index);
+ MUST_USE_RESULT inline MaybeObject* get(int index);
inline void set(int index, float value);
// This accessor applies the correct conversion from Smi, HeapNumber
// and undefined.
- MaybeObject* SetValue(uint32_t index, Object* value);
+ MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value);
// Casting.
static inline ExternalFloatArray* cast(Object* obj);
@@ -3576,12 +3920,12 @@ class ExternalDoubleArray: public ExternalArray {
public:
// Setter and getter.
inline double get_scalar(int index);
- inline MaybeObject* get(int index);
+ MUST_USE_RESULT inline MaybeObject* get(int index);
inline void set(int index, double value);
// This accessor applies the correct conversion from Smi, HeapNumber
// and undefined.
- MaybeObject* SetValue(uint32_t index, Object* value);
+ MUST_USE_RESULT MaybeObject* SetValue(uint32_t index, Object* value);
// Casting.
static inline ExternalDoubleArray* cast(Object* obj);
@@ -3622,7 +3966,8 @@ class DeoptimizationInputData: public FixedArray {
static const int kAstIdOffset = 0;
static const int kTranslationIndexOffset = 1;
static const int kArgumentsStackHeightOffset = 2;
- static const int kDeoptEntrySize = 3;
+ static const int kPcOffset = 3;
+ static const int kDeoptEntrySize = 4;
// Simple element accessors.
#define DEFINE_ELEMENT_ACCESSORS(name, type) \
@@ -3639,11 +3984,6 @@ class DeoptimizationInputData: public FixedArray {
DEFINE_ELEMENT_ACCESSORS(OsrAstId, Smi)
DEFINE_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
- // Unchecked accessor to be used during GC.
- FixedArray* UncheckedLiteralArray() {
- return reinterpret_cast<FixedArray*>(get(kLiteralArrayIndex));
- }
-
#undef DEFINE_ELEMENT_ACCESSORS
// Accessors for elements of the ith deoptimization entry.
@@ -3658,6 +3998,7 @@ class DeoptimizationInputData: public FixedArray {
DEFINE_ENTRY_ACCESSORS(AstId, Smi)
DEFINE_ENTRY_ACCESSORS(TranslationIndex, Smi)
DEFINE_ENTRY_ACCESSORS(ArgumentsStackHeight, Smi)
+ DEFINE_ENTRY_ACCESSORS(Pc, Smi)
#undef DEFINE_ENTRY_ACCESSORS
@@ -3717,8 +4058,48 @@ class DeoptimizationOutputData: public FixedArray {
};
-class SafepointEntry;
+// Forward declaration.
+class JSGlobalPropertyCell;
+
+// TypeFeedbackCells is a fixed array used to hold the association between
+// cache cells and AST ids for code generated by the full compiler.
+// The format of the these objects is
+// [i * 2]: Global property cell of ith cache cell.
+// [i * 2 + 1]: Ast ID for ith cache cell.
+class TypeFeedbackCells: public FixedArray {
+ public:
+ int CellCount() { return length() / 2; }
+ static int LengthOfFixedArray(int cell_count) { return cell_count * 2; }
+
+ // Accessors for AST ids associated with cache values.
+ inline Smi* AstId(int index);
+ inline void SetAstId(int index, Smi* id);
+ // Accessors for global property cells holding the cache values.
+ inline JSGlobalPropertyCell* Cell(int index);
+ inline void SetCell(int index, JSGlobalPropertyCell* cell);
+
+ // The object that indicates an uninitialized cache.
+ static inline Handle<Object> UninitializedSentinel(Isolate* isolate);
+
+ // The object that indicates a megamorphic state.
+ static inline Handle<Object> MegamorphicSentinel(Isolate* isolate);
+
+ // A raw version of the uninitialized sentinel that's safe to read during
+ // garbage collection (e.g., for patching the cache).
+ static inline Object* RawUninitializedSentinel(Heap* heap);
+
+ // Casting.
+ static inline TypeFeedbackCells* cast(Object* obj);
+
+ static const int kForInFastCaseMarker = 0;
+ static const int kForInSlowCaseMarker = 1;
+};
+
+
+// Forward declaration.
+class SafepointEntry;
+class TypeFeedbackInfo;
// Code describes objects with on-the-fly generated machine code.
class Code: public HeapObject {
@@ -3784,14 +4165,25 @@ class Code: public HeapObject {
DECL_ACCESSORS(relocation_info, ByteArray)
void InvalidateRelocation();
+ // [handler_table]: Fixed array containing offsets of exception handlers.
+ DECL_ACCESSORS(handler_table, FixedArray)
+
// [deoptimization_data]: Array containing data for deopt.
DECL_ACCESSORS(deoptimization_data, FixedArray)
- // [code_flushing_candidate]: Field only used during garbage
- // collection to hold code flushing candidates. The contents of this
+ // [type_feedback_info]: Struct containing type feedback information.
+ // Will contain either a TypeFeedbackInfo object, or undefined.
+ DECL_ACCESSORS(type_feedback_info, Object)
+
+ // [gc_metadata]: Field used to hold GC related metadata. The contents of this
// field does not have to be traced during garbage collection since
// it is only used by the garbage collector itself.
- DECL_ACCESSORS(next_code_flushing_candidate, Object)
+ DECL_ACCESSORS(gc_metadata, Object)
+
+ // [ic_age]: Inline caching age: the value of the Heap::global_ic_age
+ // at the moment when this object was created.
+ inline void set_ic_age(int count);
+ inline int ic_age();
// Unchecked accessors to be used during GC.
inline ByteArray* unchecked_relocation_info();
@@ -3858,6 +4250,11 @@ class Code: public HeapObject {
inline void set_allow_osr_at_loop_nesting_level(int level);
inline int allow_osr_at_loop_nesting_level();
+ // [profiler_ticks]: For FUNCTION kind, tells for how many profiler ticks
+ // the code object was seen on the stack with no IC patching going on.
+ inline int profiler_ticks();
+ inline void set_profiler_ticks(int ticks);
+
// [stack_slots]: For kind OPTIMIZED_FUNCTION, the number of stack slots
// reserved in the code prologue.
inline unsigned stack_slots();
@@ -3896,8 +4293,8 @@ class Code: public HeapObject {
inline byte to_boolean_state();
inline void set_to_boolean_state(byte value);
- // For kind STUB, major_key == CallFunction, tells whether there is
- // a function cache in the instruction stream.
+ // [has_function_cache]: For kind STUB tells whether there is a function
+ // cache is passed to the stub.
inline bool has_function_cache();
inline void set_has_function_cache(bool flag);
@@ -3911,6 +4308,28 @@ class Code: public HeapObject {
// Find the first map in an IC stub.
Map* FindFirstMap();
+ class ExtraICStateStrictMode: public BitField<StrictModeFlag, 0, 1> {};
+ class ExtraICStateKeyedAccessGrowMode:
+ public BitField<KeyedAccessGrowMode, 1, 1> {}; // NOLINT
+
+ static const int kExtraICStateGrowModeShift = 1;
+
+ static inline StrictModeFlag GetStrictMode(ExtraICState extra_ic_state) {
+ return ExtraICStateStrictMode::decode(extra_ic_state);
+ }
+
+ static inline KeyedAccessGrowMode GetKeyedAccessGrowMode(
+ ExtraICState extra_ic_state) {
+ return ExtraICStateKeyedAccessGrowMode::decode(extra_ic_state);
+ }
+
+ static inline ExtraICState ComputeExtraICState(
+ KeyedAccessGrowMode grow_mode,
+ StrictModeFlag strict_mode) {
+ return ExtraICStateKeyedAccessGrowMode::encode(grow_mode) |
+ ExtraICStateStrictMode::encode(strict_mode);
+ }
+
// Flags operations.
static inline Flags ComputeFlags(
Kind kind,
@@ -4004,6 +4423,7 @@ class Code: public HeapObject {
#ifdef DEBUG
void CodeVerify();
#endif
+ void ClearInlineCaches();
// Max loop nesting marker used to postpose OSR. We don't take loop
// nesting that is deeper than 5 levels into account.
@@ -4012,13 +4432,15 @@ class Code: public HeapObject {
// Layout description.
static const int kInstructionSizeOffset = HeapObject::kHeaderSize;
static const int kRelocationInfoOffset = kInstructionSizeOffset + kIntSize;
+ static const int kHandlerTableOffset = kRelocationInfoOffset + kPointerSize;
static const int kDeoptimizationDataOffset =
- kRelocationInfoOffset + kPointerSize;
- static const int kNextCodeFlushingCandidateOffset =
+ kHandlerTableOffset + kPointerSize;
+ static const int kTypeFeedbackInfoOffset =
kDeoptimizationDataOffset + kPointerSize;
- static const int kFlagsOffset =
- kNextCodeFlushingCandidateOffset + kPointerSize;
-
+ static const int kGCMetadataOffset = kTypeFeedbackInfoOffset + kPointerSize;
+ static const int kICAgeOffset =
+ kGCMetadataOffset + kPointerSize;
+ static const int kFlagsOffset = kICAgeOffset + kIntSize;
static const int kKindSpecificFlagsOffset = kFlagsOffset + kIntSize;
static const int kKindSpecificFlagsSize = 2 * kIntSize;
@@ -4051,6 +4473,7 @@ class Code: public HeapObject {
static const int kBinaryOpReturnTypeOffset = kBinaryOpTypeOffset + 1;
static const int kAllowOSRAtLoopNestingLevelOffset = kFullCodeFlags + 1;
+ static const int kProfilerTicksOffset = kAllowOSRAtLoopNestingLevelOffset + 1;
static const int kSafepointTableOffsetOffset = kStackSlotsOffset + kIntSize;
static const int kStackCheckTableOffsetOffset = kStackSlotsOffset + kIntSize;
@@ -4058,8 +4481,8 @@ class Code: public HeapObject {
// Flags layout. BitField<type, shift, size>.
class ICStateField: public BitField<InlineCacheState, 0, 3> {};
class TypeField: public BitField<PropertyType, 3, 4> {};
- class KindField: public BitField<Kind, 7, 4> {};
- class CacheHolderField: public BitField<InlineCacheHolderFlag, 11, 1> {};
+ class CacheHolderField: public BitField<InlineCacheHolderFlag, 7, 1> {};
+ class KindField: public BitField<Kind, 8, 4> {};
class ExtraICStateField: public BitField<ExtraICState, 12, 2> {};
class IsPregeneratedField: public BitField<bool, 14, 1> {};
@@ -4067,6 +4490,7 @@ class Code: public HeapObject {
static const int kArgumentsCountShift = 15;
static const int kArgumentsCountMask = ~((1 << kArgumentsCountShift) - 1);
+ // This constant should be encodable in an ARM instruction.
static const int kFlagsNotUsedInLookup =
TypeField::kMask | CacheHolderField::kMask;
@@ -4177,11 +4601,11 @@ class Map: public HeapObject {
// Tells whether the instance has a call-as-function handler.
inline void set_has_instance_call_handler() {
- set_bit_field3(bit_field3() | (1 << kHasInstanceCallHandler));
+ set_bit_field(bit_field() | (1 << kHasInstanceCallHandler));
}
inline bool has_instance_call_handler() {
- return ((1 << kHasInstanceCallHandler) & bit_field3()) != 0;
+ return ((1 << kHasInstanceCallHandler) & bit_field()) != 0;
}
inline void set_is_extensible(bool value);
@@ -4228,6 +4652,11 @@ class Map: public HeapObject {
return elements_kind() == DICTIONARY_ELEMENTS;
}
+ inline bool has_slow_elements_kind() {
+ return elements_kind() == DICTIONARY_ELEMENTS
+ || elements_kind() == NON_STRICT_ARGUMENTS_ELEMENTS;
+ }
+
static bool IsValidElementsTransition(ElementsKind from_kind,
ElementsKind to_kind);
@@ -4249,20 +4678,6 @@ class Map: public HeapObject {
inline void set_is_access_check_needed(bool access_check_needed);
inline bool is_access_check_needed();
- // Whether the named interceptor is a fallback interceptor or not
- inline void set_named_interceptor_is_fallback(bool value);
- inline bool named_interceptor_is_fallback();
-
- // Tells whether the instance has the space for an external resource
- // object
- inline void set_has_external_resource(bool value);
- inline bool has_external_resource();
-
- // Tells whether the user object comparison callback should be used for
- // comparisons involving this object
- inline void set_use_user_object_comparison(bool value);
- inline bool use_user_object_comparison();
-
// [prototype]: implicit prototype object.
DECL_ACCESSORS(prototype, Object)
@@ -4338,8 +4753,9 @@ class Map: public HeapObject {
// Returns the next free property index (only valid for FAST MODE).
int NextFreePropertyIndex();
- // Returns the number of properties described in instance_descriptors.
- int NumberOfDescribedProperties();
+ // Returns the number of properties described in instance_descriptors
+ // filtering out properties with the specified attributes.
+ int NumberOfDescribedProperties(PropertyAttributes filter = NONE);
// Casting.
static inline Map* cast(Object* obj);
@@ -4373,7 +4789,7 @@ class Map: public HeapObject {
// This is undone in MarkCompactCollector::ClearNonLiveTransitions().
void CreateBackPointers();
- void CreateOneBackPointer(Map* transition_target);
+ void CreateOneBackPointer(Object* transition_target);
// Set all map transitions from this map to dead maps to null.
// Also, restore the original prototype on the targets of these
@@ -4381,6 +4797,13 @@ class Map: public HeapObject {
// following back pointers.
void ClearNonLiveTransitions(Heap* heap, Object* real_prototype);
+ // Restore a possible back pointer in the prototype field of object.
+ // Return true in that case and false otherwise. Set *keep_entry to
+ // true when a live map transition has been found.
+ bool RestoreOneBackPointer(Object* object,
+ Object* real_prototype,
+ bool* keep_entry);
+
// Computes a hash value for this map, to be used in HashTables and such.
int Hash();
@@ -4390,12 +4813,6 @@ class Map: public HeapObject {
// The "shared" flags of both this map and |other| are ignored.
bool EquivalentToForNormalization(Map* other, PropertyNormalizationMode mode);
- // Returns true if this map and |other| describe equivalent objects.
- // The "shared" flags of both this map and |other| are ignored.
- bool EquivalentTo(Map* other) {
- return EquivalentToForNormalization(other, KEEP_INOBJECT_PROPERTIES);
- }
-
// Returns the contents of this map's descriptor array for the given string.
// May return NULL. |safe_to_add_transition| is set to false and NULL
// is returned if adding transitions is not allowed.
@@ -4411,8 +4828,8 @@ class Map: public HeapObject {
// Adds an entry to this map's descriptor array for a transition to
// |transitioned_map| when its elements_kind is changed to |elements_kind|.
- MaybeObject* AddElementsTransition(ElementsKind elements_kind,
- Map* transitioned_map);
+ MUST_USE_RESULT MaybeObject* AddElementsTransition(
+ ElementsKind elements_kind, Map* transitioned_map);
// Returns the transitioned map for this map with the most generic
// elements_kind that's found in |candidates|, or null handle if no match is
@@ -4444,7 +4861,8 @@ class Map: public HeapObject {
Object* GetPrototypeTransition(Object* prototype);
- MaybeObject* PutPrototypeTransition(Object* prototype, Map* map);
+ MUST_USE_RESULT MaybeObject* PutPrototypeTransition(Object* prototype,
+ Map* map);
static const int kMaxPreAllocatedPropertyFields = 255;
@@ -4503,14 +4921,14 @@ class Map: public HeapObject {
static const int kHasNamedInterceptor = 3;
static const int kHasIndexedInterceptor = 4;
static const int kIsUndetectable = 5;
- static const int kHasExternalResource = 6;
+ static const int kHasInstanceCallHandler = 6;
static const int kIsAccessCheckNeeded = 7;
// Bit positions for bit field 2
static const int kIsExtensible = 0;
static const int kFunctionWithPrototype = 1;
static const int kStringWrapperSafeForDefaultValueOf = 2;
- static const int kUseUserObjectComparison = 3;
+ static const int kAttachedToSharedFunctionInfo = 3;
// No bits can be used after kElementsKindFirstBit, they are all reserved for
// storing ElementKind.
static const int kElementsKindShift = 4;
@@ -4527,9 +4945,6 @@ class Map: public HeapObject {
// Bit positions for bit field 3
static const int kIsShared = 0;
- static const int kNamedInterceptorIsFallback = 1;
- static const int kHasInstanceCallHandler = 2;
- static const int kAttachedToSharedFunctionInfo = 3;
// Layout of the default cache. It holds alternating name and code objects.
static const int kCodeCacheEntrySize = 2;
@@ -4572,6 +4987,12 @@ class Script: public Struct {
COMPILATION_TYPE_EVAL = 1
};
+ // Script compilation state.
+ enum CompilationState {
+ COMPILATION_STATE_INITIAL = 0,
+ COMPILATION_STATE_COMPILED = 1
+ };
+
// [source]: the script source.
DECL_ACCESSORS(source, Object)
@@ -4603,6 +5024,9 @@ class Script: public Struct {
// [compilation]: how the the script was compiled.
DECL_ACCESSORS(compilation_type, Smi)
+ // [is_compiled]: determines whether the script has already been compiled.
+ DECL_ACCESSORS(compilation_state, Smi)
+
// [line_ends]: FixedArray of line ends positions.
DECL_ACCESSORS(line_ends, Object)
@@ -4639,7 +5063,9 @@ class Script: public Struct {
static const int kWrapperOffset = kContextOffset + kPointerSize;
static const int kTypeOffset = kWrapperOffset + kPointerSize;
static const int kCompilationTypeOffset = kTypeOffset + kPointerSize;
- static const int kLineEndsOffset = kCompilationTypeOffset + kPointerSize;
+ static const int kCompilationStateOffset =
+ kCompilationTypeOffset + kPointerSize;
+ static const int kLineEndsOffset = kCompilationStateOffset + kPointerSize;
static const int kIdOffset = kLineEndsOffset + kPointerSize;
static const int kEvalFromSharedOffset = kIdOffset + kPointerSize;
static const int kEvalFrominstructionsOffsetOffset =
@@ -4683,7 +5109,10 @@ class Script: public Struct {
V(Math, atan, MathATan) \
V(Math, exp, MathExp) \
V(Math, sqrt, MathSqrt) \
- V(Math, pow, MathPow)
+ V(Math, pow, MathPow) \
+ V(Math, random, MathRandom) \
+ V(Math, max, MathMax) \
+ V(Math, min, MathMin)
enum BuiltinFunctionId {
@@ -4709,7 +5138,7 @@ class SharedFunctionInfo: public HeapObject {
DECL_ACCESSORS(code, Code)
// [scope_info]: Scope info.
- DECL_ACCESSORS(scope_info, SerializedScopeInfo)
+ DECL_ACCESSORS(scope_info, ScopeInfo)
// [construct stub]: Code stub for constructing instances of this function.
DECL_ACCESSORS(construct_stub, Code)
@@ -4895,10 +5324,18 @@ class SharedFunctionInfo: public HeapObject {
inline int compiler_hints();
inline void set_compiler_hints(int value);
+ inline int ast_node_count();
+ inline void set_ast_node_count(int count);
+
// A counter used to determine when to stress the deoptimizer with a
// deopt.
- inline Smi* deopt_counter();
- inline void set_deopt_counter(Smi* counter);
+ inline int deopt_counter();
+ inline void set_deopt_counter(int counter);
+
+ // Inline cache age is used to infer whether the function survived a context
+ // disposal or not. In the former case we reset the opt_count.
+ inline int ic_age();
+ inline void set_ic_age(int age);
// Add information on assignments of the form this.x = ...;
void SetThisPropertyAssignmentsInfo(
@@ -4931,15 +5368,20 @@ class SharedFunctionInfo: public HeapObject {
// spending time attempting to optimize it again.
DECL_BOOLEAN_ACCESSORS(optimization_disabled)
- // Indicates whether the function is a strict mode function.
- inline bool strict_mode();
+ // Indicates the language mode of the function's code as defined by the
+ // current harmony drafts for the next ES language standard. Possible
+ // values are:
+ // 1. CLASSIC_MODE - Unrestricted syntax and semantics, same as in ES5.
+ // 2. STRICT_MODE - Restricted syntax and semantics, same as in ES5.
+ // 3. EXTENDED_MODE - Only available under the harmony flag, not part of ES5.
+ inline LanguageMode language_mode();
+ inline void set_language_mode(LanguageMode language_mode);
- // Indicates the mode of the function.
- inline StrictModeFlag strict_mode_flag();
- inline void set_strict_mode_flag(StrictModeFlag strict_mode_flag);
+ // Indicates whether the language mode of this function is CLASSIC_MODE.
+ inline bool is_classic_mode();
- // Indicates whether the function is a qml mode function.
- DECL_BOOLEAN_ACCESSORS(qml_mode)
+ // Indicates whether the language mode of this function is EXTENDED_MODE.
+ inline bool is_extended_mode();
// False if the function definitely does not allocate an arguments object.
DECL_BOOLEAN_ACCESSORS(uses_arguments)
@@ -4967,6 +5409,15 @@ class SharedFunctionInfo: public HeapObject {
// through the API, which does not change this flag).
DECL_BOOLEAN_ACCESSORS(is_anonymous)
+ // Is this a function or top-level/eval code.
+ DECL_BOOLEAN_ACCESSORS(is_function)
+
+ // Indicates that the function cannot be optimized.
+ DECL_BOOLEAN_ACCESSORS(dont_optimize)
+
+ // Indicates that the function cannot be inlined.
+ DECL_BOOLEAN_ACCESSORS(dont_inline)
+
// Indicates whether or not the code in the shared function support
// deoptimization.
inline bool has_deoptimization_support();
@@ -4975,9 +5426,8 @@ class SharedFunctionInfo: public HeapObject {
void EnableDeoptimizationSupport(Code* recompiled);
// Disable (further) attempted optimization of all functions sharing this
- // shared function info. The function is the one we actually tried to
- // optimize.
- void DisableOptimization(JSFunction* function);
+ // shared function info.
+ void DisableOptimization();
// Lookup the bailout ID and ASSERT that it exists in the non-optimized
// code, returns whether it asserted (i.e., always true if assertions are
@@ -5004,7 +5454,7 @@ class SharedFunctionInfo: public HeapObject {
// [source code]: Source code for the function.
bool HasSourceCode();
- Object* GetSourceCode();
+ Handle<Object> GetSourceCode();
inline int opt_count();
inline void set_opt_count(int opt_count);
@@ -5031,6 +5481,8 @@ class SharedFunctionInfo: public HeapObject {
void SharedFunctionInfoVerify();
#endif
+ void ResetForNewContext(int new_ic_age);
+
// Helpers to compile the shared code. Returns true on success, false on
// failure (e.g., stack overflow during compilation).
static bool EnsureCompiled(Handle<SharedFunctionInfo> shared,
@@ -5038,6 +5490,8 @@ class SharedFunctionInfo: public HeapObject {
static bool CompileLazy(Handle<SharedFunctionInfo> shared,
ClearExceptionFlag flag);
+ void SharedFunctionInfoIterateBody(ObjectVisitor* v);
+
// Casting.
static inline SharedFunctionInfo* cast(Object* obj);
@@ -5061,12 +5515,13 @@ class SharedFunctionInfo: public HeapObject {
kInferredNameOffset + kPointerSize;
static const int kThisPropertyAssignmentsOffset =
kInitialMapOffset + kPointerSize;
- static const int kDeoptCounterOffset =
- kThisPropertyAssignmentsOffset + kPointerSize;
+ // ic_age is a Smi field. It could be grouped with another Smi field into a
+ // PSEUDO_SMI_ACCESSORS pair (on x64), if one becomes available.
+ static const int kICAgeOffset = kThisPropertyAssignmentsOffset + kPointerSize;
#if V8_HOST_ARCH_32_BIT
// Smi fields.
static const int kLengthOffset =
- kDeoptCounterOffset + kPointerSize;
+ kICAgeOffset + kPointerSize;
static const int kFormalParameterCountOffset = kLengthOffset + kPointerSize;
static const int kExpectedNofPropertiesOffset =
kFormalParameterCountOffset + kPointerSize;
@@ -5084,8 +5539,12 @@ class SharedFunctionInfo: public HeapObject {
kCompilerHintsOffset + kPointerSize;
static const int kOptCountOffset =
kThisPropertyAssignmentsCountOffset + kPointerSize;
+ static const int kAstNodeCountOffset = kOptCountOffset + kPointerSize;
+ static const int kDeoptCounterOffset = kAstNodeCountOffset + kPointerSize;
+
+
// Total size.
- static const int kSize = kOptCountOffset + kPointerSize;
+ static const int kSize = kDeoptCounterOffset + kPointerSize;
#else
// The only reason to use smi fields instead of int fields
// is to allow iteration without maps decoding during
@@ -5097,7 +5556,7 @@ class SharedFunctionInfo: public HeapObject {
// word is not set and thus this word cannot be treated as pointer
// to HeapObject during old space traversal.
static const int kLengthOffset =
- kDeoptCounterOffset + kPointerSize;
+ kICAgeOffset + kPointerSize;
static const int kFormalParameterCountOffset =
kLengthOffset + kIntSize;
@@ -5121,8 +5580,11 @@ class SharedFunctionInfo: public HeapObject {
static const int kOptCountOffset =
kThisPropertyAssignmentsCountOffset + kIntSize;
+ static const int kAstNodeCountOffset = kOptCountOffset + kIntSize;
+ static const int kDeoptCounterOffset = kAstNodeCountOffset + kIntSize;
+
// Total size.
- static const int kSize = kOptCountOffset + kIntSize;
+ static const int kSize = kDeoptCounterOffset + kIntSize;
#endif
@@ -5162,13 +5624,16 @@ class SharedFunctionInfo: public HeapObject {
kCodeAgeShift,
kOptimizationDisabled = kCodeAgeShift + kCodeAgeSize,
kStrictModeFunction,
- kQmlModeFunction,
+ kExtendedModeFunction,
kUsesArguments,
kHasDuplicateParameters,
kNative,
kBoundFunction,
kIsAnonymous,
kNameShouldPrintAsAnonymous,
+ kIsFunction,
+ kDontOptimize,
+ kDontInline,
kCompilerHintsCount // Pseudo entry
};
@@ -5193,18 +5658,26 @@ class SharedFunctionInfo: public HeapObject {
static const int kStrictModeBitWithinByte =
(kStrictModeFunction + kCompilerHintsSmiTagSize) % kBitsPerByte;
+ static const int kExtendedModeBitWithinByte =
+ (kExtendedModeFunction + kCompilerHintsSmiTagSize) % kBitsPerByte;
+
static const int kNativeBitWithinByte =
(kNative + kCompilerHintsSmiTagSize) % kBitsPerByte;
#if __BYTE_ORDER == __LITTLE_ENDIAN
static const int kStrictModeByteOffset = kCompilerHintsOffset +
(kStrictModeFunction + kCompilerHintsSmiTagSize) / kBitsPerByte;
+ static const int kExtendedModeByteOffset = kCompilerHintsOffset +
+ (kExtendedModeFunction + kCompilerHintsSmiTagSize) / kBitsPerByte;
static const int kNativeByteOffset = kCompilerHintsOffset +
(kNative + kCompilerHintsSmiTagSize) / kBitsPerByte;
#elif __BYTE_ORDER == __BIG_ENDIAN
static const int kStrictModeByteOffset = kCompilerHintsOffset +
(kCompilerHintsSize - 1) -
((kStrictModeFunction + kCompilerHintsSmiTagSize) / kBitsPerByte);
+ static const int kExtendedModeByteOffset = kCompilerHintsOffset +
+ (kCompilerHintsSize - 1) -
+ ((kExtendedModeFunction + kCompilerHintsSmiTagSize) / kBitsPerByte);
static const int kNativeByteOffset = kCompilerHintsOffset +
(kCompilerHintsSize - 1) -
((kNative + kCompilerHintsSmiTagSize) / kBitsPerByte);
@@ -5300,6 +5773,8 @@ class JSFunction: public JSObject {
// The initial map for an object created by this constructor.
inline Map* initial_map();
inline void set_initial_map(Map* value);
+ MUST_USE_RESULT inline MaybeObject* set_initial_map_and_cache_transitions(
+ Map* value);
inline bool has_initial_map();
// Get and set the prototype property on a JSFunction. If the
@@ -5310,7 +5785,7 @@ class JSFunction: public JSObject {
inline bool has_instance_prototype();
inline Object* prototype();
inline Object* instance_prototype();
- Object* SetInstancePrototype(Object* value);
+ MUST_USE_RESULT MaybeObject* SetInstancePrototype(Object* value);
MUST_USE_RESULT MaybeObject* SetPrototype(Object* value);
// After prototype is removed, it will not be created when accessed, and
@@ -5432,7 +5907,6 @@ class JSGlobalProxy : public JSObject {
// Forward declaration.
class JSBuiltinsObject;
-class JSGlobalPropertyCell;
// Common super class for JavaScript global objects and the special
// builtins global objects.
@@ -5555,7 +6029,7 @@ class JSBuiltinsObject: public GlobalObject {
};
-// Representation for JS Wrapper objects, String, Number, Boolean, Date, etc.
+// Representation for JS Wrapper objects, String, Number, Boolean, etc.
class JSValue: public JSObject {
public:
// [value]: the object being wrapped.
@@ -5584,6 +6058,106 @@ class JSValue: public JSObject {
};
+class DateCache;
+
+// Representation for JS date objects.
+class JSDate: public JSObject {
+ public:
+ // If one component is NaN, all of them are, indicating a NaN time value.
+ // [value]: the time value.
+ DECL_ACCESSORS(value, Object)
+ // [year]: caches year. Either undefined, smi, or NaN.
+ DECL_ACCESSORS(year, Object)
+ // [month]: caches month. Either undefined, smi, or NaN.
+ DECL_ACCESSORS(month, Object)
+ // [day]: caches day. Either undefined, smi, or NaN.
+ DECL_ACCESSORS(day, Object)
+ // [weekday]: caches day of week. Either undefined, smi, or NaN.
+ DECL_ACCESSORS(weekday, Object)
+ // [hour]: caches hours. Either undefined, smi, or NaN.
+ DECL_ACCESSORS(hour, Object)
+ // [min]: caches minutes. Either undefined, smi, or NaN.
+ DECL_ACCESSORS(min, Object)
+ // [sec]: caches seconds. Either undefined, smi, or NaN.
+ DECL_ACCESSORS(sec, Object)
+ // [cache stamp]: sample of the date cache stamp at the
+ // moment when local fields were cached.
+ DECL_ACCESSORS(cache_stamp, Object)
+
+ // Casting.
+ static inline JSDate* cast(Object* obj);
+
+ // Returns the date field with the specified index.
+ // See FieldIndex for the list of date fields.
+ static MaybeObject* GetField(Object* date, Smi* index);
+
+ void SetValue(Object* value, bool is_value_nan);
+
+
+ // Dispatched behavior.
+#ifdef OBJECT_PRINT
+ inline void JSDatePrint() {
+ JSDatePrint(stdout);
+ }
+ void JSDatePrint(FILE* out);
+#endif
+#ifdef DEBUG
+ void JSDateVerify();
+#endif
+ // The order is important. It must be kept in sync with date macros
+ // in macros.py.
+ enum FieldIndex {
+ kDateValue,
+ kYear,
+ kMonth,
+ kDay,
+ kWeekday,
+ kHour,
+ kMinute,
+ kSecond,
+ kFirstUncachedField,
+ kMillisecond = kFirstUncachedField,
+ kDays,
+ kTimeInDay,
+ kFirstUTCField,
+ kYearUTC = kFirstUTCField,
+ kMonthUTC,
+ kDayUTC,
+ kWeekdayUTC,
+ kHourUTC,
+ kMinuteUTC,
+ kSecondUTC,
+ kMillisecondUTC,
+ kDaysUTC,
+ kTimeInDayUTC,
+ kTimezoneOffset
+ };
+
+ // Layout description.
+ static const int kValueOffset = JSObject::kHeaderSize;
+ static const int kYearOffset = kValueOffset + kPointerSize;
+ static const int kMonthOffset = kYearOffset + kPointerSize;
+ static const int kDayOffset = kMonthOffset + kPointerSize;
+ static const int kWeekdayOffset = kDayOffset + kPointerSize;
+ static const int kHourOffset = kWeekdayOffset + kPointerSize;
+ static const int kMinOffset = kHourOffset + kPointerSize;
+ static const int kSecOffset = kMinOffset + kPointerSize;
+ static const int kCacheStampOffset = kSecOffset + kPointerSize;
+ static const int kSize = kCacheStampOffset + kPointerSize;
+
+ private:
+ inline Object* DoGetField(FieldIndex index);
+
+ Object* GetUTCField(FieldIndex index, double value, DateCache* date_cache);
+
+ // Computes and caches the cacheable fields of the date.
+ inline void SetLocalFields(int64_t local_time_ms, DateCache* date_cache);
+
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(JSDate);
+};
+
+
// Representation of message objects used for error reporting through
// the API. The messages are formatted in JavaScript so this object is
// a real JavaScript object. The information used for formatting the
@@ -5791,7 +6365,7 @@ class JSRegExp: public JSObject {
};
-class CompilationCacheShape {
+class CompilationCacheShape : public BaseShape<HashTableKey*> {
public:
static inline bool IsMatch(HashTableKey* key, Object* value) {
return key->IsMatch(value);
@@ -5819,13 +6393,19 @@ class CompilationCacheTable: public HashTable<CompilationCacheShape,
public:
// Find cached value for a string key, otherwise return null.
Object* Lookup(String* src);
- Object* LookupEval(String* src, Context* context, StrictModeFlag strict_mode);
+ Object* LookupEval(String* src,
+ Context* context,
+ LanguageMode language_mode,
+ int scope_position);
Object* LookupRegExp(String* source, JSRegExp::Flags flags);
- MaybeObject* Put(String* src, Object* value);
- MaybeObject* PutEval(String* src,
- Context* context,
- SharedFunctionInfo* value);
- MaybeObject* PutRegExp(String* src, JSRegExp::Flags flags, FixedArray* value);
+ MUST_USE_RESULT MaybeObject* Put(String* src, Object* value);
+ MUST_USE_RESULT MaybeObject* PutEval(String* src,
+ Context* context,
+ SharedFunctionInfo* value,
+ int scope_position);
+ MUST_USE_RESULT MaybeObject* PutRegExp(String* src,
+ JSRegExp::Flags flags,
+ FixedArray* value);
// Remove given value from cache.
void Remove(Object* value);
@@ -5891,7 +6471,7 @@ class CodeCache: public Struct {
};
-class CodeCacheHashTableShape {
+class CodeCacheHashTableShape : public BaseShape<HashTableKey*> {
public:
static inline bool IsMatch(HashTableKey* key, Object* value) {
return key->IsMatch(value);
@@ -5986,25 +6566,97 @@ class PolymorphicCodeCacheHashTable
};
+class TypeFeedbackInfo: public Struct {
+ public:
+ inline int ic_total_count();
+ inline void set_ic_total_count(int count);
+
+ inline int ic_with_type_info_count();
+ inline void set_ic_with_type_info_count(int count);
+
+ DECL_ACCESSORS(type_feedback_cells, TypeFeedbackCells)
+
+ static inline TypeFeedbackInfo* cast(Object* obj);
+
+#ifdef OBJECT_PRINT
+ inline void TypeFeedbackInfoPrint() {
+ TypeFeedbackInfoPrint(stdout);
+ }
+ void TypeFeedbackInfoPrint(FILE* out);
+#endif
+#ifdef DEBUG
+ void TypeFeedbackInfoVerify();
+#endif
+
+ static const int kIcTotalCountOffset = HeapObject::kHeaderSize;
+ static const int kIcWithTypeinfoCountOffset =
+ kIcTotalCountOffset + kPointerSize;
+ static const int kTypeFeedbackCellsOffset =
+ kIcWithTypeinfoCountOffset + kPointerSize;
+ static const int kSize = kTypeFeedbackCellsOffset + kPointerSize;
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(TypeFeedbackInfo);
+};
+
+
+// Representation of a slow alias as part of a non-strict arguments objects.
+// For fast aliases (if HasNonStrictArgumentsElements()):
+// - the parameter map contains an index into the context
+// - all attributes of the element have default values
+// For slow aliases (if HasDictionaryArgumentsElements()):
+// - the parameter map contains no fast alias mapping (i.e. the hole)
+// - this struct (in the slow backing store) contains an index into the context
+// - all attributes are available as part if the property details
+class AliasedArgumentsEntry: public Struct {
+ public:
+ inline int aliased_context_slot();
+ inline void set_aliased_context_slot(int count);
+
+ static inline AliasedArgumentsEntry* cast(Object* obj);
+
+#ifdef OBJECT_PRINT
+ inline void AliasedArgumentsEntryPrint() {
+ AliasedArgumentsEntryPrint(stdout);
+ }
+ void AliasedArgumentsEntryPrint(FILE* out);
+#endif
+#ifdef DEBUG
+ void AliasedArgumentsEntryVerify();
+#endif
+
+ static const int kAliasedContextSlot = HeapObject::kHeaderSize;
+ static const int kSize = kAliasedContextSlot + kPointerSize;
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(AliasedArgumentsEntry);
+};
+
+
enum AllowNullsFlag {ALLOW_NULLS, DISALLOW_NULLS};
enum RobustnessFlag {ROBUST_STRING_TRAVERSAL, FAST_STRING_TRAVERSAL};
class StringHasher {
public:
- explicit inline StringHasher(int length);
+ explicit inline StringHasher(int length, uint32_t seed);
// Returns true if the hash of this string can be computed without
// looking at the contents.
inline bool has_trivial_hash();
// Add a character to the hash and update the array index calculation.
- inline void AddCharacter(uc32 c);
+ inline void AddCharacter(uint32_t c);
// Adds a character to the hash but does not update the array index
// calculation. This can only be called when it has been verified
// that the input is not an array index.
- inline void AddCharacterNoIndex(uc32 c);
+ inline void AddCharacterNoIndex(uint32_t c);
+
+ // Add a character above 0xffff as a surrogate pair. These can get into
+ // the hasher through the routines that take a UTF-8 string and make a symbol.
+ void AddSurrogatePair(uc32 c);
+ void AddSurrogatePairNoIndex(uc32 c);
// Returns the value to store in the hash field of a string with
// the given length and contents.
@@ -6023,6 +6675,11 @@ class StringHasher {
// value is represented decimal value.
static uint32_t MakeArrayIndexHash(uint32_t value, int length);
+ // No string is allowed to have a hash of zero. That value is reserved
+ // for internal properties. If the hash calculation yields zero then we
+ // use 27 instead.
+ static const int kZeroHash = 27;
+
private:
uint32_t array_index() {
ASSERT(is_array_index());
@@ -6043,7 +6700,9 @@ class StringHasher {
// Calculates string hash.
template <typename schar>
-inline uint32_t HashSequentialString(const schar* chars, int length);
+inline uint32_t HashSequentialString(const schar* chars,
+ int length,
+ uint32_t seed);
// The characteristics of a string are stored in its map. Retrieving these
@@ -6215,7 +6874,7 @@ class String: public HeapObject {
inline String* GetUnderlying();
// Mark the string as an undetectable object. It only applies to
- // ascii and two byte string types.
+ // ASCII and two byte string types.
bool MarkAsUndetectable();
// Return a substring.
@@ -6229,9 +6888,6 @@ class String: public HeapObject {
bool IsAsciiEqualTo(Vector<const char> str);
bool IsTwoByteEqualTo(Vector<const uc16> str);
- bool SlowEqualsExternal(uc16 *string, int length);
- bool SlowEqualsExternal(char *string, int length);
-
// Return a UTF8 representation of the string. The string is null
// terminated but may optionally contain nulls. Length is returned
// in length_output if length_output is not a null pointer The string
@@ -6250,9 +6906,6 @@ class String: public HeapObject {
RobustnessFlag robustness_flag = FAST_STRING_TRAVERSAL,
int* length_output = 0);
- inline int Utf8Length() { return Utf8Length(this, 0, length()); }
- static int Utf8Length(String* input, int from, int to);
-
// Return a 16 bit Unicode representation of the string.
// The string should be nearly flat, otherwise the performance of
// of this method may be very bad. Setting robustness_flag to
@@ -6269,7 +6922,8 @@ class String: public HeapObject {
inline uint32_t Hash();
static uint32_t ComputeHashField(unibrow::CharacterStream* buffer,
- int length);
+ int length,
+ uint32_t seed);
static bool ComputeArrayIndex(unibrow::CharacterStream* buffer,
uint32_t* index,
@@ -6314,13 +6968,10 @@ class String: public HeapObject {
// value into an array index.
static const int kMaxArrayIndexSize = 10;
- // Max ascii char code.
+ // Max ASCII char code.
static const int kMaxAsciiCharCode = unibrow::Utf8::kMaxOneByteChar;
static const unsigned kMaxAsciiCharCodeU = unibrow::Utf8::kMaxOneByteChar;
- static const int kMaxUC16CharCode = 0xffff;
-
- // Minimum length for a cons string.
- static const int kMinNonFlatLength = 13;
+ static const int kMaxUtf16CodeUnit = 0xffff;
// Mask constant for checking if a string has a computed hash code
// and if it is an array index. The least significant bit indicates
@@ -6334,6 +6985,10 @@ class String: public HeapObject {
// Shift constant retrieving hash code from hash field.
static const int kHashShift = kNofHashBitFields;
+ // Only these bits are relevant in the hash, since the top two are shifted
+ // out.
+ static const uint32_t kHashBitMask = 0xffffffffu >> kHashShift;
+
// Array index strings this short can keep their index in the hash
// field.
static const int kMaxCachedArrayIndexLength = 7;
@@ -6488,21 +7143,16 @@ class SeqString: public String {
// Casting.
static inline SeqString* cast(Object* obj);
- // Get and set the symbol id of the string
- inline int symbol_id();
- inline void set_symbol_id(int value);
-
// Layout description.
- static const int kSymbolIdOffset = String::kSize;
- static const int kHeaderSize = kSymbolIdOffset + kPointerSize;
+ static const int kHeaderSize = String::kSize;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(SeqString);
};
-// The AsciiString class captures sequential ascii string objects.
-// Each character in the AsciiString is an ascii character.
+// The AsciiString class captures sequential ASCII string objects.
+// Each character in the AsciiString is an ASCII character.
class SeqAsciiString: public SeqString {
public:
static const bool kHasAsciiEncoding = true;
@@ -6724,7 +7374,12 @@ class ExternalString: public String {
// Layout description.
static const int kResourceOffset = POINTER_SIZE_ALIGN(String::kSize);
- static const int kSize = kResourceOffset + kPointerSize;
+ static const int kShortSize = kResourceOffset + kPointerSize;
+ static const int kResourceDataOffset = kResourceOffset + kPointerSize;
+ static const int kSize = kResourceDataOffset + kPointerSize;
+
+ // Return whether external string is short (data pointer is not cached).
+ inline bool is_short();
STATIC_CHECK(kResourceOffset == Internals::kStringResourceOffset);
@@ -6745,8 +7400,16 @@ class ExternalAsciiString: public ExternalString {
inline const Resource* resource();
inline void set_resource(const Resource* buffer);
+ // Update the pointer cache to the external character array.
+ // The cached pointer is always valid, as the external character array does =
+ // not move during lifetime. Deserialization is the only exception, after
+ // which the pointer cache has to be refreshed.
+ inline void update_data_cache();
+
+ inline const char* GetChars();
+
// Dispatched behavior.
- uint16_t ExternalAsciiStringGet(int index);
+ inline uint16_t ExternalAsciiStringGet(int index);
// Casting.
static inline ExternalAsciiString* cast(Object* obj);
@@ -6782,11 +7445,19 @@ class ExternalTwoByteString: public ExternalString {
inline const Resource* resource();
inline void set_resource(const Resource* buffer);
+ // Update the pointer cache to the external character array.
+ // The cached pointer is always valid, as the external character array does =
+ // not move during lifetime. Deserialization is the only exception, after
+ // which the pointer cache has to be refreshed.
+ inline void update_data_cache();
+
+ inline const uint16_t* GetChars();
+
// Dispatched behavior.
- uint16_t ExternalTwoByteStringGet(int index);
+ inline uint16_t ExternalTwoByteStringGet(int index);
// For regexp code.
- const uint16_t* ExternalTwoByteStringGetData(unsigned start);
+ inline const uint16_t* ExternalTwoByteStringGetData(unsigned start);
// Casting.
static inline ExternalTwoByteString* cast(Object* obj);
@@ -6931,9 +7602,6 @@ class Oddball: public HeapObject {
static const byte kUndefined = 5;
static const byte kOther = 6;
- // The ToNumber value of a hidden oddball is a negative smi.
- static const int kLeastHiddenOddballNumber = -5;
-
typedef FixedBodyDescriptor<kToStringOffset,
kToNumberOffset + kPointerSize,
kSize> BodyDescriptor;
@@ -7116,7 +7784,7 @@ class JSFunctionProxy: public JSProxy {
};
-// The JSSet describes EcmaScript Harmony maps
+// The JSSet describes EcmaScript Harmony sets
class JSSet: public JSObject {
public:
// [set]: the backing hash set containing keys.
@@ -7179,9 +7847,6 @@ class JSWeakMap: public JSObject {
// [next]: linked list of encountered weak maps during GC.
DECL_ACCESSORS(next, Object)
- // Unchecked accessors to be used during GC.
- inline ObjectHashTable* unchecked_table();
-
// Casting.
static inline JSWeakMap* cast(Object* obj);
@@ -7266,8 +7931,12 @@ class JSArray: public JSObject {
// capacity is non-zero.
MUST_USE_RESULT MaybeObject* Initialize(int capacity);
+ // Initializes the array to a certain length.
+ inline bool AllowsSetElementsLength();
+ MUST_USE_RESULT MaybeObject* SetElementsLength(Object* length);
+
// Set the content of the array to the content of storage.
- inline MaybeObject* SetContent(FixedArray* storage);
+ MUST_USE_RESULT inline MaybeObject* SetContent(FixedArrayBase* storage);
// Casting.
static inline JSArray* cast(Object* obj);
@@ -7382,6 +8051,59 @@ class AccessorInfo: public Struct {
};
+// Support for JavaScript accessors: A pair of a getter and a setter. Each
+// accessor can either be
+// * a pointer to a JavaScript function or proxy: a real accessor
+// * undefined: considered an accessor by the spec, too, strangely enough
+// * the hole: an accessor which has not been set
+// * a pointer to a map: a transition used to ensure map sharing
+class AccessorPair: public Struct {
+ public:
+ DECL_ACCESSORS(getter, Object)
+ DECL_ACCESSORS(setter, Object)
+
+ static inline AccessorPair* cast(Object* obj);
+
+ MUST_USE_RESULT MaybeObject* CopyWithoutTransitions();
+
+ // Note: Returns undefined instead in case of a hole.
+ Object* GetComponent(AccessorComponent component);
+
+ // Set both components, skipping arguments which are a JavaScript null.
+ void SetComponents(Object* getter, Object* setter) {
+ if (!getter->IsNull()) set_getter(getter);
+ if (!setter->IsNull()) set_setter(setter);
+ }
+
+ bool ContainsAccessor() {
+ return IsJSAccessor(getter()) || IsJSAccessor(setter());
+ }
+
+#ifdef OBJECT_PRINT
+ void AccessorPairPrint(FILE* out = stdout);
+#endif
+#ifdef DEBUG
+ void AccessorPairVerify();
+#endif
+
+ static const int kGetterOffset = HeapObject::kHeaderSize;
+ static const int kSetterOffset = kGetterOffset + kPointerSize;
+ static const int kSize = kSetterOffset + kPointerSize;
+
+ private:
+ // Strangely enough, in addition to functions and harmony proxies, the spec
+ // requires us to consider undefined as a kind of accessor, too:
+ // var obj = {};
+ // Object.defineProperty(obj, "foo", {get: undefined});
+ // assertTrue("foo" in obj);
+ bool IsJSAccessor(Object* obj) {
+ return obj->IsSpecFunction() || obj->IsUndefined();
+ }
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(AccessorPair);
+};
+
+
class AccessCheckInfo: public Struct {
public:
DECL_ACCESSORS(named_callback, Object)
@@ -7418,7 +8140,6 @@ class InterceptorInfo: public Struct {
DECL_ACCESSORS(deleter, Object)
DECL_ACCESSORS(enumerator, Object)
DECL_ACCESSORS(data, Object)
- DECL_ACCESSORS(is_fallback, Smi)
static inline InterceptorInfo* cast(Object* obj);
@@ -7438,8 +8159,7 @@ class InterceptorInfo: public Struct {
static const int kDeleterOffset = kQueryOffset + kPointerSize;
static const int kEnumeratorOffset = kDeleterOffset + kPointerSize;
static const int kDataOffset = kEnumeratorOffset + kPointerSize;
- static const int kFallbackOffset = kDataOffset + kPointerSize;
- static const int kSize = kFallbackOffset + kPointerSize;
+ static const int kSize = kDataOffset + kPointerSize;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(InterceptorInfo);
@@ -7484,7 +8204,8 @@ class TemplateInfo: public Struct {
static const int kTagOffset = HeapObject::kHeaderSize;
static const int kPropertyListOffset = kTagOffset + kPointerSize;
static const int kHeaderSize = kPropertyListOffset + kPointerSize;
- protected:
+
+ private:
DISALLOW_IMPLICIT_CONSTRUCTORS(TemplateInfo);
};
@@ -7561,8 +8282,6 @@ class ObjectTemplateInfo: public TemplateInfo {
public:
DECL_ACCESSORS(constructor, Object)
DECL_ACCESSORS(internal_field_count, Object)
- DECL_ACCESSORS(has_external_resource, Object)
- DECL_ACCESSORS(use_user_object_comparison, Object)
static inline ObjectTemplateInfo* cast(Object* obj);
@@ -7579,9 +8298,7 @@ class ObjectTemplateInfo: public TemplateInfo {
static const int kConstructorOffset = TemplateInfo::kHeaderSize;
static const int kInternalFieldCountOffset =
kConstructorOffset + kPointerSize;
- static const int kHasExternalResourceOffset = kInternalFieldCountOffset + kPointerSize;
- static const int kUseUserObjectComparisonOffset = kHasExternalResourceOffset + kPointerSize;
- static const int kSize = kUseUserObjectComparisonOffset + kPointerSize;
+ static const int kSize = kInternalFieldCountOffset + kPointerSize;
};
@@ -7754,6 +8471,34 @@ class BreakPointInfo: public Struct {
#undef DECL_BOOLEAN_ACCESSORS
#undef DECL_ACCESSORS
+#define VISITOR_SYNCHRONIZATION_TAGS_LIST(V) \
+ V(kSymbolTable, "symbol_table", "(Symbols)") \
+ V(kExternalStringsTable, "external_strings_table", "(External strings)") \
+ V(kStrongRootList, "strong_root_list", "(Strong roots)") \
+ V(kSymbol, "symbol", "(Symbol)") \
+ V(kBootstrapper, "bootstrapper", "(Bootstrapper)") \
+ V(kTop, "top", "(Isolate)") \
+ V(kRelocatable, "relocatable", "(Relocatable)") \
+ V(kDebug, "debug", "(Debugger)") \
+ V(kCompilationCache, "compilationcache", "(Compilation cache)") \
+ V(kHandleScope, "handlescope", "(Handle scope)") \
+ V(kBuiltins, "builtins", "(Builtins)") \
+ V(kGlobalHandles, "globalhandles", "(Global handles)") \
+ V(kThreadManager, "threadmanager", "(Thread manager)") \
+ V(kExtensions, "Extensions", "(Extensions)")
+
+class VisitorSynchronization : public AllStatic {
+ public:
+#define DECLARE_ENUM(enum_item, ignore1, ignore2) enum_item,
+ enum SyncTag {
+ VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_ENUM)
+ kNumberOfSyncTags
+ };
+#undef DECLARE_ENUM
+
+ static const char* const kTags[kNumberOfSyncTags];
+ static const char* const kTagNames[kNumberOfSyncTags];
+};
// Abstract base class for visiting, and optionally modifying, the
// pointers contained in Objects. Used in GC and serialization/deserialization.
@@ -7795,11 +8540,15 @@ class ObjectVisitor BASE_EMBEDDED {
// Visit pointer embedded into a code object.
virtual void VisitEmbeddedPointer(RelocInfo* rinfo);
+ virtual void VisitSharedFunctionInfo(SharedFunctionInfo* shared) {}
+
// Visits a contiguous arrays of external references (references to the C++
// heap) in the half-open range [start, end). Any or all of the values
// may be modified on return.
virtual void VisitExternalReferences(Address* start, Address* end) {}
+ virtual void VisitExternalReference(RelocInfo* rinfo);
+
inline void VisitExternalReference(Address* p) {
VisitExternalReferences(p, p + 1);
}
@@ -7807,13 +8556,10 @@ class ObjectVisitor BASE_EMBEDDED {
// Visits a handle that has an embedder-assigned class ID.
virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {}
-#ifdef DEBUG
// Intended for serialization/deserialization checking: insert, or
// check for the presence of, a tag at this position in the stream.
- virtual void Synchronize(const char* tag) {}
-#else
- inline void Synchronize(const char* tag) {}
-#endif
+ // Also used for marking up GC roots in heap snapshots.
+ virtual void Synchronize(VisitorSynchronization::SyncTag tag) {}
};
diff --git a/src/3rdparty/v8/src/extensions/experimental/i18n-utils.h b/src/3rdparty/v8/src/once.cc
index 7c31528..37fe369 100644
--- a/src/3rdparty/v8/src/extensions/experimental/i18n-utils.h
+++ b/src/3rdparty/v8/src/once.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -25,45 +25,53 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#ifndef V8_EXTENSIONS_EXPERIMENTAL_I18N_UTILS_H_
-#define V8_EXTENSIONS_EXPERIMENTAL_I18N_UTILS_H_
+#include "once.h"
-#include "include/v8.h"
+#ifdef _WIN32
+#include <windows.h>
+#else
+#include <sched.h>
+#endif
-#include "unicode/uversion.h"
-
-namespace U_ICU_NAMESPACE {
-class UnicodeString;
-}
+#include "atomicops.h"
+#include "checks.h"
namespace v8 {
namespace internal {
-class I18NUtils {
- public:
- // Safe string copy. Null terminates the destination. Copies at most
- // (length - 1) bytes.
- // We can't use snprintf since it's not supported on all relevant platforms.
- // We can't use OS::SNPrintF, it's only for internal code.
- static void StrNCopy(char* dest, int length, const char* src);
-
- // Extract a string setting named in |settings| and set it to |result|.
- // Return true if it's specified. Otherwise, return false.
- static bool ExtractStringSetting(const v8::Handle<v8::Object>& settings,
- const char* setting,
- icu::UnicodeString* result);
+void CallOnceImpl(OnceType* once, PointerArgFunction init_func, void* arg) {
+ AtomicWord state = Acquire_Load(once);
+ // Fast path. The provided function was already executed.
+ if (state == ONCE_STATE_DONE) {
+ return;
+ }
- // Converts ASCII array into UChar array.
- // Target is always \0 terminated.
- static void AsciiToUChar(const char* source,
- int32_t source_length,
- UChar* target,
- int32_t target_length);
-
- private:
- I18NUtils() {}
-};
+ // The function execution did not complete yet. The once object can be in one
+ // of the two following states:
+ // - UNINITIALIZED: We are the first thread calling this function.
+ // - EXECUTING_FUNCTION: Another thread is already executing the function.
+ //
+ // First, try to change the state from UNINITIALIZED to EXECUTING_FUNCTION
+ // atomically.
+ state = Acquire_CompareAndSwap(
+ once, ONCE_STATE_UNINITIALIZED, ONCE_STATE_EXECUTING_FUNCTION);
+ if (state == ONCE_STATE_UNINITIALIZED) {
+ // We are the first thread to call this function, so we have to call the
+ // function.
+ init_func(arg);
+ Release_Store(once, ONCE_STATE_DONE);
+ } else {
+ // Another thread has already started executing the function. We need to
+ // wait until it completes the initialization.
+ while (state == ONCE_STATE_EXECUTING_FUNCTION) {
+#ifdef _WIN32
+ ::Sleep(0);
+#else
+ sched_yield();
+#endif
+ state = Acquire_Load(once);
+ }
+ }
+}
} } // namespace v8::internal
-
-#endif // V8_EXTENSIONS_EXPERIMENTAL_I18N_UTILS_H_
diff --git a/src/3rdparty/v8/src/once.h b/src/3rdparty/v8/src/once.h
new file mode 100644
index 0000000..a44b8fa
--- /dev/null
+++ b/src/3rdparty/v8/src/once.h
@@ -0,0 +1,123 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// emulates google3/base/once.h
+//
+// This header is intended to be included only by v8's internal code. Users
+// should not use this directly.
+//
+// This is basically a portable version of pthread_once().
+//
+// This header declares:
+// * A type called OnceType.
+// * A macro V8_DECLARE_ONCE() which declares a (global) variable of type
+// OnceType.
+// * A function CallOnce(OnceType* once, void (*init_func)()).
+// This function, when invoked multiple times given the same OnceType object,
+// will invoke init_func on the first call only, and will make sure none of
+// the calls return before that first call to init_func has finished.
+//
+// Additionally, the following features are supported:
+// * A macro V8_ONCE_INIT which is expanded into the expression used to
+// initialize a OnceType. This is only useful when clients embed a OnceType
+// into a structure of their own and want to initialize it statically.
+// * The user can provide a parameter which CallOnce() forwards to the
+// user-provided function when it is called. Usage example:
+// CallOnce(&my_once, &MyFunctionExpectingIntArgument, 10);
+// * This implementation guarantees that OnceType is a POD (i.e. no static
+// initializer generated).
+//
+// This implements a way to perform lazy initialization. It's more efficient
+// than using mutexes as no lock is needed if initialization has already
+// happened.
+//
+// Example usage:
+// void Init();
+// V8_DECLARE_ONCE(once_init);
+//
+// // Calls Init() exactly once.
+// void InitOnce() {
+// CallOnce(&once_init, &Init);
+// }
+//
+// Note that if CallOnce() is called before main() has begun, it must
+// only be called by the thread that will eventually call main() -- that is,
+// the thread that performs dynamic initialization. In general this is a safe
+// assumption since people don't usually construct threads before main() starts,
+// but it is technically not guaranteed. Unfortunately, Win32 provides no way
+// whatsoever to statically-initialize its synchronization primitives, so our
+// only choice is to assume that dynamic initialization is single-threaded.
+
+#ifndef V8_ONCE_H_
+#define V8_ONCE_H_
+
+#include "atomicops.h"
+
+namespace v8 {
+namespace internal {
+
+typedef AtomicWord OnceType;
+
+#define V8_ONCE_INIT 0
+
+#define V8_DECLARE_ONCE(NAME) ::v8::internal::OnceType NAME
+
+enum {
+ ONCE_STATE_UNINITIALIZED = 0,
+ ONCE_STATE_EXECUTING_FUNCTION = 1,
+ ONCE_STATE_DONE = 2
+};
+
+typedef void (*NoArgFunction)();
+typedef void (*PointerArgFunction)(void* arg);
+
+template <typename T>
+struct OneArgFunction {
+ typedef void (*type)(T);
+};
+
+void CallOnceImpl(OnceType* once, PointerArgFunction init_func, void* arg);
+
+inline void CallOnce(OnceType* once, NoArgFunction init_func) {
+ if (Acquire_Load(once) != ONCE_STATE_DONE) {
+ CallOnceImpl(once, reinterpret_cast<PointerArgFunction>(init_func), NULL);
+ }
+}
+
+
+template <typename Arg>
+inline void CallOnce(OnceType* once,
+ typename OneArgFunction<Arg*>::type init_func, Arg* arg) {
+ if (Acquire_Load(once) != ONCE_STATE_DONE) {
+ CallOnceImpl(once, reinterpret_cast<PointerArgFunction>(init_func),
+ static_cast<void*>(arg));
+ }
+}
+
+} } // namespace v8::internal
+
+#endif // V8_ONCE_H_
diff --git a/src/3rdparty/v8/src/parser.cc b/src/3rdparty/v8/src/parser.cc
index 8e55fe6..da68041 100644
--- a/src/3rdparty/v8/src/parser.cc
+++ b/src/3rdparty/v8/src/parser.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -28,7 +28,7 @@
#include "v8.h"
#include "api.h"
-#include "ast-inl.h"
+#include "ast.h"
#include "bootstrapper.h"
#include "char-predicates-inl.h"
#include "codegen.h"
@@ -258,7 +258,7 @@ Handle<String> Parser::LookupSymbol(int symbol_id) {
scanner().literal_ascii_string());
} else {
return isolate()->factory()->LookupTwoByteSymbol(
- scanner().literal_uc16_string());
+ scanner().literal_utf16_string());
}
}
return LookupCachedSymbol(symbol_id);
@@ -279,7 +279,7 @@ Handle<String> Parser::LookupCachedSymbol(int symbol_id) {
scanner().literal_ascii_string());
} else {
result = isolate()->factory()->LookupTwoByteSymbol(
- scanner().literal_uc16_string());
+ scanner().literal_utf16_string());
}
symbol_cache_.at(symbol_id) = result;
return result;
@@ -459,104 +459,53 @@ class TargetScope BASE_EMBEDDED {
// ----------------------------------------------------------------------------
-// LexicalScope and SaveScope are stack allocated support classes to facilitate
-// anipulation of the Parser's scope stack. The constructor sets the parser's
-// top scope to the incoming scope, and the destructor resets it. Additionally,
-// LexicalScope stores transient information used during parsing.
+// FunctionState and BlockState together implement the parser's scope stack.
+// The parser's current scope is in top_scope_. The BlockState and
+// FunctionState constructors push on the scope stack and the destructors
+// pop. They are also used to hold the parser's per-function and per-block
+// state.
-
-class SaveScope BASE_EMBEDDED {
+class Parser::BlockState BASE_EMBEDDED {
public:
- SaveScope(Parser* parser, Scope* scope)
+ BlockState(Parser* parser, Scope* scope)
: parser_(parser),
- previous_top_scope_(parser->top_scope_) {
+ outer_scope_(parser->top_scope_) {
parser->top_scope_ = scope;
}
- ~SaveScope() {
- parser_->top_scope_ = previous_top_scope_;
- }
+ ~BlockState() { parser_->top_scope_ = outer_scope_; }
private:
- // Bookkeeping
Parser* parser_;
- // Previous values
- Scope* previous_top_scope_;
+ Scope* outer_scope_;
};
-class LexicalScope BASE_EMBEDDED {
- public:
- LexicalScope(Parser* parser, Scope* scope, Isolate* isolate);
- ~LexicalScope();
-
- int NextMaterializedLiteralIndex() {
- int next_index =
- materialized_literal_count_ + JSFunction::kLiteralsPrefixSize;
- materialized_literal_count_++;
- return next_index;
- }
- int materialized_literal_count() { return materialized_literal_count_; }
-
- void SetThisPropertyAssignmentInfo(
- bool only_simple_this_property_assignments,
- Handle<FixedArray> this_property_assignments) {
- only_simple_this_property_assignments_ =
- only_simple_this_property_assignments;
- this_property_assignments_ = this_property_assignments;
- }
- bool only_simple_this_property_assignments() {
- return only_simple_this_property_assignments_;
- }
- Handle<FixedArray> this_property_assignments() {
- return this_property_assignments_;
- }
-
- void AddProperty() { expected_property_count_++; }
- int expected_property_count() { return expected_property_count_; }
-
- private:
- // Captures the number of literals that need materialization in the
- // function. Includes regexp literals, and boilerplate for object
- // and array literals.
- int materialized_literal_count_;
-
- // Properties count estimation.
- int expected_property_count_;
-
- // Keeps track of assignments to properties of this. Used for
- // optimizing constructors.
- bool only_simple_this_property_assignments_;
- Handle<FixedArray> this_property_assignments_;
-
- // Bookkeeping
- Parser* parser_;
- // Previous values
- LexicalScope* lexical_scope_parent_;
- Scope* previous_scope_;
- unsigned previous_ast_node_id_;
-};
-
-
-LexicalScope::LexicalScope(Parser* parser, Scope* scope, Isolate* isolate)
- : materialized_literal_count_(0),
- expected_property_count_(0),
- only_simple_this_property_assignments_(false),
- this_property_assignments_(isolate->factory()->empty_fixed_array()),
- parser_(parser),
- lexical_scope_parent_(parser->lexical_scope_),
- previous_scope_(parser->top_scope_),
- previous_ast_node_id_(isolate->ast_node_id()) {
+Parser::FunctionState::FunctionState(Parser* parser,
+ Scope* scope,
+ Isolate* isolate)
+ : next_materialized_literal_index_(JSFunction::kLiteralsPrefixSize),
+ next_handler_index_(0),
+ expected_property_count_(0),
+ only_simple_this_property_assignments_(false),
+ this_property_assignments_(isolate->factory()->empty_fixed_array()),
+ parser_(parser),
+ outer_function_state_(parser->current_function_state_),
+ outer_scope_(parser->top_scope_),
+ saved_ast_node_id_(isolate->ast_node_id()),
+ factory_(isolate) {
parser->top_scope_ = scope;
- parser->lexical_scope_ = this;
+ parser->current_function_state_ = this;
isolate->set_ast_node_id(AstNode::kDeclarationsId + 1);
}
-LexicalScope::~LexicalScope() {
- parser_->top_scope_ = previous_scope_;
- parser_->lexical_scope_ = lexical_scope_parent_;
- parser_->isolate()->set_ast_node_id(previous_ast_node_id_);
+Parser::FunctionState::~FunctionState() {
+ parser_->top_scope_ = outer_scope_;
+ parser_->current_function_state_ = outer_function_state_;
+ if (outer_function_state_ != NULL) {
+ parser_->isolate()->set_ast_node_id(saved_ast_node_id_);
+ }
}
@@ -584,34 +533,40 @@ LexicalScope::~LexicalScope() {
// Implementation of Parser
Parser::Parser(Handle<Script> script,
- bool allow_natives_syntax,
+ int parser_flags,
v8::Extension* extension,
ScriptDataImpl* pre_data)
: isolate_(script->GetIsolate()),
symbol_cache_(pre_data ? pre_data->symbol_count() : 0),
script_(script),
scanner_(isolate_->unicode_cache()),
+ reusable_preparser_(NULL),
top_scope_(NULL),
- lexical_scope_(NULL),
+ current_function_state_(NULL),
target_stack_(NULL),
- allow_natives_syntax_(allow_natives_syntax),
extension_(extension),
pre_data_(pre_data),
fni_(NULL),
+ allow_natives_syntax_((parser_flags & kAllowNativesSyntax) != 0),
+ allow_lazy_((parser_flags & kAllowLazy) != 0),
+ allow_modules_((parser_flags & kAllowModules) != 0),
stack_overflow_(false),
- parenthesized_function_(false),
- harmony_scoping_(false) {
- AstNode::ResetIds();
+ parenthesized_function_(false) {
+ isolate_->set_ast_node_id(0);
+ if ((parser_flags & kLanguageModeMask) == EXTENDED_MODE) {
+ scanner().SetHarmonyScoping(true);
+ }
+ if ((parser_flags & kAllowModules) != 0) {
+ scanner().SetHarmonyModules(true);
+ }
}
-FunctionLiteral* Parser::ParseProgram(Handle<String> source,
- bool in_global_context,
- StrictModeFlag strict_mode,
- bool qml_mode) {
+FunctionLiteral* Parser::ParseProgram(CompilationInfo* info) {
ZoneScope zone_scope(isolate(), DONT_DELETE_ON_EXIT);
HistogramTimerScope timer(isolate()->counters()->parse());
+ Handle<String> source(String::cast(script_->source()));
isolate()->counters()->total_parse_size()->Increment(source->length());
fni_ = new(zone()) FuncNameInferrer(isolate());
@@ -621,69 +576,74 @@ FunctionLiteral* Parser::ParseProgram(Handle<String> source,
// Notice that the stream is destroyed at the end of the branch block.
// The last line of the blocks can't be moved outside, even though they're
// identical calls.
- ExternalTwoByteStringUC16CharacterStream stream(
+ ExternalTwoByteStringUtf16CharacterStream stream(
Handle<ExternalTwoByteString>::cast(source), 0, source->length());
scanner_.Initialize(&stream);
- return DoParseProgram(source, in_global_context, strict_mode, qml_mode, &zone_scope);
+ return DoParseProgram(info, source, &zone_scope);
} else {
- GenericStringUC16CharacterStream stream(source, 0, source->length());
+ GenericStringUtf16CharacterStream stream(source, 0, source->length());
scanner_.Initialize(&stream);
- return DoParseProgram(source, in_global_context, strict_mode, qml_mode, &zone_scope);
+ return DoParseProgram(info, source, &zone_scope);
}
}
-FunctionLiteral* Parser::DoParseProgram(Handle<String> source,
- bool in_global_context,
- StrictModeFlag strict_mode,
- bool qml_mode,
+FunctionLiteral* Parser::DoParseProgram(CompilationInfo* info,
+ Handle<String> source,
ZoneScope* zone_scope) {
ASSERT(top_scope_ == NULL);
ASSERT(target_stack_ == NULL);
if (pre_data_ != NULL) pre_data_->Initialize();
// Compute the parsing mode.
- mode_ = FLAG_lazy ? PARSE_LAZILY : PARSE_EAGERLY;
+ mode_ = (FLAG_lazy && allow_lazy_) ? PARSE_LAZILY : PARSE_EAGERLY;
if (allow_natives_syntax_ || extension_ != NULL) mode_ = PARSE_EAGERLY;
- ScopeType type = in_global_context ? GLOBAL_SCOPE : EVAL_SCOPE;
Handle<String> no_name = isolate()->factory()->empty_symbol();
FunctionLiteral* result = NULL;
- { Scope* scope = NewScope(top_scope_, type);
+ { Scope* scope = NewScope(top_scope_, GLOBAL_SCOPE);
+ info->SetGlobalScope(scope);
+ if (info->is_eval()) {
+ Handle<SharedFunctionInfo> shared = info->shared_info();
+ if (!info->is_global() && (shared.is_null() || shared->is_function())) {
+ scope = Scope::DeserializeScopeChain(*info->calling_context(), scope);
+ }
+ if (!scope->is_global_scope() || info->language_mode() != CLASSIC_MODE) {
+ scope = NewScope(scope, EVAL_SCOPE);
+ }
+ }
scope->set_start_position(0);
scope->set_end_position(source->length());
- LexicalScope lexical_scope(this, scope, isolate());
- ASSERT(top_scope_->strict_mode_flag() == kNonStrictMode);
- top_scope_->SetStrictModeFlag(strict_mode);
- if (qml_mode) {
- scope->EnableQmlMode();
- }
+ FunctionState function_state(this, scope, isolate());
+ top_scope_->SetLanguageMode(info->language_mode());
ZoneList<Statement*>* body = new(zone()) ZoneList<Statement*>(16);
bool ok = true;
int beg_loc = scanner().location().beg_pos;
- ParseSourceElements(body, Token::EOS, &ok);
- if (ok && top_scope_->is_strict_mode()) {
+ ParseSourceElements(body, Token::EOS, info->is_eval(), &ok);
+ if (ok && !top_scope_->is_classic_mode()) {
CheckOctalLiteral(beg_loc, scanner().location().end_pos, &ok);
}
- if (ok && harmony_scoping_) {
- CheckConflictingVarDeclarations(scope, &ok);
+ if (ok && is_extended_mode()) {
+ CheckConflictingVarDeclarations(top_scope_, &ok);
}
if (ok) {
- result = new(zone()) FunctionLiteral(
- isolate(),
+ result = factory()->NewFunctionLiteral(
no_name,
top_scope_,
body,
- lexical_scope.materialized_literal_count(),
- lexical_scope.expected_property_count(),
- lexical_scope.only_simple_this_property_assignments(),
- lexical_scope.this_property_assignments(),
+ function_state.materialized_literal_count(),
+ function_state.expected_property_count(),
+ function_state.handler_count(),
+ function_state.only_simple_this_property_assignments(),
+ function_state.this_property_assignments(),
0,
+ FunctionLiteral::kNoDuplicateParameters,
FunctionLiteral::ANONYMOUS_EXPRESSION,
- false); // Does not have duplicate parameters.
+ FunctionLiteral::kGlobalOrEval);
+ result->set_ast_properties(factory()->visitor()->ast_properties());
} else if (stack_overflow_) {
isolate()->StackOverflow();
}
@@ -698,6 +658,7 @@ FunctionLiteral* Parser::DoParseProgram(Handle<String> source,
return result;
}
+
FunctionLiteral* Parser::ParseLazy(CompilationInfo* info) {
ZoneScope zone_scope(isolate(), DONT_DELETE_ON_EXIT);
HistogramTimerScope timer(isolate()->counters()->parse_lazy());
@@ -708,16 +669,16 @@ FunctionLiteral* Parser::ParseLazy(CompilationInfo* info) {
// Initialize parser state.
source->TryFlatten();
if (source->IsExternalTwoByteString()) {
- ExternalTwoByteStringUC16CharacterStream stream(
+ ExternalTwoByteStringUtf16CharacterStream stream(
Handle<ExternalTwoByteString>::cast(source),
shared_info->start_position(),
shared_info->end_position());
FunctionLiteral* result = ParseLazy(info, &stream, &zone_scope);
return result;
} else {
- GenericStringUC16CharacterStream stream(source,
- shared_info->start_position(),
- shared_info->end_position());
+ GenericStringUtf16CharacterStream stream(source,
+ shared_info->start_position(),
+ shared_info->end_position());
FunctionLiteral* result = ParseLazy(info, &stream, &zone_scope);
return result;
}
@@ -725,7 +686,7 @@ FunctionLiteral* Parser::ParseLazy(CompilationInfo* info) {
FunctionLiteral* Parser::ParseLazy(CompilationInfo* info,
- UC16CharacterStream* source,
+ Utf16CharacterStream* source,
ZoneScope* zone_scope) {
Handle<SharedFunctionInfo> shared_info = info->shared_info();
scanner_.Initialize(source);
@@ -744,18 +705,16 @@ FunctionLiteral* Parser::ParseLazy(CompilationInfo* info,
{
// Parse the function literal.
Scope* scope = NewScope(top_scope_, GLOBAL_SCOPE);
+ info->SetGlobalScope(scope);
if (!info->closure().is_null()) {
- scope = Scope::DeserializeScopeChain(info, scope);
+ scope = Scope::DeserializeScopeChain(info->closure()->context(), scope);
}
- LexicalScope lexical_scope(this, scope, isolate());
- ASSERT(scope->strict_mode_flag() == kNonStrictMode ||
- scope->strict_mode_flag() == info->strict_mode_flag());
- ASSERT(info->strict_mode_flag() == shared_info->strict_mode_flag());
- scope->SetStrictModeFlag(shared_info->strict_mode_flag());
- if (shared_info->qml_mode()) {
- top_scope_->EnableQmlMode();
- }
-
+ FunctionState function_state(this, scope, isolate());
+ ASSERT(scope->language_mode() != STRICT_MODE || !info->is_classic_mode());
+ ASSERT(scope->language_mode() != EXTENDED_MODE ||
+ info->is_extended_mode());
+ ASSERT(info->language_mode() == shared_info->language_mode());
+ scope->SetLanguageMode(shared_info->language_mode());
FunctionLiteral::Type type = shared_info->is_expression()
? (shared_info->is_anonymous()
? FunctionLiteral::ANONYMOUS_EXPRESSION
@@ -802,6 +761,12 @@ void Parser::ReportMessage(const char* type, Vector<const char*> args) {
}
+void Parser::ReportMessage(const char* type, Vector<Handle<String> > args) {
+ Scanner::Location source_location = scanner().location();
+ ReportMessageAt(source_location, type, args);
+}
+
+
void Parser::ReportMessageAt(Scanner::Location source_location,
const char* type,
Vector<const char*> args) {
@@ -836,10 +801,6 @@ void Parser::ReportMessageAt(Scanner::Location source_location,
isolate()->Throw(*result, &location);
}
-void Parser::SetHarmonyScoping(bool block_scoping) {
- scanner().SetHarmonyScoping(block_scoping);
- harmony_scoping_ = block_scoping;
-}
// Base class containing common code for the different finder classes used by
// the parser.
@@ -1137,32 +1098,12 @@ class ThisNamedPropertyAssignmentFinder : public ParserFinder {
};
-Statement* Parser::ParseSourceElement(ZoneStringList* labels,
- bool* ok) {
- // (Ecma 262 5th Edition, clause 14):
- // SourceElement:
- // Statement
- // FunctionDeclaration
- //
- // In harmony mode we allow additionally the following productions
- // SourceElement:
- // LetDeclaration
- // ConstDeclaration
-
- if (peek() == Token::FUNCTION) {
- return ParseFunctionDeclaration(ok);
- } else if (peek() == Token::LET || peek() == Token::CONST) {
- return ParseVariableStatement(kSourceElement, ok);
- }
- return ParseStatement(labels, ok);
-}
-
-
void* Parser::ParseSourceElements(ZoneList<Statement*>* processor,
int end_token,
+ bool is_eval,
bool* ok) {
// SourceElements ::
- // (SourceElement)* <end_token>
+ // (ModuleElement)* <end_token>
// Allocate a target stack to use for this set of source
// elements. This way, all scripts and functions get their own
@@ -1181,7 +1122,7 @@ void* Parser::ParseSourceElements(ZoneList<Statement*>* processor,
}
Scanner::Location token_loc = scanner().peek_location();
- Statement* stat = ParseSourceElement(NULL, CHECK_OK);
+ Statement* stat = ParseModuleElement(NULL, CHECK_OK);
if (stat == NULL || stat->IsEmpty()) {
directive_prologue = false; // End of directive prologue.
continue;
@@ -1189,8 +1130,8 @@ void* Parser::ParseSourceElements(ZoneList<Statement*>* processor,
if (directive_prologue) {
// A shot at a directive.
- ExpressionStatement *e_stat;
- Literal *literal;
+ ExpressionStatement* e_stat;
+ Literal* literal;
// Still processing directive prologue?
if ((e_stat = stat->AsExpressionStatement()) != NULL &&
(literal = e_stat->expression()->AsLiteral()) != NULL &&
@@ -1198,11 +1139,24 @@ void* Parser::ParseSourceElements(ZoneList<Statement*>* processor,
Handle<String> directive = Handle<String>::cast(literal->handle());
// Check "use strict" directive (ES5 14.1).
- if (!top_scope_->is_strict_mode() &&
+ if (top_scope_->is_classic_mode() &&
directive->Equals(isolate()->heap()->use_strict()) &&
token_loc.end_pos - token_loc.beg_pos ==
isolate()->heap()->use_strict()->length() + 2) {
- top_scope_->SetStrictModeFlag(kStrictMode);
+ // TODO(mstarzinger): Global strict eval calls, need their own scope
+ // as specified in ES5 10.4.2(3). The correct fix would be to always
+ // add this scope in DoParseProgram(), but that requires adaptations
+ // all over the code base, so we go with a quick-fix for now.
+ if (is_eval && !top_scope_->is_eval_scope()) {
+ ASSERT(top_scope_->is_global_scope());
+ Scope* scope = NewScope(top_scope_, EVAL_SCOPE);
+ scope->set_start_position(top_scope_->start_position());
+ scope->set_end_position(top_scope_->end_position());
+ top_scope_ = scope;
+ }
+ // TODO(ES6): Fix entering extended mode, once it is specified.
+ top_scope_->SetLanguageMode(FLAG_harmony_scoping
+ ? EXTENDED_MODE : STRICT_MODE);
// "use strict" is the only directive for now.
directive_prologue = false;
}
@@ -1226,15 +1180,394 @@ void* Parser::ParseSourceElements(ZoneList<Statement*>* processor,
this_property_assignment_finder.only_simple_this_property_assignments()
&& top_scope_->declarations()->length() == 0;
if (only_simple_this_property_assignments) {
- lexical_scope_->SetThisPropertyAssignmentInfo(
+ current_function_state_->SetThisPropertyAssignmentInfo(
only_simple_this_property_assignments,
this_property_assignment_finder.GetThisPropertyAssignments());
}
}
+
return 0;
}
+Statement* Parser::ParseModuleElement(ZoneStringList* labels,
+ bool* ok) {
+ // (Ecma 262 5th Edition, clause 14):
+ // SourceElement:
+ // Statement
+ // FunctionDeclaration
+ //
+ // In harmony mode we allow additionally the following productions
+ // ModuleElement:
+ // LetDeclaration
+ // ConstDeclaration
+ // ModuleDeclaration
+ // ImportDeclaration
+ // ExportDeclaration
+
+ switch (peek()) {
+ case Token::FUNCTION:
+ return ParseFunctionDeclaration(NULL, ok);
+ case Token::LET:
+ case Token::CONST:
+ return ParseVariableStatement(kModuleElement, NULL, ok);
+ case Token::IMPORT:
+ return ParseImportDeclaration(ok);
+ case Token::EXPORT:
+ return ParseExportDeclaration(ok);
+ default: {
+ Statement* stmt = ParseStatement(labels, CHECK_OK);
+ // Handle 'module' as a context-sensitive keyword.
+ if (FLAG_harmony_modules &&
+ peek() == Token::IDENTIFIER &&
+ !scanner().HasAnyLineTerminatorBeforeNext() &&
+ stmt != NULL) {
+ ExpressionStatement* estmt = stmt->AsExpressionStatement();
+ if (estmt != NULL &&
+ estmt->expression()->AsVariableProxy() != NULL &&
+ estmt->expression()->AsVariableProxy()->name()->Equals(
+ isolate()->heap()->module_symbol()) &&
+ !scanner().literal_contains_escapes()) {
+ return ParseModuleDeclaration(NULL, ok);
+ }
+ }
+ return stmt;
+ }
+ }
+}
+
+
+Block* Parser::ParseModuleDeclaration(ZoneStringList* names, bool* ok) {
+ // ModuleDeclaration:
+ // 'module' Identifier Module
+
+ // Create new block with one expected declaration.
+ Block* block = factory()->NewBlock(NULL, 1, true);
+ Handle<String> name = ParseIdentifier(CHECK_OK);
+
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Module %s...\n", name->ToAsciiArray());
+#endif
+
+ Module* module = ParseModule(CHECK_OK);
+ VariableProxy* proxy = NewUnresolved(name, LET, module->interface());
+ Declaration* declaration =
+ factory()->NewModuleDeclaration(proxy, module, top_scope_);
+ Declare(declaration, true, CHECK_OK);
+
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Module %s.\n", name->ToAsciiArray());
+
+ if (FLAG_print_interfaces) {
+ PrintF("module %s : ", name->ToAsciiArray());
+ module->interface()->Print();
+ }
+#endif
+
+ // TODO(rossberg): Add initialization statement to block.
+
+ if (names) names->Add(name);
+ return block;
+}
+
+
+Module* Parser::ParseModule(bool* ok) {
+ // Module:
+ // '{' ModuleElement '}'
+ // '=' ModulePath ';'
+ // 'at' String ';'
+
+ switch (peek()) {
+ case Token::LBRACE:
+ return ParseModuleLiteral(ok);
+
+ case Token::ASSIGN: {
+ Expect(Token::ASSIGN, CHECK_OK);
+ Module* result = ParseModulePath(CHECK_OK);
+ ExpectSemicolon(CHECK_OK);
+ return result;
+ }
+
+ default: {
+ ExpectContextualKeyword("at", CHECK_OK);
+ Module* result = ParseModuleUrl(CHECK_OK);
+ ExpectSemicolon(CHECK_OK);
+ return result;
+ }
+ }
+}
+
+
+Module* Parser::ParseModuleLiteral(bool* ok) {
+ // Module:
+ // '{' ModuleElement '}'
+
+ // Construct block expecting 16 statements.
+ Block* body = factory()->NewBlock(NULL, 16, false);
+#ifdef DEBUG
+ if (FLAG_print_interface_details) PrintF("# Literal ");
+#endif
+ Scope* scope = NewScope(top_scope_, MODULE_SCOPE);
+
+ Expect(Token::LBRACE, CHECK_OK);
+ scope->set_start_position(scanner().location().beg_pos);
+ scope->SetLanguageMode(EXTENDED_MODE);
+
+ {
+ BlockState block_state(this, scope);
+ TargetCollector collector;
+ Target target(&this->target_stack_, &collector);
+ Target target_body(&this->target_stack_, body);
+ InitializationBlockFinder block_finder(top_scope_, target_stack_);
+
+ while (peek() != Token::RBRACE) {
+ Statement* stat = ParseModuleElement(NULL, CHECK_OK);
+ if (stat && !stat->IsEmpty()) {
+ body->AddStatement(stat);
+ block_finder.Update(stat);
+ }
+ }
+ }
+
+ Expect(Token::RBRACE, CHECK_OK);
+ scope->set_end_position(scanner().location().end_pos);
+ body->set_block_scope(scope);
+
+ scope->interface()->Freeze(ok);
+ ASSERT(ok);
+ return factory()->NewModuleLiteral(body, scope->interface());
+}
+
+
+Module* Parser::ParseModulePath(bool* ok) {
+ // ModulePath:
+ // Identifier
+ // ModulePath '.' Identifier
+
+ Module* result = ParseModuleVariable(CHECK_OK);
+ while (Check(Token::PERIOD)) {
+ Handle<String> name = ParseIdentifierName(CHECK_OK);
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Path .%s ", name->ToAsciiArray());
+#endif
+ Module* member = factory()->NewModulePath(result, name);
+ result->interface()->Add(name, member->interface(), ok);
+ if (!*ok) {
+#ifdef DEBUG
+ if (FLAG_print_interfaces) {
+ PrintF("PATH TYPE ERROR at '%s'\n", name->ToAsciiArray());
+ PrintF("result: ");
+ result->interface()->Print();
+ PrintF("member: ");
+ member->interface()->Print();
+ }
+#endif
+ ReportMessage("invalid_module_path", Vector<Handle<String> >(&name, 1));
+ return NULL;
+ }
+ result = member;
+ }
+
+ return result;
+}
+
+
+Module* Parser::ParseModuleVariable(bool* ok) {
+ // ModulePath:
+ // Identifier
+
+ Handle<String> name = ParseIdentifier(CHECK_OK);
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Module variable %s ", name->ToAsciiArray());
+#endif
+ VariableProxy* proxy = top_scope_->NewUnresolved(
+ factory(), name, scanner().location().beg_pos, Interface::NewModule());
+
+ return factory()->NewModuleVariable(proxy);
+}
+
+
+Module* Parser::ParseModuleUrl(bool* ok) {
+ // Module:
+ // String
+
+ Expect(Token::STRING, CHECK_OK);
+ Handle<String> symbol = GetSymbol(CHECK_OK);
+
+ // TODO(ES6): Request JS resource from environment...
+
+#ifdef DEBUG
+ if (FLAG_print_interface_details) PrintF("# Url ");
+#endif
+ return factory()->NewModuleUrl(symbol);
+}
+
+
+Module* Parser::ParseModuleSpecifier(bool* ok) {
+ // ModuleSpecifier:
+ // String
+ // ModulePath
+
+ if (peek() == Token::STRING) {
+ return ParseModuleUrl(ok);
+ } else {
+ return ParseModulePath(ok);
+ }
+}
+
+
+Block* Parser::ParseImportDeclaration(bool* ok) {
+ // ImportDeclaration:
+ // 'import' IdentifierName (',' IdentifierName)* 'from' ModuleSpecifier ';'
+ //
+ // TODO(ES6): implement destructuring ImportSpecifiers
+
+ Expect(Token::IMPORT, CHECK_OK);
+ ZoneStringList names(1);
+
+ Handle<String> name = ParseIdentifierName(CHECK_OK);
+ names.Add(name);
+ while (peek() == Token::COMMA) {
+ Consume(Token::COMMA);
+ name = ParseIdentifierName(CHECK_OK);
+ names.Add(name);
+ }
+
+ ExpectContextualKeyword("from", CHECK_OK);
+ Module* module = ParseModuleSpecifier(CHECK_OK);
+ ExpectSemicolon(CHECK_OK);
+
+ // Generate a separate declaration for each identifier.
+ // TODO(ES6): once we implement destructuring, make that one declaration.
+ Block* block = factory()->NewBlock(NULL, 1, true);
+ for (int i = 0; i < names.length(); ++i) {
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Import %s ", names[i]->ToAsciiArray());
+#endif
+ Interface* interface = Interface::NewUnknown();
+ module->interface()->Add(names[i], interface, ok);
+ if (!*ok) {
+#ifdef DEBUG
+ if (FLAG_print_interfaces) {
+ PrintF("IMPORT TYPE ERROR at '%s'\n", names[i]->ToAsciiArray());
+ PrintF("module: ");
+ module->interface()->Print();
+ }
+#endif
+ ReportMessage("invalid_module_path", Vector<Handle<String> >(&name, 1));
+ return NULL;
+ }
+ VariableProxy* proxy = NewUnresolved(names[i], LET, interface);
+ Declaration* declaration =
+ factory()->NewImportDeclaration(proxy, module, top_scope_);
+ Declare(declaration, true, CHECK_OK);
+ // TODO(rossberg): Add initialization statement to block.
+ }
+
+ return block;
+}
+
+
+Statement* Parser::ParseExportDeclaration(bool* ok) {
+ // ExportDeclaration:
+ // 'export' Identifier (',' Identifier)* ';'
+ // 'export' VariableDeclaration
+ // 'export' FunctionDeclaration
+ // 'export' ModuleDeclaration
+ //
+ // TODO(ES6): implement structuring ExportSpecifiers
+
+ Expect(Token::EXPORT, CHECK_OK);
+
+ Statement* result = NULL;
+ ZoneStringList names(1);
+ switch (peek()) {
+ case Token::IDENTIFIER: {
+ Handle<String> name = ParseIdentifier(CHECK_OK);
+ // Handle 'module' as a context-sensitive keyword.
+ if (!name->IsEqualTo(CStrVector("module"))) {
+ names.Add(name);
+ while (peek() == Token::COMMA) {
+ Consume(Token::COMMA);
+ name = ParseIdentifier(CHECK_OK);
+ names.Add(name);
+ }
+ ExpectSemicolon(CHECK_OK);
+ result = factory()->NewEmptyStatement();
+ } else {
+ result = ParseModuleDeclaration(&names, CHECK_OK);
+ }
+ break;
+ }
+
+ case Token::FUNCTION:
+ result = ParseFunctionDeclaration(&names, CHECK_OK);
+ break;
+
+ case Token::VAR:
+ case Token::LET:
+ case Token::CONST:
+ result = ParseVariableStatement(kModuleElement, &names, CHECK_OK);
+ break;
+
+ default:
+ *ok = false;
+ ReportUnexpectedToken(scanner().current_token());
+ return NULL;
+ }
+
+ // Extract declared names into export declarations and interface.
+ Interface* interface = top_scope_->interface();
+ for (int i = 0; i < names.length(); ++i) {
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Export %s ", names[i]->ToAsciiArray());
+#endif
+ Interface* inner = Interface::NewUnknown();
+ interface->Add(names[i], inner, CHECK_OK);
+ VariableProxy* proxy = NewUnresolved(names[i], LET, inner);
+ USE(proxy);
+ // TODO(rossberg): Rethink whether we actually need to store export
+ // declarations (for compilation?).
+ // ExportDeclaration* declaration =
+ // factory()->NewExportDeclaration(proxy, top_scope_);
+ // top_scope_->AddDeclaration(declaration);
+ }
+
+ ASSERT(result != NULL);
+ return result;
+}
+
+
+Statement* Parser::ParseBlockElement(ZoneStringList* labels,
+ bool* ok) {
+ // (Ecma 262 5th Edition, clause 14):
+ // SourceElement:
+ // Statement
+ // FunctionDeclaration
+ //
+ // In harmony mode we allow additionally the following productions
+ // BlockElement (aka SourceElement):
+ // LetDeclaration
+ // ConstDeclaration
+
+ switch (peek()) {
+ case Token::FUNCTION:
+ return ParseFunctionDeclaration(NULL, ok);
+ case Token::LET:
+ case Token::CONST:
+ return ParseVariableStatement(kModuleElement, NULL, ok);
+ default:
+ return ParseStatement(labels, ok);
+ }
+}
+
+
Statement* Parser::ParseStatement(ZoneStringList* labels, bool* ok) {
// Statement ::
// Block
@@ -1268,13 +1601,14 @@ Statement* Parser::ParseStatement(ZoneStringList* labels, bool* ok) {
return ParseBlock(labels, ok);
case Token::CONST: // fall through
+ case Token::LET:
case Token::VAR:
- stmt = ParseVariableStatement(kStatement, ok);
+ stmt = ParseVariableStatement(kStatement, NULL, ok);
break;
case Token::SEMICOLON:
Next();
- return EmptyStatement();
+ return factory()->NewEmptyStatement();
case Token::IF:
stmt = ParseIfStatement(labels, ok);
@@ -1322,7 +1656,7 @@ Statement* Parser::ParseStatement(ZoneStringList* labels, bool* ok) {
// one must take great care not to treat it as a
// fall-through. It is much easier just to wrap the entire
// try-statement in a statement block and put the labels there
- Block* result = new(zone()) Block(isolate(), labels, 1, false);
+ Block* result = factory()->NewBlock(labels, 1, false);
Target target(&this->target_stack_, result);
TryStatement* statement = ParseTryStatement(CHECK_OK);
if (statement) {
@@ -1340,13 +1674,13 @@ Statement* Parser::ParseStatement(ZoneStringList* labels, bool* ok) {
// FunctionDeclaration
// Common language extension is to allow function declaration in place
// of any statement. This language extension is disabled in strict mode.
- if (top_scope_->is_strict_mode() || harmony_scoping_) {
+ if (!top_scope_->is_classic_mode()) {
ReportMessageAt(scanner().peek_location(), "strict_function",
Vector<const char*>::empty());
*ok = false;
return NULL;
}
- return ParseFunctionDeclaration(ok);
+ return ParseFunctionDeclaration(NULL, ok);
}
case Token::DEBUGGER:
@@ -1363,19 +1697,24 @@ Statement* Parser::ParseStatement(ZoneStringList* labels, bool* ok) {
}
-VariableProxy* Parser::Declare(Handle<String> name,
- VariableMode mode,
- FunctionLiteral* fun,
- bool resolve,
- bool* ok) {
- Variable* var = NULL;
+VariableProxy* Parser::NewUnresolved(
+ Handle<String> name, VariableMode mode, Interface* interface) {
// If we are inside a function, a declaration of a var/const variable is a
// truly local variable, and the scope of the variable is always the function
// scope.
// Let/const variables in harmony mode are always added to the immediately
// enclosing scope.
- Scope* declaration_scope = (mode == LET || mode == CONST_HARMONY)
- ? top_scope_ : top_scope_->DeclarationScope();
+ return DeclarationScope(mode)->NewUnresolved(
+ factory(), name, scanner().location().beg_pos, interface);
+}
+
+
+void Parser::Declare(Declaration* declaration, bool resolve, bool* ok) {
+ VariableProxy* proxy = declaration->proxy();
+ Handle<String> name = proxy->name();
+ VariableMode mode = declaration->mode();
+ Scope* declaration_scope = DeclarationScope(mode);
+ Variable* var = NULL;
// If a function scope exists, then we can statically declare this
// variable and also set its mode. In any case, a Declaration node
@@ -1388,13 +1727,16 @@ VariableProxy* Parser::Declare(Handle<String> name,
// Also for block scoped let/const bindings the variable can be
// statically declared.
if (declaration_scope->is_function_scope() ||
- declaration_scope->is_strict_mode_eval_scope() ||
- declaration_scope->is_block_scope()) {
+ declaration_scope->is_strict_or_extended_eval_scope() ||
+ declaration_scope->is_block_scope() ||
+ declaration_scope->is_module_scope() ||
+ declaration->AsModuleDeclaration() != NULL) {
// Declare the variable in the function scope.
var = declaration_scope->LocalLookup(name);
if (var == NULL) {
// Declare the name.
- var = declaration_scope->DeclareLocal(name, mode);
+ var = declaration_scope->DeclareLocal(
+ name, mode, declaration->initialization(), proxy->interface());
} else {
// The name was declared in this scope before; check for conflicting
// re-declarations. We have a conflict if either of the declarations is
@@ -1413,7 +1755,7 @@ VariableProxy* Parser::Declare(Handle<String> name,
var->mode() == CONST ||
var->mode() == CONST_HARMONY ||
var->mode() == LET);
- if (harmony_scoping_) {
+ if (is_extended_mode()) {
// In harmony mode we treat re-declarations as early errors. See
// ES5 16 for a definition of early errors.
SmartArrayPointer<char> c_string = name->ToCString(DISALLOW_NULLS);
@@ -1421,7 +1763,7 @@ VariableProxy* Parser::Declare(Handle<String> name,
Vector<const char*> args(elms, 2);
ReportMessage("redeclaration", args);
*ok = false;
- return NULL;
+ return;
}
const char* type = (var->mode() == VAR)
? "var" : var->is_const_mode() ? "const" : "let";
@@ -1451,17 +1793,33 @@ VariableProxy* Parser::Declare(Handle<String> name,
// semantic issue as long as we keep the source order, but it may be
// a performance issue since it may lead to repeated
// Runtime::DeclareContextSlot() calls.
- VariableProxy* proxy = declaration_scope->NewUnresolved(
- name, scanner().location().beg_pos);
- declaration_scope->AddDeclaration(
- new(zone()) Declaration(proxy, mode, fun, top_scope_));
+ declaration_scope->AddDeclaration(declaration);
- // For global const variables we bind the proxy to a variable.
if ((mode == CONST || mode == CONST_HARMONY) &&
declaration_scope->is_global_scope()) {
+ // For global const variables we bind the proxy to a variable.
ASSERT(resolve); // should be set by all callers
Variable::Kind kind = Variable::NORMAL;
- var = new(zone()) Variable(declaration_scope, name, CONST, true, kind);
+ var = new(zone()) Variable(declaration_scope,
+ name,
+ mode,
+ true,
+ kind,
+ kNeedsInitialization);
+ } else if (declaration_scope->is_eval_scope() &&
+ declaration_scope->is_classic_mode()) {
+ // For variable declarations in a non-strict eval scope the proxy is bound
+ // to a lookup variable to force a dynamic declaration using the
+ // DeclareContextSlot runtime function.
+ Variable::Kind kind = Variable::NORMAL;
+ var = new(zone()) Variable(declaration_scope,
+ name,
+ mode,
+ true,
+ kind,
+ declaration->initialization());
+ var->AllocateTo(Variable::LOOKUP, -1);
+ resolve = true;
}
// If requested and we have a local variable, bind the proxy to the variable
@@ -1488,9 +1846,30 @@ VariableProxy* Parser::Declare(Handle<String> name,
// initialization code. Thus, inside the 'with' statement, we need
// both access to the static and the dynamic context chain; the
// runtime needs to provide both.
- if (resolve && var != NULL) proxy->BindTo(var);
+ if (resolve && var != NULL) {
+ proxy->BindTo(var);
- return proxy;
+ if (FLAG_harmony_modules) {
+ bool ok;
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Declare %s\n", var->name()->ToAsciiArray());
+#endif
+ proxy->interface()->Unify(var->interface(), &ok);
+ if (!ok) {
+#ifdef DEBUG
+ if (FLAG_print_interfaces) {
+ PrintF("DECLARE TYPE ERROR\n");
+ PrintF("proxy: ");
+ proxy->interface()->Print();
+ PrintF("var: ");
+ var->interface()->Print();
+ }
+#endif
+ ReportMessage("module_type_error", Vector<Handle<String> >(&name, 1));
+ }
+ }
+ }
}
@@ -1517,7 +1896,7 @@ Statement* Parser::ParseNativeDeclaration(bool* ok) {
// isn't lazily compiled. The extension structures are only
// accessible while parsing the first time not when reparsing
// because of lazy compilation.
- top_scope_->DeclarationScope()->ForceEagerCompilation();
+ DeclarationScope(VAR)->ForceEagerCompilation();
// Compute the function template for the native function.
v8::Handle<v8::FunctionTemplate> fun_template =
@@ -1531,7 +1910,7 @@ Statement* Parser::ParseNativeDeclaration(bool* ok) {
Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
Handle<SharedFunctionInfo> shared =
isolate()->factory()->NewSharedFunctionInfo(name, literals, code,
- Handle<SerializedScopeInfo>(fun->shared()->scope_info()));
+ Handle<ScopeInfo>(fun->shared()->scope_info()));
shared->set_construct_stub(*construct_stub);
// Copy the function data to the shared function info.
@@ -1541,16 +1920,20 @@ Statement* Parser::ParseNativeDeclaration(bool* ok) {
// TODO(1240846): It's weird that native function declarations are
// introduced dynamically when we meet their declarations, whereas
- // other functions are setup when entering the surrounding scope.
+ // other functions are set up when entering the surrounding scope.
+ VariableProxy* proxy = NewUnresolved(name, VAR);
+ Declaration* declaration =
+ factory()->NewVariableDeclaration(proxy, VAR, top_scope_);
+ Declare(declaration, true, CHECK_OK);
SharedFunctionInfoLiteral* lit =
- new(zone()) SharedFunctionInfoLiteral(isolate(), shared);
- VariableProxy* var = Declare(name, VAR, NULL, true, CHECK_OK);
- return new(zone()) ExpressionStatement(new(zone()) Assignment(
- isolate(), Token::INIT_VAR, var, lit, RelocInfo::kNoPosition));
+ factory()->NewSharedFunctionInfoLiteral(shared);
+ return factory()->NewExpressionStatement(
+ factory()->NewAssignment(
+ Token::INIT_VAR, proxy, lit, RelocInfo::kNoPosition));
}
-Statement* Parser::ParseFunctionDeclaration(bool* ok) {
+Statement* Parser::ParseFunctionDeclaration(ZoneStringList* names, bool* ok) {
// FunctionDeclaration ::
// 'function' Identifier '(' FormalParameterListopt ')' '{' FunctionBody '}'
Expect(Token::FUNCTION, CHECK_OK);
@@ -1566,14 +1949,18 @@ Statement* Parser::ParseFunctionDeclaration(bool* ok) {
// Even if we're not at the top-level of the global or a function
// scope, we treat is as such and introduce the function with it's
// initial value upon entering the corresponding scope.
- VariableMode mode = harmony_scoping_ ? LET : VAR;
- Declare(name, mode, fun, true, CHECK_OK);
- return EmptyStatement();
+ VariableMode mode = is_extended_mode() ? LET : VAR;
+ VariableProxy* proxy = NewUnresolved(name, mode);
+ Declaration* declaration =
+ factory()->NewFunctionDeclaration(proxy, mode, fun, top_scope_);
+ Declare(declaration, true, CHECK_OK);
+ if (names) names->Add(name);
+ return factory()->NewEmptyStatement();
}
Block* Parser::ParseBlock(ZoneStringList* labels, bool* ok) {
- if (harmony_scoping_) return ParseScopedBlock(labels, ok);
+ if (top_scope_->is_extended_mode()) return ParseScopedBlock(labels, ok);
// Block ::
// '{' Statement* '}'
@@ -1582,7 +1969,7 @@ Block* Parser::ParseBlock(ZoneStringList* labels, bool* ok) {
// (ECMA-262, 3rd, 12.2)
//
// Construct block expecting 16 statements.
- Block* result = new(zone()) Block(isolate(), labels, 16, false);
+ Block* result = factory()->NewBlock(labels, 16, false);
Target target(&this->target_stack_, result);
Expect(Token::LBRACE, CHECK_OK);
InitializationBlockFinder block_finder(top_scope_, target_stack_);
@@ -1599,26 +1986,26 @@ Block* Parser::ParseBlock(ZoneStringList* labels, bool* ok) {
Block* Parser::ParseScopedBlock(ZoneStringList* labels, bool* ok) {
- // The harmony mode uses source elements instead of statements.
+ // The harmony mode uses block elements instead of statements.
//
// Block ::
- // '{' SourceElement* '}'
+ // '{' BlockElement* '}'
// Construct block expecting 16 statements.
- Block* body = new(zone()) Block(isolate(), labels, 16, false);
+ Block* body = factory()->NewBlock(labels, 16, false);
Scope* block_scope = NewScope(top_scope_, BLOCK_SCOPE);
// Parse the statements and collect escaping labels.
Expect(Token::LBRACE, CHECK_OK);
block_scope->set_start_position(scanner().location().beg_pos);
- { SaveScope save_scope(this, block_scope);
+ { BlockState block_state(this, block_scope);
TargetCollector collector;
Target target(&this->target_stack_, &collector);
Target target_body(&this->target_stack_, body);
InitializationBlockFinder block_finder(top_scope_, target_stack_);
while (peek() != Token::RBRACE) {
- Statement* stat = ParseSourceElement(NULL, CHECK_OK);
+ Statement* stat = ParseBlockElement(NULL, CHECK_OK);
if (stat && !stat->IsEmpty()) {
body->AddStatement(stat);
block_finder.Update(stat);
@@ -1634,15 +2021,14 @@ Block* Parser::ParseScopedBlock(ZoneStringList* labels, bool* ok) {
Block* Parser::ParseVariableStatement(VariableDeclarationContext var_context,
+ ZoneStringList* names,
bool* ok) {
// VariableStatement ::
// VariableDeclarations ';'
Handle<String> ignore;
- Block* result = ParseVariableDeclarations(var_context,
- NULL,
- &ignore,
- CHECK_OK);
+ Block* result =
+ ParseVariableDeclarations(var_context, NULL, names, &ignore, CHECK_OK);
ExpectSemicolon(CHECK_OK);
return result;
}
@@ -1655,13 +2041,14 @@ bool Parser::IsEvalOrArguments(Handle<String> string) {
// If the variable declaration declares exactly one non-const
-// variable, then *var is set to that variable. In all other cases,
-// *var is untouched; in particular, it is the caller's responsibility
+// variable, then *out is set to that variable. In all other cases,
+// *out is untouched; in particular, it is the caller's responsibility
// to initialize it properly. This mechanism is used for the parsing
// of 'for-in' loops.
Block* Parser::ParseVariableDeclarations(
VariableDeclarationContext var_context,
VariableDeclarationProperties* decl_props,
+ ZoneStringList* names,
Handle<String>* out,
bool* ok) {
// VariableDeclarations ::
@@ -1688,34 +2075,54 @@ Block* Parser::ParseVariableDeclarations(
if (peek() == Token::VAR) {
Consume(Token::VAR);
} else if (peek() == Token::CONST) {
+ // TODO(ES6): The ES6 Draft Rev4 section 12.2.2 reads:
+ //
+ // ConstDeclaration : const ConstBinding (',' ConstBinding)* ';'
+ //
+ // * It is a Syntax Error if the code that matches this production is not
+ // contained in extended code.
+ //
+ // However disallowing const in classic mode will break compatibility with
+ // existing pages. Therefore we keep allowing const with the old
+ // non-harmony semantics in classic mode.
Consume(Token::CONST);
- if (harmony_scoping_) {
- if (var_context != kSourceElement &&
- var_context != kForStatement) {
- // In harmony mode 'const' declarations are only allowed in source
- // element positions.
- ReportMessage("unprotected_const", Vector<const char*>::empty());
+ switch (top_scope_->language_mode()) {
+ case CLASSIC_MODE:
+ mode = CONST;
+ init_op = Token::INIT_CONST;
+ break;
+ case STRICT_MODE:
+ ReportMessage("strict_const", Vector<const char*>::empty());
*ok = false;
return NULL;
- }
- mode = CONST_HARMONY;
- init_op = Token::INIT_CONST_HARMONY;
- } else if (top_scope_->is_strict_mode()) {
- ReportMessage("strict_const", Vector<const char*>::empty());
- *ok = false;
- return NULL;
- } else {
- mode = CONST;
- init_op = Token::INIT_CONST;
+ case EXTENDED_MODE:
+ if (var_context == kStatement) {
+ // In extended mode 'const' declarations are only allowed in source
+ // element positions.
+ ReportMessage("unprotected_const", Vector<const char*>::empty());
+ *ok = false;
+ return NULL;
+ }
+ mode = CONST_HARMONY;
+ init_op = Token::INIT_CONST_HARMONY;
}
is_const = true;
needs_init = true;
} else if (peek() == Token::LET) {
+ // ES6 Draft Rev4 section 12.2.1:
+ //
+ // LetDeclaration : let LetBindingList ;
+ //
+ // * It is a Syntax Error if the code that matches this production is not
+ // contained in extended code.
+ if (!is_extended_mode()) {
+ ReportMessage("illegal_let", Vector<const char*>::empty());
+ *ok = false;
+ return NULL;
+ }
Consume(Token::LET);
- if (var_context != kSourceElement &&
- var_context != kForStatement) {
+ if (var_context == kStatement) {
// Let declarations are only allowed in source element positions.
- ASSERT(var_context == kStatement);
ReportMessage("unprotected_let", Vector<const char*>::empty());
*ok = false;
return NULL;
@@ -1727,8 +2134,8 @@ Block* Parser::ParseVariableDeclarations(
UNREACHABLE(); // by current callers
}
- Scope* declaration_scope = (mode == LET || mode == CONST_HARMONY)
- ? top_scope_ : top_scope_->DeclarationScope();
+ Scope* declaration_scope = DeclarationScope(mode);
+
// The scope of a var/const declared variable anywhere inside a function
// is the entire function (ECMA-262, 3rd, 10.1.3, and 12.2). Thus we can
// transform a source-level var/const declaration into a (Function)
@@ -1742,7 +2149,7 @@ Block* Parser::ParseVariableDeclarations(
// is inside an initializer block, it is ignored.
//
// Create new block with one expected declaration.
- Block* block = new(zone()) Block(isolate(), NULL, 1, true);
+ Block* block = factory()->NewBlock(NULL, 1, true);
int nvars = 0; // the number of variables declared
Handle<String> name;
do {
@@ -1754,7 +2161,7 @@ Block* Parser::ParseVariableDeclarations(
if (fni_ != NULL) fni_->PushVariableName(name);
// Strict mode variables may not be named eval or arguments
- if (declaration_scope->is_strict_mode() && IsEvalOrArguments(name)) {
+ if (!declaration_scope->is_classic_mode() && IsEvalOrArguments(name)) {
ReportMessage("strict_var_name", Vector<const char*>::empty());
*ok = false;
return NULL;
@@ -1775,7 +2182,10 @@ Block* Parser::ParseVariableDeclarations(
// For let/const declarations in harmony mode, we can also immediately
// pre-resolve the proxy because it resides in the same scope as the
// declaration.
- Declare(name, mode, NULL, mode != VAR, CHECK_OK);
+ VariableProxy* proxy = NewUnresolved(name, mode);
+ Declaration* declaration =
+ factory()->NewVariableDeclaration(proxy, mode, top_scope_);
+ Declare(declaration, mode != VAR, CHECK_OK);
nvars++;
if (declaration_scope->num_var_or_const() > kMaxNumFunctionLocals) {
ReportMessageAt(scanner().location(), "too_many_variables",
@@ -1783,6 +2193,7 @@ Block* Parser::ParseVariableDeclarations(
*ok = false;
return NULL;
}
+ if (names) names->Add(name);
// Parse initialization expression if present and/or needed. A
// declaration of the form:
@@ -1830,6 +2241,11 @@ Block* Parser::ParseVariableDeclarations(
if (decl_props != NULL) *decl_props = kHasInitializers;
}
+ // Record the end position of the initializer.
+ if (proxy->var() != NULL) {
+ proxy->var()->set_initializer_position(scanner().location().end_pos);
+ }
+
// Make sure that 'const x' and 'let x' initialize 'x' to undefined.
if (value == NULL && needs_init) {
value = GetLiteralUndefined();
@@ -1858,38 +2274,26 @@ Block* Parser::ParseVariableDeclarations(
// Compute the arguments for the runtime call.
ZoneList<Expression*>* arguments = new(zone()) ZoneList<Expression*>(3);
// We have at least 1 parameter.
- arguments->Add(NewLiteral(name));
+ arguments->Add(factory()->NewLiteral(name));
CallRuntime* initialize;
if (is_const) {
arguments->Add(value);
value = NULL; // zap the value to avoid the unnecessary assignment
- int qml_mode = 0;
- if (top_scope_->is_qml_mode() && !Isolate::Current()->global()->HasProperty(*name))
- qml_mode = 1;
- arguments->Add(NewNumberLiteral(qml_mode));
-
// Construct the call to Runtime_InitializeConstGlobal
// and add it to the initialization statement block.
// Note that the function does different things depending on
// the number of arguments (1 or 2).
- initialize =
- new(zone()) CallRuntime(
- isolate(),
- isolate()->factory()->InitializeConstGlobal_symbol(),
- Runtime::FunctionForId(Runtime::kInitializeConstGlobal),
- arguments);
+ initialize = factory()->NewCallRuntime(
+ isolate()->factory()->InitializeConstGlobal_symbol(),
+ Runtime::FunctionForId(Runtime::kInitializeConstGlobal),
+ arguments);
} else {
// Add strict mode.
// We may want to pass singleton to avoid Literal allocations.
- StrictModeFlag flag = initialization_scope->strict_mode_flag();
- arguments->Add(NewNumberLiteral(flag));
-
- int qml_mode = 0;
- if (top_scope_->is_qml_mode() && !Isolate::Current()->global()->HasProperty(*name))
- qml_mode = 1;
- arguments->Add(NewNumberLiteral(qml_mode));
+ LanguageMode language_mode = initialization_scope->language_mode();
+ arguments->Add(factory()->NewNumberLiteral(language_mode));
// Be careful not to assign a value to the global variable if
// we're in a with. The initialization value should not
@@ -1904,35 +2308,42 @@ Block* Parser::ParseVariableDeclarations(
// and add it to the initialization statement block.
// Note that the function does different things depending on
// the number of arguments (2 or 3).
- initialize =
- new(zone()) CallRuntime(
- isolate(),
- isolate()->factory()->InitializeVarGlobal_symbol(),
- Runtime::FunctionForId(Runtime::kInitializeVarGlobal),
- arguments);
+ initialize = factory()->NewCallRuntime(
+ isolate()->factory()->InitializeVarGlobal_symbol(),
+ Runtime::FunctionForId(Runtime::kInitializeVarGlobal),
+ arguments);
}
- block->AddStatement(new(zone()) ExpressionStatement(initialize));
+ block->AddStatement(factory()->NewExpressionStatement(initialize));
+ } else if (needs_init) {
+ // Constant initializations always assign to the declared constant which
+ // is always at the function scope level. This is only relevant for
+ // dynamically looked-up variables and constants (the start context for
+ // constant lookups is always the function context, while it is the top
+ // context for var declared variables). Sigh...
+ // For 'let' and 'const' declared variables in harmony mode the
+ // initialization also always assigns to the declared variable.
+ ASSERT(proxy != NULL);
+ ASSERT(proxy->var() != NULL);
+ ASSERT(value != NULL);
+ Assignment* assignment =
+ factory()->NewAssignment(init_op, proxy, value, position);
+ block->AddStatement(factory()->NewExpressionStatement(assignment));
+ value = NULL;
}
// Add an assignment node to the initialization statement block if we still
- // have a pending initialization value. We must distinguish between
- // different kinds of declarations: 'var' initializations are simply
- // assignments (with all the consequences if they are inside a 'with'
- // statement - they may change a 'with' object property). Constant
- // initializations always assign to the declared constant which is
- // always at the function scope level. This is only relevant for
- // dynamically looked-up variables and constants (the start context
- // for constant lookups is always the function context, while it is
- // the top context for var declared variables). Sigh...
- // For 'let' and 'const' declared variables in harmony mode the
- // initialization is in the same scope as the declaration. Thus dynamic
- // lookups are unnecessary even if the block scope is inside a with.
+ // have a pending initialization value.
if (value != NULL) {
- VariableProxy* proxy = initialization_scope->NewUnresolved(name);
+ ASSERT(mode == VAR);
+ // 'var' initializations are simply assignments (with all the consequences
+ // if they are inside a 'with' statement - they may change a 'with' object
+ // property).
+ VariableProxy* proxy =
+ initialization_scope->NewUnresolved(factory(), name);
Assignment* assignment =
- new(zone()) Assignment(isolate(), init_op, proxy, value, position);
- block->AddStatement(new(zone()) ExpressionStatement(assignment));
+ factory()->NewAssignment(init_op, proxy, value, position);
+ block->AddStatement(factory()->NewExpressionStatement(assignment));
}
if (fni_ != NULL) fni_->Leave();
@@ -2010,9 +2421,18 @@ Statement* Parser::ParseExpressionOrLabelledStatement(ZoneStringList* labels,
return ParseNativeDeclaration(ok);
}
- // Parsed expression statement.
- ExpectSemicolon(CHECK_OK);
- return new(zone()) ExpressionStatement(expr);
+ // Parsed expression statement, or the context-sensitive 'module' keyword.
+ // Only expect semicolon in the former case.
+ if (!FLAG_harmony_modules ||
+ peek() != Token::IDENTIFIER ||
+ scanner().HasAnyLineTerminatorBeforeNext() ||
+ expr->AsVariableProxy() == NULL ||
+ !expr->AsVariableProxy()->name()->Equals(
+ isolate()->heap()->module_symbol()) ||
+ scanner().literal_contains_escapes()) {
+ ExpectSemicolon(CHECK_OK);
+ }
+ return factory()->NewExpressionStatement(expr);
}
@@ -2030,10 +2450,9 @@ IfStatement* Parser::ParseIfStatement(ZoneStringList* labels, bool* ok) {
Next();
else_statement = ParseStatement(labels, CHECK_OK);
} else {
- else_statement = EmptyStatement();
+ else_statement = factory()->NewEmptyStatement();
}
- return new(zone()) IfStatement(
- isolate(), condition, then_statement, else_statement);
+ return factory()->NewIfStatement(condition, then_statement, else_statement);
}
@@ -2063,7 +2482,7 @@ Statement* Parser::ParseContinueStatement(bool* ok) {
return NULL;
}
ExpectSemicolon(CHECK_OK);
- return new(zone()) ContinueStatement(target);
+ return factory()->NewContinueStatement(target);
}
@@ -2081,7 +2500,8 @@ Statement* Parser::ParseBreakStatement(ZoneStringList* labels, bool* ok) {
// Parse labeled break statements that target themselves into
// empty statements, e.g. 'l1: l2: l3: break l2;'
if (!label.is_null() && ContainsLabel(labels, label)) {
- return EmptyStatement();
+ ExpectSemicolon(CHECK_OK);
+ return factory()->NewEmptyStatement();
}
BreakableStatement* target = NULL;
target = LookupBreakTarget(label, CHECK_OK);
@@ -2098,7 +2518,7 @@ Statement* Parser::ParseBreakStatement(ZoneStringList* labels, bool* ok) {
return NULL;
}
ExpectSemicolon(CHECK_OK);
- return new(zone()) BreakStatement(target);
+ return factory()->NewBreakStatement(target);
}
@@ -2111,6 +2531,20 @@ Statement* Parser::ParseReturnStatement(bool* ok) {
// reported (underlining).
Expect(Token::RETURN, CHECK_OK);
+ Token::Value tok = peek();
+ Statement* result;
+ if (scanner().HasAnyLineTerminatorBeforeNext() ||
+ tok == Token::SEMICOLON ||
+ tok == Token::RBRACE ||
+ tok == Token::EOS) {
+ ExpectSemicolon(CHECK_OK);
+ result = factory()->NewReturnStatement(GetLiteralUndefined());
+ } else {
+ Expression* expr = ParseExpression(true, CHECK_OK);
+ ExpectSemicolon(CHECK_OK);
+ result = factory()->NewReturnStatement(expr);
+ }
+
// An ECMAScript program is considered syntactically incorrect if it
// contains a return statement that is not within the body of a
// function. See ECMA-262, section 12.9, page 67.
@@ -2121,21 +2555,9 @@ Statement* Parser::ParseReturnStatement(bool* ok) {
declaration_scope->is_eval_scope()) {
Handle<String> type = isolate()->factory()->illegal_return_symbol();
Expression* throw_error = NewThrowSyntaxError(type, Handle<Object>::null());
- return new(zone()) ExpressionStatement(throw_error);
- }
-
- Token::Value tok = peek();
- if (scanner().HasAnyLineTerminatorBeforeNext() ||
- tok == Token::SEMICOLON ||
- tok == Token::RBRACE ||
- tok == Token::EOS) {
- ExpectSemicolon(CHECK_OK);
- return new(zone()) ReturnStatement(GetLiteralUndefined());
+ return factory()->NewExpressionStatement(throw_error);
}
-
- Expression* expr = ParseExpression(true, CHECK_OK);
- ExpectSemicolon(CHECK_OK);
- return new(zone()) ReturnStatement(expr);
+ return result;
}
@@ -2145,7 +2567,7 @@ Statement* Parser::ParseWithStatement(ZoneStringList* labels, bool* ok) {
Expect(Token::WITH, CHECK_OK);
- if (top_scope_->is_strict_mode()) {
+ if (!top_scope_->is_classic_mode()) {
ReportMessage("strict_mode_with", Vector<const char*>::empty());
*ok = false;
return NULL;
@@ -2158,12 +2580,12 @@ Statement* Parser::ParseWithStatement(ZoneStringList* labels, bool* ok) {
top_scope_->DeclarationScope()->RecordWithStatement();
Scope* with_scope = NewScope(top_scope_, WITH_SCOPE);
Statement* stmt;
- { SaveScope save_scope(this, with_scope);
+ { BlockState block_state(this, with_scope);
with_scope->set_start_position(scanner().peek_location().beg_pos);
stmt = ParseStatement(labels, CHECK_OK);
with_scope->set_end_position(scanner().location().end_pos);
}
- return new(zone()) WithStatement(expr, stmt);
+ return factory()->NewWithStatement(expr, stmt);
}
@@ -2205,7 +2627,7 @@ SwitchStatement* Parser::ParseSwitchStatement(ZoneStringList* labels,
// SwitchStatement ::
// 'switch' '(' Expression ')' '{' CaseClause* '}'
- SwitchStatement* statement = new(zone()) SwitchStatement(isolate(), labels);
+ SwitchStatement* statement = factory()->NewSwitchStatement(labels);
Target target(&this->target_stack_, statement);
Expect(Token::SWITCH, CHECK_OK);
@@ -2241,8 +2663,7 @@ Statement* Parser::ParseThrowStatement(bool* ok) {
Expression* exception = ParseExpression(true, CHECK_OK);
ExpectSemicolon(CHECK_OK);
- return new(zone()) ExpressionStatement(
- new(zone()) Throw(isolate(), exception, pos));
+ return factory()->NewExpressionStatement(factory()->NewThrow(exception, pos));
}
@@ -2291,7 +2712,7 @@ TryStatement* Parser::ParseTryStatement(bool* ok) {
catch_scope->set_start_position(scanner().location().beg_pos);
name = ParseIdentifier(CHECK_OK);
- if (top_scope_->is_strict_mode() && IsEvalOrArguments(name)) {
+ if (!top_scope_->is_classic_mode() && IsEvalOrArguments(name)) {
ReportMessage("strict_catch_variable", Vector<const char*>::empty());
*ok = false;
return NULL;
@@ -2301,10 +2722,11 @@ TryStatement* Parser::ParseTryStatement(bool* ok) {
if (peek() == Token::LBRACE) {
Target target(&this->target_stack_, &catch_collector);
- VariableMode mode = harmony_scoping_ ? LET : VAR;
- catch_variable = catch_scope->DeclareLocal(name, mode);
+ VariableMode mode = is_extended_mode() ? LET : VAR;
+ catch_variable =
+ catch_scope->DeclareLocal(name, mode, kCreatedInitialized);
- SaveScope save_scope(this, catch_scope);
+ BlockState block_state(this, catch_scope);
catch_block = ParseBlock(NULL, CHECK_OK);
} else {
Expect(Token::LBRACE, CHECK_OK);
@@ -2327,13 +2749,11 @@ TryStatement* Parser::ParseTryStatement(bool* ok) {
if (catch_block != NULL && finally_block != NULL) {
// If we have both, create an inner try/catch.
ASSERT(catch_scope != NULL && catch_variable != NULL);
- TryCatchStatement* statement =
- new(zone()) TryCatchStatement(try_block,
- catch_scope,
- catch_variable,
- catch_block);
+ int index = current_function_state_->NextHandlerIndex();
+ TryCatchStatement* statement = factory()->NewTryCatchStatement(
+ index, try_block, catch_scope, catch_variable, catch_block);
statement->set_escaping_targets(try_collector.targets());
- try_block = new(zone()) Block(isolate(), NULL, 1, false);
+ try_block = factory()->NewBlock(NULL, 1, false);
try_block->AddStatement(statement);
catch_block = NULL; // Clear to indicate it's been handled.
}
@@ -2342,14 +2762,13 @@ TryStatement* Parser::ParseTryStatement(bool* ok) {
if (catch_block != NULL) {
ASSERT(finally_block == NULL);
ASSERT(catch_scope != NULL && catch_variable != NULL);
- result =
- new(zone()) TryCatchStatement(try_block,
- catch_scope,
- catch_variable,
- catch_block);
+ int index = current_function_state_->NextHandlerIndex();
+ result = factory()->NewTryCatchStatement(
+ index, try_block, catch_scope, catch_variable, catch_block);
} else {
ASSERT(finally_block != NULL);
- result = new(zone()) TryFinallyStatement(try_block, finally_block);
+ int index = current_function_state_->NextHandlerIndex();
+ result = factory()->NewTryFinallyStatement(index, try_block, finally_block);
// Combine the jump targets of the try block and the possible catch block.
try_collector.targets()->AddAll(*catch_collector.targets());
}
@@ -2364,7 +2783,7 @@ DoWhileStatement* Parser::ParseDoWhileStatement(ZoneStringList* labels,
// DoStatement ::
// 'do' Statement 'while' '(' Expression ')' ';'
- DoWhileStatement* loop = new(zone()) DoWhileStatement(isolate(), labels);
+ DoWhileStatement* loop = factory()->NewDoWhileStatement(labels);
Target target(&this->target_stack_, loop);
Expect(Token::DO, CHECK_OK);
@@ -2395,7 +2814,7 @@ WhileStatement* Parser::ParseWhileStatement(ZoneStringList* labels, bool* ok) {
// WhileStatement ::
// 'while' '(' Expression ')' Statement
- WhileStatement* loop = new(zone()) WhileStatement(isolate(), labels);
+ WhileStatement* loop = factory()->NewWhileStatement(labels);
Target target(&this->target_stack_, loop);
Expect(Token::WHILE, CHECK_OK);
@@ -2427,11 +2846,11 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
if (peek() == Token::VAR || peek() == Token::CONST) {
Handle<String> name;
Block* variable_statement =
- ParseVariableDeclarations(kForStatement, NULL, &name, CHECK_OK);
+ ParseVariableDeclarations(kForStatement, NULL, NULL, &name, CHECK_OK);
if (peek() == Token::IN && !name.is_null()) {
- VariableProxy* each = top_scope_->NewUnresolved(name);
- ForInStatement* loop = new(zone()) ForInStatement(isolate(), labels);
+ VariableProxy* each = top_scope_->NewUnresolved(factory(), name);
+ ForInStatement* loop = factory()->NewForInStatement(labels);
Target target(&this->target_stack_, loop);
Expect(Token::IN, CHECK_OK);
@@ -2440,7 +2859,7 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
Statement* body = ParseStatement(NULL, CHECK_OK);
loop->Initialize(each, enumerable, body);
- Block* result = new(zone()) Block(isolate(), NULL, 2, false);
+ Block* result = factory()->NewBlock(NULL, 2, false);
result->AddStatement(variable_statement);
result->AddStatement(loop);
top_scope_ = saved_scope;
@@ -2456,10 +2875,8 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
Handle<String> name;
VariableDeclarationProperties decl_props = kHasNoInitializers;
Block* variable_statement =
- ParseVariableDeclarations(kForStatement,
- &decl_props,
- &name,
- CHECK_OK);
+ ParseVariableDeclarations(kForStatement, &decl_props, NULL, &name,
+ CHECK_OK);
bool accept_IN = !name.is_null() && decl_props != kHasInitializers;
if (peek() == Token::IN && accept_IN) {
// Rewrite a for-in statement of the form
@@ -2478,9 +2895,9 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
// TODO(keuchel): Move the temporary variable to the block scope, after
// implementing stack allocated block scoped variables.
Variable* temp = top_scope_->DeclarationScope()->NewTemporary(name);
- VariableProxy* temp_proxy = new(zone()) VariableProxy(isolate(), temp);
- VariableProxy* each = top_scope_->NewUnresolved(name);
- ForInStatement* loop = new(zone()) ForInStatement(isolate(), labels);
+ VariableProxy* temp_proxy = factory()->NewVariableProxy(temp);
+ VariableProxy* each = top_scope_->NewUnresolved(factory(), name);
+ ForInStatement* loop = factory()->NewForInStatement(labels);
Target target(&this->target_stack_, loop);
Expect(Token::IN, CHECK_OK);
@@ -2488,14 +2905,11 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
Expect(Token::RPAREN, CHECK_OK);
Statement* body = ParseStatement(NULL, CHECK_OK);
- Block* body_block = new(zone()) Block(isolate(), NULL, 3, false);
- Assignment* assignment = new(zone()) Assignment(isolate(),
- Token::ASSIGN,
- each,
- temp_proxy,
- RelocInfo::kNoPosition);
+ Block* body_block = factory()->NewBlock(NULL, 3, false);
+ Assignment* assignment = factory()->NewAssignment(
+ Token::ASSIGN, each, temp_proxy, RelocInfo::kNoPosition);
Statement* assignment_statement =
- new(zone()) ExpressionStatement(assignment);
+ factory()->NewExpressionStatement(assignment);
body_block->AddStatement(variable_statement);
body_block->AddStatement(assignment_statement);
body_block->AddStatement(body);
@@ -2522,7 +2936,7 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
isolate()->factory()->invalid_lhs_in_for_in_symbol();
expression = NewThrowReferenceError(type);
}
- ForInStatement* loop = new(zone()) ForInStatement(isolate(), labels);
+ ForInStatement* loop = factory()->NewForInStatement(labels);
Target target(&this->target_stack_, loop);
Expect(Token::IN, CHECK_OK);
@@ -2539,13 +2953,13 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
return loop;
} else {
- init = new(zone()) ExpressionStatement(expression);
+ init = factory()->NewExpressionStatement(expression);
}
}
}
// Standard 'for' loop
- ForStatement* loop = new(zone()) ForStatement(isolate(), labels);
+ ForStatement* loop = factory()->NewForStatement(labels);
Target target(&this->target_stack_, loop);
// Parsed initializer at this point.
@@ -2560,7 +2974,7 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
Statement* next = NULL;
if (peek() != Token::RPAREN) {
Expression* exp = ParseExpression(true, CHECK_OK);
- next = new(zone()) ExpressionStatement(exp);
+ next = factory()->NewExpressionStatement(exp);
}
Expect(Token::RPAREN, CHECK_OK);
@@ -2580,7 +2994,7 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
// for (; c; n) b
// }
ASSERT(init != NULL);
- Block* result = new(zone()) Block(isolate(), NULL, 2, false);
+ Block* result = factory()->NewBlock(NULL, 2, false);
result->AddStatement(init);
result->AddStatement(loop);
result->set_block_scope(for_scope);
@@ -2604,8 +3018,8 @@ Expression* Parser::ParseExpression(bool accept_IN, bool* ok) {
Expect(Token::COMMA, CHECK_OK);
int position = scanner().location().beg_pos;
Expression* right = ParseAssignmentExpression(accept_IN, CHECK_OK);
- result = new(zone()) BinaryOperation(
- isolate(), Token::COMMA, result, right, position);
+ result =
+ factory()->NewBinaryOperation(Token::COMMA, result, right, position);
}
return result;
}
@@ -2636,10 +3050,11 @@ Expression* Parser::ParseAssignmentExpression(bool accept_IN, bool* ok) {
expression = NewThrowReferenceError(type);
}
- if (top_scope_->is_strict_mode()) {
+ if (!top_scope_->is_classic_mode()) {
// Assignment to eval or arguments is disallowed in strict mode.
CheckStrictModeLValue(expression, "strict_lhs_assignment", CHECK_OK);
}
+ MarkAsLValue(expression);
Token::Value op = Next(); // Get assignment operator.
int pos = scanner().location().beg_pos;
@@ -2655,13 +3070,13 @@ Expression* Parser::ParseAssignmentExpression(bool accept_IN, bool* ok) {
property != NULL &&
property->obj()->AsVariableProxy() != NULL &&
property->obj()->AsVariableProxy()->is_this()) {
- lexical_scope_->AddProperty();
+ current_function_state_->AddProperty();
}
// If we assign a function literal to a property we pretenure the
// literal so it can be added as a constant function property.
if (property != NULL && right->AsFunctionLiteral() != NULL) {
- right->AsFunctionLiteral()->set_pretenure(true);
+ right->AsFunctionLiteral()->set_pretenure();
}
if (fni_ != NULL) {
@@ -2679,7 +3094,7 @@ Expression* Parser::ParseAssignmentExpression(bool accept_IN, bool* ok) {
fni_->Leave();
}
- return new(zone()) Assignment(isolate(), op, expression, right, pos);
+ return factory()->NewAssignment(op, expression, right, pos);
}
@@ -2701,8 +3116,8 @@ Expression* Parser::ParseConditionalExpression(bool accept_IN, bool* ok) {
Expect(Token::COLON, CHECK_OK);
int right_position = scanner().peek_location().beg_pos;
Expression* right = ParseAssignmentExpression(accept_IN, CHECK_OK);
- return new(zone()) Conditional(
- isolate(), expression, left, right, left_position, right_position);
+ return factory()->NewConditional(
+ expression, left, right, left_position, right_position);
}
@@ -2733,41 +3148,47 @@ Expression* Parser::ParseBinaryExpression(int prec, bool accept_IN, bool* ok) {
switch (op) {
case Token::ADD:
- x = NewNumberLiteral(x_val + y_val);
+ x = factory()->NewNumberLiteral(x_val + y_val);
continue;
case Token::SUB:
- x = NewNumberLiteral(x_val - y_val);
+ x = factory()->NewNumberLiteral(x_val - y_val);
continue;
case Token::MUL:
- x = NewNumberLiteral(x_val * y_val);
+ x = factory()->NewNumberLiteral(x_val * y_val);
continue;
case Token::DIV:
- x = NewNumberLiteral(x_val / y_val);
+ x = factory()->NewNumberLiteral(x_val / y_val);
continue;
- case Token::BIT_OR:
- x = NewNumberLiteral(DoubleToInt32(x_val) | DoubleToInt32(y_val));
+ case Token::BIT_OR: {
+ int value = DoubleToInt32(x_val) | DoubleToInt32(y_val);
+ x = factory()->NewNumberLiteral(value);
continue;
- case Token::BIT_AND:
- x = NewNumberLiteral(DoubleToInt32(x_val) & DoubleToInt32(y_val));
+ }
+ case Token::BIT_AND: {
+ int value = DoubleToInt32(x_val) & DoubleToInt32(y_val);
+ x = factory()->NewNumberLiteral(value);
continue;
- case Token::BIT_XOR:
- x = NewNumberLiteral(DoubleToInt32(x_val) ^ DoubleToInt32(y_val));
+ }
+ case Token::BIT_XOR: {
+ int value = DoubleToInt32(x_val) ^ DoubleToInt32(y_val);
+ x = factory()->NewNumberLiteral(value);
continue;
+ }
case Token::SHL: {
int value = DoubleToInt32(x_val) << (DoubleToInt32(y_val) & 0x1f);
- x = NewNumberLiteral(value);
+ x = factory()->NewNumberLiteral(value);
continue;
}
case Token::SHR: {
uint32_t shift = DoubleToInt32(y_val) & 0x1f;
uint32_t value = DoubleToUint32(x_val) >> shift;
- x = NewNumberLiteral(value);
+ x = factory()->NewNumberLiteral(value);
continue;
}
case Token::SAR: {
uint32_t shift = DoubleToInt32(y_val) & 0x1f;
int value = ArithmeticShiftRight(DoubleToInt32(x_val), shift);
- x = NewNumberLiteral(value);
+ x = factory()->NewNumberLiteral(value);
continue;
}
default:
@@ -2786,15 +3207,15 @@ Expression* Parser::ParseBinaryExpression(int prec, bool accept_IN, bool* ok) {
case Token::NE_STRICT: cmp = Token::EQ_STRICT; break;
default: break;
}
- x = new(zone()) CompareOperation(isolate(), cmp, x, y, position);
+ x = factory()->NewCompareOperation(cmp, x, y, position);
if (cmp != op) {
// The comparison was negated - add a NOT.
- x = new(zone()) UnaryOperation(isolate(), Token::NOT, x, position);
+ x = factory()->NewUnaryOperation(Token::NOT, x, position);
}
} else {
// We have a "normal" binary operation.
- x = new(zone()) BinaryOperation(isolate(), op, x, y, position);
+ x = factory()->NewBinaryOperation(op, x, y, position);
}
}
}
@@ -2827,7 +3248,7 @@ Expression* Parser::ParseUnaryExpression(bool* ok) {
// Convert the literal to a boolean condition and negate it.
bool condition = literal->ToBoolean()->IsTrue();
Handle<Object> result(isolate()->heap()->ToBoolean(!condition));
- return NewLiteral(result);
+ return factory()->NewLiteral(result);
} else if (literal->IsNumber()) {
// Compute some expressions involving only number literals.
double value = literal->Number();
@@ -2835,9 +3256,9 @@ Expression* Parser::ParseUnaryExpression(bool* ok) {
case Token::ADD:
return expression;
case Token::SUB:
- return NewNumberLiteral(-value);
+ return factory()->NewNumberLiteral(-value);
case Token::BIT_NOT:
- return NewNumberLiteral(~DoubleToInt32(value));
+ return factory()->NewNumberLiteral(~DoubleToInt32(value));
default:
break;
}
@@ -2845,7 +3266,7 @@ Expression* Parser::ParseUnaryExpression(bool* ok) {
}
// "delete identifier" is a syntax error in strict mode.
- if (op == Token::DELETE && top_scope_->is_strict_mode()) {
+ if (op == Token::DELETE && !top_scope_->is_classic_mode()) {
VariableProxy* operand = expression->AsVariableProxy();
if (operand != NULL && !operand->is_this()) {
ReportMessage("strict_delete", Vector<const char*>::empty());
@@ -2854,7 +3275,7 @@ Expression* Parser::ParseUnaryExpression(bool* ok) {
}
}
- return new(zone()) UnaryOperation(isolate(), op, expression, position);
+ return factory()->NewUnaryOperation(op, expression, position);
} else if (Token::IsCountOp(op)) {
op = Next();
@@ -2869,17 +3290,17 @@ Expression* Parser::ParseUnaryExpression(bool* ok) {
expression = NewThrowReferenceError(type);
}
- if (top_scope_->is_strict_mode()) {
+ if (!top_scope_->is_classic_mode()) {
// Prefix expression operand in strict mode may not be eval or arguments.
CheckStrictModeLValue(expression, "strict_lhs_prefix", CHECK_OK);
}
+ MarkAsLValue(expression);
int position = scanner().location().beg_pos;
- return new(zone()) CountOperation(isolate(),
- op,
- true /* prefix */,
- expression,
- position);
+ return factory()->NewCountOperation(op,
+ true /* prefix */,
+ expression,
+ position);
} else {
return ParsePostfixExpression(ok);
@@ -2904,19 +3325,19 @@ Expression* Parser::ParsePostfixExpression(bool* ok) {
expression = NewThrowReferenceError(type);
}
- if (top_scope_->is_strict_mode()) {
+ if (!top_scope_->is_classic_mode()) {
// Postfix expression operand in strict mode may not be eval or arguments.
CheckStrictModeLValue(expression, "strict_lhs_prefix", CHECK_OK);
}
+ MarkAsLValue(expression);
Token::Value next = Next();
int position = scanner().location().beg_pos;
expression =
- new(zone()) CountOperation(isolate(),
- next,
- false /* postfix */,
- expression,
- position);
+ factory()->NewCountOperation(next,
+ false /* postfix */,
+ expression,
+ position);
}
return expression;
}
@@ -2939,13 +3360,25 @@ Expression* Parser::ParseLeftHandSideExpression(bool* ok) {
Consume(Token::LBRACK);
int pos = scanner().location().beg_pos;
Expression* index = ParseExpression(true, CHECK_OK);
- result = new(zone()) Property(isolate(), result, index, pos);
+ result = factory()->NewProperty(result, index, pos);
Expect(Token::RBRACK, CHECK_OK);
break;
}
case Token::LPAREN: {
- int pos = scanner().location().beg_pos;
+ int pos;
+ if (scanner().current_token() == Token::IDENTIFIER) {
+ // For call of an identifier we want to report position of
+ // the identifier as position of the call in the stack trace.
+ pos = scanner().location().beg_pos;
+ } else {
+ // For other kinds of calls we record position of the parenthesis as
+ // position of the call. Note that this is extremely important for
+ // expressions of the form function(){...}() for which call position
+ // should not point to the closing brace otherwise it will intersect
+ // with positions recorded for function literal and confuse debugger.
+ pos = scanner().peek_location().beg_pos;
+ }
ZoneList<Expression*>* args = ParseArguments(CHECK_OK);
// Keep track of eval() calls since they disable all local variable
@@ -2960,7 +3393,7 @@ Expression* Parser::ParseLeftHandSideExpression(bool* ok) {
callee->IsVariable(isolate()->factory()->eval_symbol())) {
top_scope_->DeclarationScope()->RecordEvalCall();
}
- result = NewCall(result, args, pos);
+ result = factory()->NewCall(result, args, pos);
break;
}
@@ -2968,10 +3401,8 @@ Expression* Parser::ParseLeftHandSideExpression(bool* ok) {
Consume(Token::PERIOD);
int pos = scanner().location().beg_pos;
Handle<String> name = ParseIdentifierName(CHECK_OK);
- result = new(zone()) Property(isolate(),
- result,
- NewLiteral(name),
- pos);
+ result =
+ factory()->NewProperty(result, factory()->NewLiteral(name), pos);
if (fni_ != NULL) fni_->PushLiteralName(name);
break;
}
@@ -3007,10 +3438,8 @@ Expression* Parser::ParseNewPrefix(PositionStack* stack, bool* ok) {
if (!stack->is_empty()) {
int last = stack->pop();
- result = new(zone()) CallNew(isolate(),
- result,
- new(zone()) ZoneList<Expression*>(0),
- last);
+ result = factory()->NewCallNew(
+ result, new(zone()) ZoneList<Expression*>(0), last);
}
return result;
}
@@ -3062,7 +3491,7 @@ Expression* Parser::ParseMemberWithNewPrefixesExpression(PositionStack* stack,
Consume(Token::LBRACK);
int pos = scanner().location().beg_pos;
Expression* index = ParseExpression(true, CHECK_OK);
- result = new(zone()) Property(isolate(), result, index, pos);
+ result = factory()->NewProperty(result, index, pos);
if (fni_ != NULL) {
if (index->IsPropertyName()) {
fni_->PushLiteralName(index->AsLiteral()->AsPropertyName());
@@ -3078,10 +3507,8 @@ Expression* Parser::ParseMemberWithNewPrefixesExpression(PositionStack* stack,
Consume(Token::PERIOD);
int pos = scanner().location().beg_pos;
Handle<String> name = ParseIdentifierName(CHECK_OK);
- result = new(zone()) Property(isolate(),
- result,
- NewLiteral(name),
- pos);
+ result =
+ factory()->NewProperty(result, factory()->NewLiteral(name), pos);
if (fni_ != NULL) fni_->PushLiteralName(name);
break;
}
@@ -3090,7 +3517,7 @@ Expression* Parser::ParseMemberWithNewPrefixesExpression(PositionStack* stack,
// Consume one of the new prefixes (already parsed).
ZoneList<Expression*>* args = ParseArguments(CHECK_OK);
int last = stack->pop();
- result = new(zone()) CallNew(isolate(), result, args, last);
+ result = factory()->NewCallNew(result, args, last);
break;
}
default:
@@ -3109,7 +3536,7 @@ DebuggerStatement* Parser::ParseDebuggerStatement(bool* ok) {
Expect(Token::DEBUGGER, CHECK_OK);
ExpectSemicolon(CHECK_OK);
- return new(zone()) DebuggerStatement();
+ return factory()->NewDebuggerStatement();
}
@@ -3135,9 +3562,9 @@ void Parser::ReportUnexpectedToken(Token::Value token) {
return ReportMessage("unexpected_reserved",
Vector<const char*>::empty());
case Token::FUTURE_STRICT_RESERVED_WORD:
- return ReportMessage(top_scope_->is_strict_mode() ?
- "unexpected_strict_reserved" :
- "unexpected_token_identifier",
+ return ReportMessage(top_scope_->is_classic_mode() ?
+ "unexpected_token_identifier" :
+ "unexpected_strict_reserved",
Vector<const char*>::empty());
default:
const char* name = Token::String(token);
@@ -3174,33 +3601,37 @@ Expression* Parser::ParsePrimaryExpression(bool* ok) {
switch (peek()) {
case Token::THIS: {
Consume(Token::THIS);
- result = new(zone()) VariableProxy(isolate(), top_scope_->receiver());
+ result = factory()->NewVariableProxy(top_scope_->receiver());
break;
}
case Token::NULL_LITERAL:
Consume(Token::NULL_LITERAL);
- result = new(zone()) Literal(
- isolate(), isolate()->factory()->null_value());
+ result = factory()->NewLiteral(isolate()->factory()->null_value());
break;
case Token::TRUE_LITERAL:
Consume(Token::TRUE_LITERAL);
- result = new(zone()) Literal(
- isolate(), isolate()->factory()->true_value());
+ result = factory()->NewLiteral(isolate()->factory()->true_value());
break;
case Token::FALSE_LITERAL:
Consume(Token::FALSE_LITERAL);
- result = new(zone()) Literal(
- isolate(), isolate()->factory()->false_value());
+ result = factory()->NewLiteral(isolate()->factory()->false_value());
break;
case Token::IDENTIFIER:
case Token::FUTURE_STRICT_RESERVED_WORD: {
Handle<String> name = ParseIdentifier(CHECK_OK);
if (fni_ != NULL) fni_->PushVariableName(name);
- result = top_scope_->NewUnresolved(name, scanner().location().beg_pos);
+ // The name may refer to a module instance object, so its type is unknown.
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Variable %s ", name->ToAsciiArray());
+#endif
+ Interface* interface = Interface::NewUnknown();
+ result = top_scope_->NewUnresolved(
+ factory(), name, scanner().location().beg_pos, interface);
break;
}
@@ -3210,14 +3641,14 @@ Expression* Parser::ParsePrimaryExpression(bool* ok) {
double value = StringToDouble(isolate()->unicode_cache(),
scanner().literal_ascii_string(),
ALLOW_HEX | ALLOW_OCTALS);
- result = NewNumberLiteral(value);
+ result = factory()->NewNumberLiteral(value);
break;
}
case Token::STRING: {
Consume(Token::STRING);
Handle<String> symbol = GetSymbol(CHECK_OK);
- result = NewLiteral(symbol);
+ result = factory()->NewLiteral(symbol);
if (fni_ != NULL) fni_->PushLiteralName(symbol);
break;
}
@@ -3315,13 +3746,14 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
Expect(Token::RBRACK, CHECK_OK);
// Update the scope information before the pre-parsing bailout.
- int literal_index = lexical_scope_->NextMaterializedLiteralIndex();
+ int literal_index = current_function_state_->NextMaterializedLiteralIndex();
// Allocate a fixed array to hold all the object literals.
Handle<FixedArray> object_literals =
isolate()->factory()->NewFixedArray(values->length(), TENURED);
Handle<FixedDoubleArray> double_literals;
ElementsKind elements_kind = FAST_SMI_ONLY_ELEMENTS;
+ bool has_only_undefined_values = true;
// Fill in the literals.
bool is_simple = true;
@@ -3345,6 +3777,7 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
// FAST_DOUBLE_ELEMENTS and FAST_ELEMENTS as necessary. Always remember
// the tagged value, no matter what the ElementsKind is in case we
// ultimately end up in FAST_ELEMENTS.
+ has_only_undefined_values = false;
object_literals->set(i, *boilerplate_value);
if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
// Smi only elements. Notice if a transition to FAST_DOUBLE_ELEMENTS or
@@ -3383,6 +3816,13 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
}
}
+ // Very small array literals that don't have a concrete hint about their type
+ // from a constant value should default to the slow case to avoid lots of
+ // elements transitions on really small objects.
+ if (has_only_undefined_values && values->length() <= 2) {
+ elements_kind = FAST_ELEMENTS;
+ }
+
// Simple and shallow arrays can be lazily copied, we transform the
// elements array to a copy-on-write array.
if (is_simple && depth == 1 && values->length() > 0 &&
@@ -3402,8 +3842,8 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
literals->set(0, Smi::FromInt(elements_kind));
literals->set(1, *element_values);
- return new(zone()) ArrayLiteral(
- isolate(), literals, values, literal_index, is_simple, depth);
+ return factory()->NewArrayLiteral(
+ literals, values, literal_index, is_simple, depth);
}
@@ -3475,19 +3915,13 @@ Handle<Object> Parser::GetBoilerplateValue(Expression* expression) {
return isolate()->factory()->undefined_value();
}
-// Defined in ast.cc
-bool IsEqualString(void* first, void* second);
-bool IsEqualNumber(void* first, void* second);
-
-
// Validation per 11.1.5 Object Initialiser
class ObjectLiteralPropertyChecker {
public:
- ObjectLiteralPropertyChecker(Parser* parser, bool strict) :
- props(&IsEqualString),
- elems(&IsEqualNumber),
+ ObjectLiteralPropertyChecker(Parser* parser, LanguageMode language_mode) :
+ props_(Literal::Match),
parser_(parser),
- strict_(strict) {
+ language_mode_(language_mode) {
}
void CheckProperty(
@@ -3514,10 +3948,9 @@ class ObjectLiteralPropertyChecker {
}
}
- HashMap props;
- HashMap elems;
+ HashMap props_;
Parser* parser_;
- bool strict_;
+ LanguageMode language_mode_;
};
@@ -3525,49 +3958,14 @@ void ObjectLiteralPropertyChecker::CheckProperty(
ObjectLiteral::Property* property,
Scanner::Location loc,
bool* ok) {
-
ASSERT(property != NULL);
-
- Literal *lit = property->key();
- Handle<Object> handle = lit->handle();
-
- uint32_t hash;
- HashMap* map;
- void* key;
-
- if (handle->IsSymbol()) {
- Handle<String> name(String::cast(*handle));
- if (name->AsArrayIndex(&hash)) {
- Handle<Object> key_handle = FACTORY->NewNumberFromUint(hash);
- key = key_handle.location();
- map = &elems;
- } else {
- key = handle.location();
- hash = name->Hash();
- map = &props;
- }
- } else if (handle->ToArrayIndex(&hash)) {
- key = handle.location();
- map = &elems;
- } else {
- ASSERT(handle->IsNumber());
- double num = handle->Number();
- char arr[100];
- Vector<char> buffer(arr, ARRAY_SIZE(arr));
- const char* str = DoubleToCString(num, buffer);
- Handle<String> name = FACTORY->NewStringFromAscii(CStrVector(str));
- key = name.location();
- hash = name->Hash();
- map = &props;
- }
-
- // Lookup property previously defined, if any.
- HashMap::Entry* entry = map->Lookup(key, hash, true);
+ Literal* literal = property->key();
+ HashMap::Entry* entry = props_.Lookup(literal, literal->Hash(), true);
intptr_t prev = reinterpret_cast<intptr_t> (entry->value);
intptr_t curr = GetPropertyKind(property);
- // Duplicate data properties are illegal in strict mode.
- if (strict_ && (curr & prev & kData) != 0) {
+ // Duplicate data properties are illegal in strict or extended mode.
+ if (language_mode_ != CLASSIC_MODE && (curr & prev & kData) != 0) {
parser_->ReportMessageAt(loc, "strict_duplicate_property",
Vector<const char*>::empty());
*ok = false;
@@ -3678,11 +4076,9 @@ ObjectLiteral::Property* Parser::ParseObjectLiteralGetSet(bool is_getter,
RelocInfo::kNoPosition,
FunctionLiteral::ANONYMOUS_EXPRESSION,
CHECK_OK);
- // Allow any number of parameters for compatiabilty with JSC.
+ // Allow any number of parameters for compatibilty with JSC.
// Specification only allows zero parameters for get and one for set.
- ObjectLiteral::Property* property =
- new(zone()) ObjectLiteral::Property(is_getter, value);
- return property;
+ return factory()->NewObjectLiteralProperty(is_getter, value);
} else {
ReportUnexpectedToken(next);
*ok = false;
@@ -3703,7 +4099,7 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
int number_of_boilerplate_properties = 0;
bool has_function = false;
- ObjectLiteralPropertyChecker checker(this, top_scope_->is_strict_mode());
+ ObjectLiteralPropertyChecker checker(this, top_scope_->language_mode());
Expect(Token::LBRACE, CHECK_OK);
@@ -3747,7 +4143,7 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
}
// Failed to parse as get/set property, so it's just a property
// called "get" or "set".
- key = NewLiteral(id);
+ key = factory()->NewLiteral(id);
break;
}
case Token::STRING: {
@@ -3756,10 +4152,10 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
if (fni_ != NULL) fni_->PushLiteralName(string);
uint32_t index;
if (!string.is_null() && string->AsArrayIndex(&index)) {
- key = NewNumberLiteral(index);
+ key = factory()->NewNumberLiteral(index);
break;
}
- key = NewLiteral(string);
+ key = factory()->NewLiteral(string);
break;
}
case Token::NUMBER: {
@@ -3768,14 +4164,14 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
double value = StringToDouble(isolate()->unicode_cache(),
scanner().literal_ascii_string(),
ALLOW_HEX | ALLOW_OCTALS);
- key = NewNumberLiteral(value);
+ key = factory()->NewNumberLiteral(value);
break;
}
default:
if (Token::IsKeyword(next)) {
Consume(next);
Handle<String> string = GetSymbol(CHECK_OK);
- key = NewLiteral(string);
+ key = factory()->NewLiteral(string);
} else {
// Unexpected token.
Token::Value next = Next();
@@ -3789,13 +4185,15 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
Expression* value = ParseAssignmentExpression(true, CHECK_OK);
ObjectLiteral::Property* property =
- new(zone()) ObjectLiteral::Property(key, value);
+ new(zone()) ObjectLiteral::Property(key, value, isolate());
- // Mark object literals that contain function literals and pretenure the
- // literal so it can be added as a constant function property.
- if (value->AsFunctionLiteral() != NULL) {
+ // Mark top-level object literals that contain function literals and
+ // pretenure the literal so it can be added as a constant function
+ // property.
+ if (top_scope_->DeclarationScope()->is_global_scope() &&
+ value->AsFunctionLiteral() != NULL) {
has_function = true;
- value->AsFunctionLiteral()->set_pretenure(true);
+ value->AsFunctionLiteral()->set_pretenure();
}
// Count CONSTANT or COMPUTED properties to maintain the enumeration order.
@@ -3815,7 +4213,7 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
Expect(Token::RBRACE, CHECK_OK);
// Computation of literal_index must happen before pre parse bailout.
- int literal_index = lexical_scope_->NextMaterializedLiteralIndex();
+ int literal_index = current_function_state_->NextMaterializedLiteralIndex();
Handle<FixedArray> constant_properties = isolate()->factory()->NewFixedArray(
number_of_boilerplate_properties * 2, TENURED);
@@ -3828,14 +4226,13 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
&is_simple,
&fast_elements,
&depth);
- return new(zone()) ObjectLiteral(isolate(),
- constant_properties,
- properties,
- literal_index,
- is_simple,
- fast_elements,
- depth,
- has_function);
+ return factory()->NewObjectLiteral(constant_properties,
+ properties,
+ literal_index,
+ is_simple,
+ fast_elements,
+ depth,
+ has_function);
}
@@ -3847,15 +4244,14 @@ Expression* Parser::ParseRegExpLiteral(bool seen_equal, bool* ok) {
return NULL;
}
- int literal_index = lexical_scope_->NextMaterializedLiteralIndex();
+ int literal_index = current_function_state_->NextMaterializedLiteralIndex();
Handle<String> js_pattern = NextLiteralString(TENURED);
scanner().ScanRegExpFlags();
Handle<String> js_flags = NextLiteralString(TENURED);
Next();
- return new(zone()) RegExpLiteral(
- isolate(), js_pattern, js_flags, literal_index);
+ return factory()->NewRegExpLiteral(js_pattern, js_flags, literal_index);
}
@@ -3883,6 +4279,98 @@ ZoneList<Expression*>* Parser::ParseArguments(bool* ok) {
}
+class SingletonLogger : public ParserRecorder {
+ public:
+ SingletonLogger() : has_error_(false), start_(-1), end_(-1) { }
+ virtual ~SingletonLogger() { }
+
+ void Reset() { has_error_ = false; }
+
+ virtual void LogFunction(int start,
+ int end,
+ int literals,
+ int properties,
+ LanguageMode mode) {
+ ASSERT(!has_error_);
+ start_ = start;
+ end_ = end;
+ literals_ = literals;
+ properties_ = properties;
+ mode_ = mode;
+ };
+
+ // Logs a symbol creation of a literal or identifier.
+ virtual void LogAsciiSymbol(int start, Vector<const char> literal) { }
+ virtual void LogUtf16Symbol(int start, Vector<const uc16> literal) { }
+
+ // Logs an error message and marks the log as containing an error.
+ // Further logging will be ignored, and ExtractData will return a vector
+ // representing the error only.
+ virtual void LogMessage(int start,
+ int end,
+ const char* message,
+ const char* argument_opt) {
+ has_error_ = true;
+ start_ = start;
+ end_ = end;
+ message_ = message;
+ argument_opt_ = argument_opt;
+ }
+
+ virtual int function_position() { return 0; }
+
+ virtual int symbol_position() { return 0; }
+
+ virtual int symbol_ids() { return -1; }
+
+ virtual Vector<unsigned> ExtractData() {
+ UNREACHABLE();
+ return Vector<unsigned>();
+ }
+
+ virtual void PauseRecording() { }
+
+ virtual void ResumeRecording() { }
+
+ bool has_error() { return has_error_; }
+
+ int start() { return start_; }
+ int end() { return end_; }
+ int literals() {
+ ASSERT(!has_error_);
+ return literals_;
+ }
+ int properties() {
+ ASSERT(!has_error_);
+ return properties_;
+ }
+ LanguageMode language_mode() {
+ ASSERT(!has_error_);
+ return mode_;
+ }
+ const char* message() {
+ ASSERT(has_error_);
+ return message_;
+ }
+ const char* argument_opt() {
+ ASSERT(has_error_);
+ return argument_opt_;
+ }
+
+ private:
+ bool has_error_;
+ int start_;
+ int end_;
+ // For function entries.
+ int literals_;
+ int properties_;
+ LanguageMode mode_;
+ // For error messages.
+ const char* message_;
+ const char* argument_opt_;
+};
+
+
FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
bool name_is_strict_reserved,
int function_token_position,
@@ -3905,17 +4393,20 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
// Function declarations are function scoped in normal mode, so they are
// hoisted. In harmony block scoping mode they are block scoped, so they
// are not hoisted.
- Scope* scope = (type == FunctionLiteral::DECLARATION && !harmony_scoping_)
+ Scope* scope = (type == FunctionLiteral::DECLARATION && !is_extended_mode())
? NewScope(top_scope_->DeclarationScope(), FUNCTION_SCOPE)
: NewScope(top_scope_, FUNCTION_SCOPE);
- ZoneList<Statement*>* body = new(zone()) ZoneList<Statement*>(8);
- int materialized_literal_count;
- int expected_property_count;
+ ZoneList<Statement*>* body = NULL;
+ int materialized_literal_count = -1;
+ int expected_property_count = -1;
+ int handler_count = 0;
bool only_simple_this_property_assignments;
Handle<FixedArray> this_property_assignments;
- bool has_duplicate_parameters = false;
+ FunctionLiteral::ParameterFlag duplicate_parameters =
+ FunctionLiteral::kNoDuplicateParameters;
+ AstProperties ast_properties;
// Parse function body.
- { LexicalScope lexical_scope(this, scope, isolate());
+ { FunctionState function_state(this, scope, isolate());
top_scope_->SetScopeName(function_name);
// FormalParameterList ::
@@ -3938,14 +4429,14 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
name_loc = scanner().location();
}
if (!dupe_loc.IsValid() && top_scope_->IsDeclared(param_name)) {
- has_duplicate_parameters = true;
+ duplicate_parameters = FunctionLiteral::kHasDuplicateParameters;
dupe_loc = scanner().location();
}
if (!reserved_loc.IsValid() && is_strict_reserved) {
reserved_loc = scanner().location();
}
- top_scope_->DeclareParameter(param_name, harmony_scoping_ ? LET : VAR);
+ top_scope_->DeclareParameter(param_name, VAR);
num_parameters++;
if (num_parameters > kMaxNumFunctionParameters) {
ReportMessageAt(scanner().location(), "too_many_parameters",
@@ -3966,78 +4457,128 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
// NOTE: We create a proxy and resolve it here so that in the
// future we can change the AST to only refer to VariableProxies
// instead of Variables and Proxis as is the case now.
+ Variable* fvar = NULL;
+ Token::Value fvar_init_op = Token::INIT_CONST;
if (type == FunctionLiteral::NAMED_EXPRESSION) {
VariableMode fvar_mode;
- Token::Value fvar_init_op;
- if (harmony_scoping_) {
+ if (is_extended_mode()) {
fvar_mode = CONST_HARMONY;
fvar_init_op = Token::INIT_CONST_HARMONY;
} else {
fvar_mode = CONST;
- fvar_init_op = Token::INIT_CONST;
}
- Variable* fvar = top_scope_->DeclareFunctionVar(function_name, fvar_mode);
- VariableProxy* fproxy = top_scope_->NewUnresolved(function_name);
- fproxy->BindTo(fvar);
- body->Add(new(zone()) ExpressionStatement(
- new(zone()) Assignment(isolate(),
- fvar_init_op,
- fproxy,
- new(zone()) ThisFunction(isolate()),
- RelocInfo::kNoPosition)));
- }
-
- // Determine if the function will be lazily compiled. The mode can only
- // be PARSE_LAZILY if the --lazy flag is true. We will not lazily
- // compile if we do not have preparser data for the function.
+ fvar =
+ top_scope_->DeclareFunctionVar(function_name, fvar_mode, factory());
+ }
+
+ // Determine whether the function will be lazily compiled.
+ // The heuristics are:
+ // - It must not have been prohibited by the caller to Parse (some callers
+ // need a full AST).
+ // - The outer scope must be trivial (only global variables in scope).
+ // - The function mustn't be a function expression with an open parenthesis
+ // before; we consider that a hint that the function will be called
+ // immediately, and it would be a waste of time to make it lazily
+ // compiled.
+ // These are all things we can know at this point, without looking at the
+ // function itself.
bool is_lazily_compiled = (mode() == PARSE_LAZILY &&
top_scope_->outer_scope()->is_global_scope() &&
top_scope_->HasTrivialOuterContext() &&
- !parenthesized_function_ &&
- pre_data() != NULL);
+ !parenthesized_function_);
parenthesized_function_ = false; // The bit was set for this function only.
if (is_lazily_compiled) {
int function_block_pos = scanner().location().beg_pos;
- FunctionEntry entry = pre_data()->GetFunctionEntry(function_block_pos);
- if (!entry.is_valid()) {
- // There is no preparser data for the function, we will not lazily
- // compile after all.
- is_lazily_compiled = false;
+ FunctionEntry entry;
+ if (pre_data_ != NULL) {
+ // If we have pre_data_, we use it to skip parsing the function body.
+ // the preparser data contains the information we need to construct the
+ // lazy function.
+ entry = pre_data()->GetFunctionEntry(function_block_pos);
+ if (entry.is_valid()) {
+ if (entry.end_pos() <= function_block_pos) {
+ // End position greater than end of stream is safe, and hard
+ // to check.
+ ReportInvalidPreparseData(function_name, CHECK_OK);
+ }
+ scanner().SeekForward(entry.end_pos() - 1);
+
+ scope->set_end_position(entry.end_pos());
+ Expect(Token::RBRACE, CHECK_OK);
+ isolate()->counters()->total_preparse_skipped()->Increment(
+ scope->end_position() - function_block_pos);
+ materialized_literal_count = entry.literal_count();
+ expected_property_count = entry.property_count();
+ top_scope_->SetLanguageMode(entry.language_mode());
+ only_simple_this_property_assignments = false;
+ this_property_assignments = isolate()->factory()->empty_fixed_array();
+ } else {
+ is_lazily_compiled = false;
+ }
} else {
- scope->set_end_position(entry.end_pos());
- if (scope->end_position() <= function_block_pos) {
- // End position greater than end of stream is safe, and hard to check.
- ReportInvalidPreparseData(function_name, CHECK_OK);
+ // With no preparser data, we partially parse the function, without
+ // building an AST. This gathers the data needed to build a lazy
+ // function.
+ SingletonLogger logger;
+ preparser::PreParser::PreParseResult result =
+ LazyParseFunctionLiteral(&logger);
+ if (result == preparser::PreParser::kPreParseStackOverflow) {
+ // Propagate stack overflow.
+ stack_overflow_ = true;
+ *ok = false;
+ return NULL;
}
+ if (logger.has_error()) {
+ const char* arg = logger.argument_opt();
+ Vector<const char*> args;
+ if (arg != NULL) {
+ args = Vector<const char*>(&arg, 1);
+ }
+ ReportMessageAt(Scanner::Location(logger.start(), logger.end()),
+ logger.message(), args);
+ *ok = false;
+ return NULL;
+ }
+ scope->set_end_position(logger.end());
+ Expect(Token::RBRACE, CHECK_OK);
isolate()->counters()->total_preparse_skipped()->Increment(
scope->end_position() - function_block_pos);
- // Seek to position just before terminal '}'.
- scanner().SeekForward(scope->end_position() - 1);
- materialized_literal_count = entry.literal_count();
- expected_property_count = entry.property_count();
- top_scope_->SetStrictModeFlag(entry.strict_mode_flag());
+ materialized_literal_count = logger.literals();
+ expected_property_count = logger.properties();
+ top_scope_->SetLanguageMode(logger.language_mode());
only_simple_this_property_assignments = false;
this_property_assignments = isolate()->factory()->empty_fixed_array();
- Expect(Token::RBRACE, CHECK_OK);
}
}
if (!is_lazily_compiled) {
- ParseSourceElements(body, Token::RBRACE, CHECK_OK);
+ body = new(zone()) ZoneList<Statement*>(8);
+ if (fvar != NULL) {
+ VariableProxy* fproxy =
+ top_scope_->NewUnresolved(factory(), function_name);
+ fproxy->BindTo(fvar);
+ body->Add(factory()->NewExpressionStatement(
+ factory()->NewAssignment(fvar_init_op,
+ fproxy,
+ factory()->NewThisFunction(),
+ RelocInfo::kNoPosition)));
+ }
+ ParseSourceElements(body, Token::RBRACE, false, CHECK_OK);
- materialized_literal_count = lexical_scope.materialized_literal_count();
- expected_property_count = lexical_scope.expected_property_count();
+ materialized_literal_count = function_state.materialized_literal_count();
+ expected_property_count = function_state.expected_property_count();
+ handler_count = function_state.handler_count();
only_simple_this_property_assignments =
- lexical_scope.only_simple_this_property_assignments();
- this_property_assignments = lexical_scope.this_property_assignments();
+ function_state.only_simple_this_property_assignments();
+ this_property_assignments = function_state.this_property_assignments();
Expect(Token::RBRACE, CHECK_OK);
scope->set_end_position(scanner().location().end_pos);
}
// Validate strict mode.
- if (top_scope_->is_strict_mode()) {
+ if (!top_scope_->is_classic_mode()) {
if (IsEvalOrArguments(function_name)) {
int start_pos = scope->start_position();
int position = function_token_position != RelocInfo::kNoPosition
@@ -4082,31 +4623,56 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> function_name,
scope->end_position(),
CHECK_OK);
}
+ ast_properties = *factory()->visitor()->ast_properties();
}
- if (harmony_scoping_) {
+ if (is_extended_mode()) {
CheckConflictingVarDeclarations(scope, CHECK_OK);
}
FunctionLiteral* function_literal =
- new(zone()) FunctionLiteral(isolate(),
- function_name,
- scope,
- body,
- materialized_literal_count,
- expected_property_count,
- only_simple_this_property_assignments,
- this_property_assignments,
- num_parameters,
- type,
- has_duplicate_parameters);
+ factory()->NewFunctionLiteral(function_name,
+ scope,
+ body,
+ materialized_literal_count,
+ expected_property_count,
+ handler_count,
+ only_simple_this_property_assignments,
+ this_property_assignments,
+ num_parameters,
+ duplicate_parameters,
+ type,
+ FunctionLiteral::kIsFunction);
function_literal->set_function_token_position(function_token_position);
+ function_literal->set_ast_properties(&ast_properties);
if (fni_ != NULL && should_infer_name) fni_->AddFunction(function_literal);
return function_literal;
}
+preparser::PreParser::PreParseResult Parser::LazyParseFunctionLiteral(
+ SingletonLogger* logger) {
+ HistogramTimerScope preparse_scope(isolate()->counters()->pre_parse());
+ ASSERT_EQ(Token::LBRACE, scanner().current_token());
+
+ if (reusable_preparser_ == NULL) {
+ intptr_t stack_limit = isolate()->stack_guard()->real_climit();
+ bool do_allow_lazy = true;
+ reusable_preparser_ = new preparser::PreParser(&scanner_,
+ NULL,
+ stack_limit,
+ do_allow_lazy,
+ allow_natives_syntax_,
+ allow_modules_);
+ }
+ preparser::PreParser::PreParseResult result =
+ reusable_preparser_->PreParseLazyFunction(top_scope_->language_mode(),
+ logger);
+ return result;
+}
+
+
Expression* Parser::ParseV8Intrinsic(bool* ok) {
// CallRuntime ::
// '%' Identifier Arguments
@@ -4149,7 +4715,7 @@ Expression* Parser::ParseV8Intrinsic(bool* ok) {
}
// We have a valid intrinsics call or a call to a builtin.
- return new(zone()) CallRuntime(isolate(), name, function, args);
+ return factory()->NewCallRuntime(name, function, args);
}
@@ -4204,25 +4770,32 @@ void Parser::ExpectSemicolon(bool* ok) {
}
-Literal* Parser::GetLiteralUndefined() {
- return NewLiteral(isolate()->factory()->undefined_value());
+void Parser::ExpectContextualKeyword(const char* keyword, bool* ok) {
+ Expect(Token::IDENTIFIER, ok);
+ if (!*ok) return;
+ Handle<String> symbol = GetSymbol(ok);
+ if (!*ok) return;
+ if (!symbol->IsEqualTo(CStrVector(keyword))) {
+ *ok = false;
+ ReportUnexpectedToken(scanner().current_token());
+ }
}
-Literal* Parser::GetLiteralTheHole() {
- return NewLiteral(isolate()->factory()->the_hole_value());
+Literal* Parser::GetLiteralUndefined() {
+ return factory()->NewLiteral(isolate()->factory()->undefined_value());
}
-Literal* Parser::GetLiteralNumber(double value) {
- return NewNumberLiteral(value);
+Literal* Parser::GetLiteralTheHole() {
+ return factory()->NewLiteral(isolate()->factory()->the_hole_value());
}
// Parses an identifier that is valid for the current scope, in particular it
// fails on strict mode future reserved keywords in a strict scope.
Handle<String> Parser::ParseIdentifier(bool* ok) {
- if (top_scope_->is_strict_mode()) {
+ if (!top_scope_->is_classic_mode()) {
Expect(Token::IDENTIFIER, ok);
} else if (!Check(Token::IDENTIFIER)) {
Expect(Token::FUTURE_STRICT_RESERVED_WORD, ok);
@@ -4260,12 +4833,21 @@ Handle<String> Parser::ParseIdentifierName(bool* ok) {
}
+void Parser::MarkAsLValue(Expression* expression) {
+ VariableProxy* proxy = expression != NULL
+ ? expression->AsVariableProxy()
+ : NULL;
+
+ if (proxy != NULL) proxy->MarkAsLValue();
+}
+
+
// Checks LHS expression for assignment and prefix/postfix increment/decrement
// in strict mode.
void Parser::CheckStrictModeLValue(Expression* expression,
const char* error,
bool* ok) {
- ASSERT(top_scope_->is_strict_mode());
+ ASSERT(!top_scope_->is_classic_mode());
VariableProxy* lhs = expression != NULL
? expression->AsVariableProxy()
: NULL;
@@ -4384,11 +4966,6 @@ void Parser::RegisterTargetUse(Label* target, Target* stop) {
}
-Literal* Parser::NewNumberLiteral(double number) {
- return NewLiteral(isolate()->factory()->NewNumber(number, TENURED));
-}
-
-
Expression* Parser::NewThrowReferenceError(Handle<String> type) {
return NewThrowError(isolate()->factory()->MakeReferenceError_symbol(),
type, HandleVector<Object>(NULL, 0));
@@ -4428,19 +5005,15 @@ Expression* Parser::NewThrowError(Handle<String> constructor,
elements->set(i, *element);
}
}
- Handle<JSArray> array = isolate()->factory()->NewJSArrayWithElements(elements,
- TENURED);
+ Handle<JSArray> array = isolate()->factory()->NewJSArrayWithElements(
+ elements, FAST_ELEMENTS, TENURED);
ZoneList<Expression*>* args = new(zone()) ZoneList<Expression*>(2);
- args->Add(NewLiteral(type));
- args->Add(NewLiteral(array));
- CallRuntime* call_constructor = new(zone()) CallRuntime(isolate(),
- constructor,
- NULL,
- args);
- return new(zone()) Throw(isolate(),
- call_constructor,
- scanner().location().beg_pos);
+ args->Add(factory()->NewLiteral(type));
+ args->Add(factory()->NewLiteral(array));
+ CallRuntime* call_constructor =
+ factory()->NewCallRuntime(constructor, NULL, args);
+ return factory()->NewThrow(call_constructor, scanner().location().beg_pos);
}
// ----------------------------------------------------------------------------
@@ -5317,18 +5890,21 @@ int ScriptDataImpl::ReadNumber(byte** source) {
// Create a Scanner for the preparser to use as input, and preparse the source.
-static ScriptDataImpl* DoPreParse(UC16CharacterStream* source,
+static ScriptDataImpl* DoPreParse(Utf16CharacterStream* source,
int flags,
ParserRecorder* recorder) {
Isolate* isolate = Isolate::Current();
+ HistogramTimerScope timer(isolate->counters()->pre_parse());
Scanner scanner(isolate->unicode_cache());
- scanner.SetHarmonyScoping((flags & kHarmonyScoping) != 0);
+ scanner.SetHarmonyScoping(FLAG_harmony_scoping);
scanner.Initialize(source);
intptr_t stack_limit = isolate->stack_guard()->real_climit();
- if (!preparser::PreParser::PreParseProgram(&scanner,
- recorder,
- flags,
- stack_limit)) {
+ preparser::PreParser::PreParseResult result =
+ preparser::PreParser::PreParseProgram(&scanner,
+ recorder,
+ flags,
+ stack_limit);
+ if (result == preparser::PreParser::kPreParseStackOverflow) {
isolate->StackOverflow();
return NULL;
}
@@ -5342,7 +5918,7 @@ static ScriptDataImpl* DoPreParse(UC16CharacterStream* source,
// Preparse, but only collect data that is immediately useful,
// even if the preparser data is only used once.
-ScriptDataImpl* ParserApi::PartialPreParse(UC16CharacterStream* source,
+ScriptDataImpl* ParserApi::PartialPreParse(Handle<String> source,
v8::Extension* extension,
int flags) {
bool allow_lazy = FLAG_lazy && (extension == NULL);
@@ -5353,11 +5929,19 @@ ScriptDataImpl* ParserApi::PartialPreParse(UC16CharacterStream* source,
}
flags |= kAllowLazy;
PartialParserRecorder recorder;
- return DoPreParse(source, flags, &recorder);
+ int source_length = source->length();
+ if (source->IsExternalTwoByteString()) {
+ ExternalTwoByteStringUtf16CharacterStream stream(
+ Handle<ExternalTwoByteString>::cast(source), 0, source_length);
+ return DoPreParse(&stream, flags, &recorder);
+ } else {
+ GenericStringUtf16CharacterStream stream(source, 0, source_length);
+ return DoPreParse(&stream, flags, &recorder);
+ }
}
-ScriptDataImpl* ParserApi::PreParse(UC16CharacterStream* source,
+ScriptDataImpl* ParserApi::PreParse(Utf16CharacterStream* source,
v8::Extension* extension,
int flags) {
Handle<Script> no_script;
@@ -5391,28 +5975,33 @@ bool RegExpParser::ParseRegExp(FlatStringReader* input,
}
-bool ParserApi::Parse(CompilationInfo* info) {
+bool ParserApi::Parse(CompilationInfo* info, int parsing_flags) {
ASSERT(info->function() == NULL);
FunctionLiteral* result = NULL;
Handle<Script> script = info->script();
- bool harmony_scoping = !info->is_native() && FLAG_harmony_scoping;
+ ASSERT((parsing_flags & kLanguageModeMask) == CLASSIC_MODE);
+ if (!info->is_native() && FLAG_harmony_scoping) {
+ // Harmony scoping is requested.
+ parsing_flags |= EXTENDED_MODE;
+ }
+ if (!info->is_native() && FLAG_harmony_modules) {
+ parsing_flags |= kAllowModules;
+ }
+ if (FLAG_allow_natives_syntax || info->is_native()) {
+ // We require %identifier(..) syntax.
+ parsing_flags |= kAllowNativesSyntax;
+ }
if (info->is_lazy()) {
- bool allow_natives_syntax =
- FLAG_allow_natives_syntax ||
- info->is_native();
- Parser parser(script, allow_natives_syntax, NULL, NULL);
- parser.SetHarmonyScoping(harmony_scoping);
- result = parser.ParseLazy(info);
+ ASSERT(!info->is_eval());
+ Parser parser(script, parsing_flags, NULL, NULL);
+ if (info->shared_info()->is_function()) {
+ result = parser.ParseLazy(info);
+ } else {
+ result = parser.ParseProgram(info);
+ }
} else {
- // Whether we allow %identifier(..) syntax.
- bool allow_natives_syntax =
- info->is_native() || FLAG_allow_natives_syntax;
ScriptDataImpl* pre_data = info->pre_parse_data();
- Parser parser(script,
- allow_natives_syntax,
- info->extension(),
- pre_data);
- parser.SetHarmonyScoping(harmony_scoping);
+ Parser parser(script, parsing_flags, info->extension(), pre_data);
if (pre_data != NULL && pre_data->has_error()) {
Scanner::Location loc = pre_data->MessageLocation();
const char* message = pre_data->BuildMessage();
@@ -5425,11 +6014,7 @@ bool ParserApi::Parse(CompilationInfo* info) {
DeleteArray(args.start());
ASSERT(info->isolate()->has_pending_exception());
} else {
- Handle<String> source = Handle<String>(String::cast(script->source()));
- result = parser.ParseProgram(source,
- info->is_global(),
- info->strict_mode_flag(),
- info->is_qml_mode());
+ result = parser.ParseProgram(info);
}
}
info->SetFunction(result);
diff --git a/src/3rdparty/v8/src/parser.h b/src/3rdparty/v8/src/parser.h
index 407d75f..b4d8825 100644
--- a/src/3rdparty/v8/src/parser.h
+++ b/src/3rdparty/v8/src/parser.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -43,8 +43,6 @@ class FuncNameInferrer;
class ParserLog;
class PositionStack;
class Target;
-class LexicalScope;
-class SaveScope;
template <typename T> class ZoneListWrapper;
@@ -69,30 +67,36 @@ class ParserMessage : public Malloced {
class FunctionEntry BASE_EMBEDDED {
public:
- explicit FunctionEntry(Vector<unsigned> backing) : backing_(backing) { }
- FunctionEntry() : backing_(Vector<unsigned>::empty()) { }
-
- int start_pos() { return backing_[kStartPosOffset]; }
- int end_pos() { return backing_[kEndPosOffset]; }
- int literal_count() { return backing_[kLiteralCountOffset]; }
- int property_count() { return backing_[kPropertyCountOffset]; }
- StrictModeFlag strict_mode_flag() {
- ASSERT(backing_[kStrictModeOffset] == kStrictMode ||
- backing_[kStrictModeOffset] == kNonStrictMode);
- return static_cast<StrictModeFlag>(backing_[kStrictModeOffset]);
- }
+ enum {
+ kStartPositionIndex,
+ kEndPositionIndex,
+ kLiteralCountIndex,
+ kPropertyCountIndex,
+ kLanguageModeIndex,
+ kSize
+ };
- bool is_valid() { return backing_.length() > 0; }
+ explicit FunctionEntry(Vector<unsigned> backing)
+ : backing_(backing) { }
- static const int kSize = 5;
+ FunctionEntry() : backing_() { }
+
+ int start_pos() { return backing_[kStartPositionIndex]; }
+ int end_pos() { return backing_[kEndPositionIndex]; }
+ int literal_count() { return backing_[kLiteralCountIndex]; }
+ int property_count() { return backing_[kPropertyCountIndex]; }
+ LanguageMode language_mode() {
+ ASSERT(backing_[kLanguageModeIndex] == CLASSIC_MODE ||
+ backing_[kLanguageModeIndex] == STRICT_MODE ||
+ backing_[kLanguageModeIndex] == EXTENDED_MODE);
+ return static_cast<LanguageMode>(backing_[kLanguageModeIndex]);
+ }
+
+ bool is_valid() { return !backing_.is_empty(); }
private:
Vector<unsigned> backing_;
- static const int kStartPosOffset = 0;
- static const int kEndPosOffset = 1;
- static const int kLiteralCountOffset = 2;
- static const int kPropertyCountOffset = 3;
- static const int kStrictModeOffset = 4;
+ bool owns_data_;
};
@@ -104,7 +108,7 @@ class ScriptDataImpl : public ScriptData {
// Create an empty ScriptDataImpl that is guaranteed to not satisfy
// a SanityCheck.
- ScriptDataImpl() : store_(Vector<unsigned>()), owns_store_(false) { }
+ ScriptDataImpl() : owns_store_(false) { }
virtual ~ScriptDataImpl();
virtual int Length();
@@ -165,16 +169,16 @@ class ParserApi {
// Parses the source code represented by the compilation info and sets its
// function literal. Returns false (and deallocates any allocated AST
// nodes) if parsing failed.
- static bool Parse(CompilationInfo* info);
+ static bool Parse(CompilationInfo* info, int flags);
// Generic preparser generating full preparse data.
- static ScriptDataImpl* PreParse(UC16CharacterStream* source,
+ static ScriptDataImpl* PreParse(Utf16CharacterStream* source,
v8::Extension* extension,
int flags);
// Preparser that only does preprocessing that makes sense if only used
// immediately after.
- static ScriptDataImpl* PartialPreParse(UC16CharacterStream* source,
+ static ScriptDataImpl* PartialPreParse(Handle<String> source,
v8::Extension* extension,
int flags);
};
@@ -182,7 +186,7 @@ class ParserApi {
// ----------------------------------------------------------------------------
// REGEXP PARSING
-// A BuffferedZoneList is an automatically growing list, just like (and backed
+// A BufferedZoneList is an automatically growing list, just like (and backed
// by) a ZoneList, that is optimized for the case of adding and removing
// a single element. The last element added is stored outside the backing list,
// and if no more than one element is ever added, the ZoneList isn't even
@@ -421,20 +425,22 @@ class RegExpParser {
// ----------------------------------------------------------------------------
// JAVASCRIPT PARSING
+// Forward declaration.
+class SingletonLogger;
+
class Parser {
public:
Parser(Handle<Script> script,
- bool allow_natives_syntax,
+ int parsing_flags, // Combination of ParsingFlags
v8::Extension* extension,
ScriptDataImpl* pre_data);
- virtual ~Parser() { }
+ virtual ~Parser() {
+ delete reusable_preparser_;
+ reusable_preparser_ = NULL;
+ }
// Returns NULL if parsing failed.
- FunctionLiteral* ParseProgram(Handle<String> source,
- bool in_global_context,
- StrictModeFlag strict_mode,
- bool qml_mode = false);
-
+ FunctionLiteral* ParseProgram(CompilationInfo* info);
FunctionLiteral* ParseLazy(CompilationInfo* info);
void ReportMessageAt(Scanner::Location loc,
@@ -443,7 +449,6 @@ class Parser {
void ReportMessageAt(Scanner::Location loc,
const char* message,
Vector<Handle<String> > args);
- void SetHarmonyScoping(bool block_scoping);
private:
// Limit on number of function parameters is chosen arbitrarily.
@@ -452,16 +457,15 @@ class Parser {
// should be checked.
static const int kMaxNumFunctionParameters = 32766;
static const int kMaxNumFunctionLocals = 32767;
- FunctionLiteral* ParseLazy(CompilationInfo* info,
- UC16CharacterStream* source,
- ZoneScope* zone_scope);
+
enum Mode {
PARSE_LAZILY,
PARSE_EAGERLY
};
enum VariableDeclarationContext {
- kSourceElement,
+ kModuleElement,
+ kBlockElement,
kStatement,
kForStatement
};
@@ -472,25 +476,101 @@ class Parser {
kHasNoInitializers
};
+ class BlockState;
+
+ class FunctionState BASE_EMBEDDED {
+ public:
+ FunctionState(Parser* parser,
+ Scope* scope,
+ Isolate* isolate);
+ ~FunctionState();
+
+ int NextMaterializedLiteralIndex() {
+ return next_materialized_literal_index_++;
+ }
+ int materialized_literal_count() {
+ return next_materialized_literal_index_ - JSFunction::kLiteralsPrefixSize;
+ }
+
+ int NextHandlerIndex() { return next_handler_index_++; }
+ int handler_count() { return next_handler_index_; }
+
+ void SetThisPropertyAssignmentInfo(
+ bool only_simple_this_property_assignments,
+ Handle<FixedArray> this_property_assignments) {
+ only_simple_this_property_assignments_ =
+ only_simple_this_property_assignments;
+ this_property_assignments_ = this_property_assignments;
+ }
+ bool only_simple_this_property_assignments() {
+ return only_simple_this_property_assignments_;
+ }
+ Handle<FixedArray> this_property_assignments() {
+ return this_property_assignments_;
+ }
+
+ void AddProperty() { expected_property_count_++; }
+ int expected_property_count() { return expected_property_count_; }
+
+ AstNodeFactory<AstConstructionVisitor>* factory() { return &factory_; }
+
+ private:
+ // Used to assign an index to each literal that needs materialization in
+ // the function. Includes regexp literals, and boilerplate for object and
+ // array literals.
+ int next_materialized_literal_index_;
+
+ // Used to assign a per-function index to try and catch handlers.
+ int next_handler_index_;
+
+ // Properties count estimation.
+ int expected_property_count_;
+
+ // Keeps track of assignments to properties of this. Used for
+ // optimizing constructors.
+ bool only_simple_this_property_assignments_;
+ Handle<FixedArray> this_property_assignments_;
+
+ Parser* parser_;
+ FunctionState* outer_function_state_;
+ Scope* outer_scope_;
+ int saved_ast_node_id_;
+ AstNodeFactory<AstConstructionVisitor> factory_;
+ };
+
+
+
+
+ FunctionLiteral* ParseLazy(CompilationInfo* info,
+ Utf16CharacterStream* source,
+ ZoneScope* zone_scope);
+
Isolate* isolate() { return isolate_; }
Zone* zone() { return isolate_->zone(); }
// Called by ParseProgram after setting up the scanner.
- FunctionLiteral* DoParseProgram(Handle<String> source,
- bool in_global_context,
- StrictModeFlag strict_mode,
- bool qml_mode,
+ FunctionLiteral* DoParseProgram(CompilationInfo* info,
+ Handle<String> source,
ZoneScope* zone_scope);
// Report syntax error
void ReportUnexpectedToken(Token::Value token);
void ReportInvalidPreparseData(Handle<String> name, bool* ok);
void ReportMessage(const char* message, Vector<const char*> args);
+ void ReportMessage(const char* message, Vector<Handle<String> > args);
bool inside_with() const { return top_scope_->inside_with(); }
Scanner& scanner() { return scanner_; }
Mode mode() const { return mode_; }
ScriptDataImpl* pre_data() const { return pre_data_; }
+ bool is_extended_mode() {
+ ASSERT(top_scope_ != NULL);
+ return top_scope_->is_extended_mode();
+ }
+ Scope* DeclarationScope(VariableMode mode) {
+ return (mode == LET || mode == CONST_HARMONY)
+ ? top_scope_ : top_scope_->DeclarationScope();
+ }
// Check if the given string is 'eval' or 'arguments'.
bool IsEvalOrArguments(Handle<String> string);
@@ -500,16 +580,28 @@ class Parser {
// By making the 'exception handling' explicit, we are forced to check
// for failure at the call sites.
void* ParseSourceElements(ZoneList<Statement*>* processor,
- int end_token, bool* ok);
- Statement* ParseSourceElement(ZoneStringList* labels, bool* ok);
+ int end_token, bool is_eval, bool* ok);
+ Statement* ParseModuleElement(ZoneStringList* labels, bool* ok);
+ Block* ParseModuleDeclaration(ZoneStringList* names, bool* ok);
+ Module* ParseModule(bool* ok);
+ Module* ParseModuleLiteral(bool* ok);
+ Module* ParseModulePath(bool* ok);
+ Module* ParseModuleVariable(bool* ok);
+ Module* ParseModuleUrl(bool* ok);
+ Module* ParseModuleSpecifier(bool* ok);
+ Block* ParseImportDeclaration(bool* ok);
+ Statement* ParseExportDeclaration(bool* ok);
+ Statement* ParseBlockElement(ZoneStringList* labels, bool* ok);
Statement* ParseStatement(ZoneStringList* labels, bool* ok);
- Statement* ParseFunctionDeclaration(bool* ok);
+ Statement* ParseFunctionDeclaration(ZoneStringList* names, bool* ok);
Statement* ParseNativeDeclaration(bool* ok);
Block* ParseBlock(ZoneStringList* labels, bool* ok);
Block* ParseVariableStatement(VariableDeclarationContext var_context,
+ ZoneStringList* names,
bool* ok);
Block* ParseVariableDeclarations(VariableDeclarationContext var_context,
VariableDeclarationProperties* decl_props,
+ ZoneStringList* names,
Handle<String>* out,
bool* ok);
Statement* ParseExpressionOrLabelledStatement(ZoneStringList* labels,
@@ -612,6 +704,7 @@ class Parser {
void Expect(Token::Value token, bool* ok);
bool Check(Token::Value token);
void ExpectSemicolon(bool* ok);
+ void ExpectContextualKeyword(const char* keyword, bool* ok);
Handle<String> LiteralString(PretenureFlag tenured) {
if (scanner().is_literal_ascii()) {
@@ -619,7 +712,7 @@ class Parser {
scanner().literal_ascii_string(), tenured);
} else {
return isolate_->factory()->NewStringFromTwoByte(
- scanner().literal_uc16_string(), tenured);
+ scanner().literal_utf16_string(), tenured);
}
}
@@ -629,7 +722,7 @@ class Parser {
scanner().next_literal_ascii_string(), tenured);
} else {
return isolate_->factory()->NewStringFromTwoByte(
- scanner().next_literal_uc16_string(), tenured);
+ scanner().next_literal_utf16_string(), tenured);
}
}
@@ -638,7 +731,6 @@ class Parser {
// Get odd-ball literals.
Literal* GetLiteralUndefined();
Literal* GetLiteralTheHole();
- Literal* GetLiteralNumber(double value);
Handle<String> ParseIdentifier(bool* ok);
Handle<String> ParseIdentifierOrStrictReservedWord(
@@ -648,6 +740,11 @@ class Parser {
bool* is_set,
bool* ok);
+ // Determine if the expression is a variable proxy and mark it as being used
+ // in an assignment or with a increment/decrement operator. This is currently
+ // used on for the statically checking assignments to harmony const bindings.
+ void MarkAsLValue(Expression* expression);
+
// Strict mode validation of LValue expressions
void CheckStrictModeLValue(Expression* expression,
const char* error,
@@ -668,10 +765,10 @@ class Parser {
void CheckConflictingVarDeclarations(Scope* scope, bool* ok);
// Parser support
- VariableProxy* Declare(Handle<String> name, VariableMode mode,
- FunctionLiteral* fun,
- bool resolve,
- bool* ok);
+ VariableProxy* NewUnresolved(Handle<String> name,
+ VariableMode mode,
+ Interface* interface = Interface::NewValue());
+ void Declare(Declaration* declaration, bool resolve, bool* ok);
bool TargetStackContainsLabel(Handle<String> label);
BreakableStatement* LookupBreakTarget(Handle<String> label, bool* ok);
@@ -681,30 +778,12 @@ class Parser {
// Factory methods.
- Statement* EmptyStatement() {
- static v8::internal::EmptyStatement empty;
- return &empty;
- }
-
Scope* NewScope(Scope* parent, ScopeType type);
Handle<String> LookupSymbol(int symbol_id);
Handle<String> LookupCachedSymbol(int symbol_id);
- Expression* NewCall(Expression* expression,
- ZoneList<Expression*>* arguments,
- int pos) {
- return new(zone()) Call(isolate(), expression, arguments, pos);
- }
-
- inline Literal* NewLiteral(Handle<Object> handle) {
- return new(zone()) Literal(isolate(), handle);
- }
-
- // Create a number literal.
- Literal* NewNumberLiteral(double value);
-
// Generate AST node that throw a ReferenceError with the given type.
Expression* NewThrowReferenceError(Handle<String> type);
@@ -724,33 +803,39 @@ class Parser {
Handle<String> type,
Vector< Handle<Object> > arguments);
+ preparser::PreParser::PreParseResult LazyParseFunctionLiteral(
+ SingletonLogger* logger);
+
+ AstNodeFactory<AstConstructionVisitor>* factory() {
+ return current_function_state_->factory();
+ }
+
Isolate* isolate_;
ZoneList<Handle<String> > symbol_cache_;
Handle<Script> script_;
Scanner scanner_;
-
+ preparser::PreParser* reusable_preparser_;
Scope* top_scope_;
-
- LexicalScope* lexical_scope_;
- Mode mode_;
-
+ FunctionState* current_function_state_;
Target* target_stack_; // for break, continue statements
- bool allow_natives_syntax_;
v8::Extension* extension_;
- bool is_pre_parsing_;
ScriptDataImpl* pre_data_;
FuncNameInferrer* fni_;
+
+ Mode mode_;
+ bool allow_natives_syntax_;
+ bool allow_lazy_;
+ bool allow_modules_;
bool stack_overflow_;
// If true, the next (and immediately following) function literal is
// preceded by a parenthesis.
// Heuristically that means that the function will be called immediately,
// so never lazily compile it.
bool parenthesized_function_;
- bool harmony_scoping_;
- friend class LexicalScope;
- friend class SaveScope;
+ friend class BlockState;
+ friend class FunctionState;
};
diff --git a/src/3rdparty/v8/src/platform-cygwin.cc b/src/3rdparty/v8/src/platform-cygwin.cc
index a72f5da..8b1e381 100644
--- a/src/3rdparty/v8/src/platform-cygwin.cc
+++ b/src/3rdparty/v8/src/platform-cygwin.cc
@@ -1,4 +1,4 @@
-// Copyright 2006-2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -41,6 +41,7 @@
#include "v8.h"
+#include "platform-posix.h"
#include "platform.h"
#include "v8threads.h"
#include "vm-state-inl.h"
@@ -61,18 +62,10 @@ double ceiling(double x) {
static Mutex* limit_mutex = NULL;
-void OS::Setup() {
- // Seed the random number generator.
- // Convert the current time to a 64-bit integer first, before converting it
- // to an unsigned. Going directly can cause an overflow and the seed to be
- // set to all ones. The seed will be identical for different instances that
- // call this setup code within the same millisecond.
- uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
- srandom(static_cast<unsigned int>(seed));
- limit_mutex = CreateMutex();
+void OS::PostSetUp() {
+ POSIXPostSetUp();
}
-
uint64_t OS::CpuFeaturesImpliedByPlatform() {
return 0; // Nothing special about Cygwin.
}
@@ -114,7 +107,7 @@ double OS::LocalTimeOffset() {
// We keep the lowest and highest addresses mapped as a quick way of
// determining that pointers are outside the heap (used mostly in assertions
-// and verification). The estimate is conservative, ie, not all addresses in
+// and verification). The estimate is conservative, i.e., not all addresses in
// 'allocated' space are actually allocated to our heap. The range is
// [lowest, highest), inclusive on the low and and exclusive on the high end.
static void* lowest_ever_allocated = reinterpret_cast<void*>(-1);
@@ -290,7 +283,7 @@ void OS::LogSharedLibraryAddresses() {
}
LOG(isolate, SharedLibraryEvent(lib_name, start, end));
} else {
- // Entry not describing executable data. Skip to end of line to setup
+ // Entry not describing executable data. Skip to end of line to set up
// reading the next entry.
do {
c = getc(fp);
@@ -355,6 +348,17 @@ bool VirtualMemory::Uncommit(void* address, size_t size) {
}
+bool VirtualMemory::Guard(void* address) {
+ if (NULL == VirtualAlloc(address,
+ OS::CommitPageSize(),
+ MEM_COMMIT,
+ PAGE_READONLY | PAGE_GUARD)) {
+ return false;
+ }
+ return true;
+}
+
+
class Thread::PlatformData : public Malloced {
public:
PlatformData() : thread_(kNoThread) {}
@@ -365,16 +369,9 @@ class Thread::PlatformData : public Malloced {
Thread::Thread(const Options& options)
- : data_(new PlatformData),
- stack_size_(options.stack_size) {
- set_name(options.name);
-}
-
-
-Thread::Thread(const char* name)
- : data_(new PlatformData),
- stack_size_(0) {
- set_name(name);
+ : data_(new PlatformData()),
+ stack_size_(options.stack_size()) {
+ set_name(options.name());
}
@@ -617,10 +614,18 @@ class Sampler::PlatformData : public Malloced {
class SamplerThread : public Thread {
public:
+ static const int kSamplerThreadStackSize = 64 * KB;
+
explicit SamplerThread(int interval)
- : Thread("SamplerThread"),
+ : Thread(Thread::Options("SamplerThread", kSamplerThreadStackSize)),
interval_(interval) {}
+ static void SetUp() {
+ if (!mutex_) {
+ mutex_ = OS::CreateMutex();
+ }
+ }
+
static void AddActiveSampler(Sampler* sampler) {
ScopedLock lock(mutex_);
SamplerRegistry::AddActiveSampler(sampler);
@@ -722,14 +727,28 @@ class SamplerThread : public Thread {
static Mutex* mutex_;
static SamplerThread* instance_;
+ private:
DISALLOW_COPY_AND_ASSIGN(SamplerThread);
};
-Mutex* SamplerThread::mutex_ = OS::CreateMutex();
+Mutex* SamplerThread::mutex_ = NULL;
SamplerThread* SamplerThread::instance_ = NULL;
+void OS::SetUp() {
+ // Seed the random number generator.
+ // Convert the current time to a 64-bit integer first, before converting it
+ // to an unsigned. Going directly can cause an overflow and the seed to be
+ // set to all ones. The seed will be identical for different instances that
+ // call this setup code within the same millisecond.
+ uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
+ srandom(static_cast<unsigned int>(seed));
+ limit_mutex = CreateMutex();
+ SamplerThread::SetUp();
+}
+
+
Sampler::Sampler(Isolate* isolate, int interval)
: isolate_(isolate),
interval_(interval),
diff --git a/src/3rdparty/v8/src/platform-freebsd.cc b/src/3rdparty/v8/src/platform-freebsd.cc
index 20bd837..6b1c987 100644
--- a/src/3rdparty/v8/src/platform-freebsd.cc
+++ b/src/3rdparty/v8/src/platform-freebsd.cc
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -54,6 +54,7 @@
#include "v8.h"
#include "v8threads.h"
+#include "platform-posix.h"
#include "platform.h"
#include "vm-state-inl.h"
@@ -79,15 +80,8 @@ double ceiling(double x) {
static Mutex* limit_mutex = NULL;
-void OS::Setup() {
- // Seed the random number generator.
- // Convert the current time to a 64-bit integer first, before converting it
- // to an unsigned. Going directly can cause an overflow and the seed to be
- // set to all ones. The seed will be identical for different instances that
- // call this setup code within the same millisecond.
- uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
- srandom(static_cast<unsigned int>(seed));
- limit_mutex = CreateMutex();
+void OS::PostSetUp() {
+ POSIXPostSetUp();
}
@@ -128,7 +122,7 @@ double OS::LocalTimeOffset() {
// We keep the lowest and highest addresses mapped as a quick way of
// determining that pointers are outside the heap (used mostly in assertions
-// and verification). The estimate is conservative, ie, not all addresses in
+// and verification). The estimate is conservative, i.e., not all addresses in
// 'allocated' space are actually allocated to our heap. The range is
// [lowest, highest), inclusive on the low and and exclusive on the high end.
static void* lowest_ever_allocated = reinterpret_cast<void*>(-1);
@@ -411,6 +405,12 @@ bool VirtualMemory::Uncommit(void* address, size_t size) {
}
+bool VirtualMemory::Guard(void* address) {
+ OS::Guard(address, OS::CommitPageSize());
+ return true;
+}
+
+
void* VirtualMemory::ReserveRegion(size_t size) {
void* result = mmap(OS::GetRandomMmapAddr(),
size,
@@ -464,15 +464,8 @@ class Thread::PlatformData : public Malloced {
Thread::Thread(const Options& options)
: data_(new PlatformData),
- stack_size_(options.stack_size) {
- set_name(options.name);
-}
-
-
-Thread::Thread(const char* name)
- : data_(new PlatformData),
- stack_size_(0) {
- set_name(name);
+ stack_size_(options.stack_size()) {
+ set_name(options.name());
}
@@ -717,10 +710,18 @@ class SignalSender : public Thread {
FULL_INTERVAL
};
+ static const int kSignalSenderStackSize = 64 * KB;
+
explicit SignalSender(int interval)
- : Thread("SignalSender"),
+ : Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),
interval_(interval) {}
+ static void SetUp() {
+ if (!mutex_) {
+ mutex_ = OS::CreateMutex();
+ }
+ }
+
static void AddActiveSampler(Sampler* sampler) {
ScopedLock lock(mutex_);
SamplerRegistry::AddActiveSampler(sampler);
@@ -840,15 +841,29 @@ class SignalSender : public Thread {
static bool signal_handler_installed_;
static struct sigaction old_signal_handler_;
+ private:
DISALLOW_COPY_AND_ASSIGN(SignalSender);
};
-Mutex* SignalSender::mutex_ = OS::CreateMutex();
+Mutex* SignalSender::mutex_ = NULL;
SignalSender* SignalSender::instance_ = NULL;
struct sigaction SignalSender::old_signal_handler_;
bool SignalSender::signal_handler_installed_ = false;
+void OS::SetUp() {
+ // Seed the random number generator.
+ // Convert the current time to a 64-bit integer first, before converting it
+ // to an unsigned. Going directly can cause an overflow and the seed to be
+ // set to all ones. The seed will be identical for different instances that
+ // call this setup code within the same millisecond.
+ uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
+ srandom(static_cast<unsigned int>(seed));
+ limit_mutex = CreateMutex();
+ SignalSender::SetUp();
+}
+
+
Sampler::Sampler(Isolate* isolate, int interval)
: isolate_(isolate),
interval_(interval),
diff --git a/src/3rdparty/v8/src/platform-linux.cc b/src/3rdparty/v8/src/platform-linux.cc
index 451f6fc..9bea32d 100644
--- a/src/3rdparty/v8/src/platform-linux.cc
+++ b/src/3rdparty/v8/src/platform-linux.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -48,7 +48,7 @@
#include <unistd.h> // sysconf
#if defined(__GLIBC__) && !defined(__UCLIBC__)
#include <execinfo.h> // backtrace, backtrace_symbols
-#endif // def __GLIBC__
+#endif // defined(__GLIBC__) && !defined(__UCLIBC__)
#include <strings.h> // index
#include <errno.h>
#include <stdarg.h>
@@ -57,6 +57,7 @@
#include "v8.h"
+#include "platform-posix.h"
#include "platform.h"
#include "v8threads.h"
#include "vm-state-inl.h"
@@ -78,30 +79,8 @@ double ceiling(double x) {
static Mutex* limit_mutex = NULL;
-void OS::Setup() {
- // Seed the random number generator. We preserve microsecond resolution.
- uint64_t seed = Ticks() ^ (getpid() << 16);
- srandom(static_cast<unsigned int>(seed));
- limit_mutex = CreateMutex();
-
-#ifdef __arm__
- // When running on ARM hardware check that the EABI used by V8 and
- // by the C code is the same.
- bool hard_float = OS::ArmUsingHardFloat();
- if (hard_float) {
-#if !USE_EABI_HARDFLOAT
- PrintF("ERROR: Binary compiled with -mfloat-abi=hard but without "
- "-DUSE_EABI_HARDFLOAT\n");
- exit(1);
-#endif
- } else {
-#if USE_EABI_HARDFLOAT
- PrintF("ERROR: Binary not compiled with -mfloat-abi=hard but with "
- "-DUSE_EABI_HARDFLOAT\n");
- exit(1);
-#endif
- }
-#endif
+void OS::PostSetUp() {
+ POSIXPostSetUp();
}
@@ -187,15 +166,15 @@ bool OS::ArmCpuHasFeature(CpuFeature feature) {
// pair r0, r1 is loaded with 0.0. If -mfloat-abi=hard is pased to GCC then
// calling this will return 1.0 and otherwise 0.0.
static void ArmUsingHardFloatHelper() {
- asm("mov r0, #0");
+ asm("mov r0, #0":::"r0");
#if defined(__VFP_FP__) && !defined(__SOFTFP__)
// Load 0x3ff00000 into r1 using instructions available in both ARM
// and Thumb mode.
- asm("mov r1, #3");
- asm("mov r2, #255");
- asm("lsl r1, r1, #8");
- asm("orr r1, r1, r2");
- asm("lsl r1, r1, #20");
+ asm("mov r1, #3":::"r1");
+ asm("mov r2, #255":::"r2");
+ asm("lsl r1, r1, #8":::"r1");
+ asm("orr r1, r1, r2":::"r1");
+ asm("lsl r1, r1, #20":::"r1");
// For vmov d0, r0, r1 use ARM mode.
#ifdef __thumb__
asm volatile(
@@ -209,12 +188,12 @@ static void ArmUsingHardFloatHelper() {
" adr r3, 2f+1 \n\t"
" bx r3 \n\t"
" .THUMB \n"
- "2: \n\t");
+ "2: \n\t":::"r3");
#else
asm("vmov d0, r0, r1");
#endif // __thumb__
#endif // defined(__VFP_FP__) && !defined(__SOFTFP__)
- asm("mov r1, #0");
+ asm("mov r1, #0":::"r1");
}
@@ -326,7 +305,7 @@ double OS::LocalTimeOffset() {
// We keep the lowest and highest addresses mapped as a quick way of
// determining that pointers are outside the heap (used mostly in assertions
-// and verification). The estimate is conservative, ie, not all addresses in
+// and verification). The estimate is conservative, i.e., not all addresses in
// 'allocated' space are actually allocated to our heap. The range is
// [lowest, highest), inclusive on the low and and exclusive on the high end.
static void* lowest_ever_allocated = reinterpret_cast<void*>(-1);
@@ -388,6 +367,9 @@ void OS::Sleep(int milliseconds) {
void OS::Abort() {
// Redirect to std abort to signal abnormal program termination.
+ if (FLAG_break_on_abort) {
+ DebugBreak();
+ }
abort();
}
@@ -512,7 +494,7 @@ void OS::LogSharedLibraryAddresses() {
}
LOG(isolate, SharedLibraryEvent(lib_name, start, end));
} else {
- // Entry not describing executable data. Skip to end of line to setup
+ // Entry not describing executable data. Skip to end of line to set up
// reading the next entry.
do {
c = getc(fp);
@@ -578,9 +560,9 @@ int OS::StackWalk(Vector<OS::StackFrame> frames) {
free(symbols);
return frames_count;
-#else // ndef __GLIBC__
+#else // defined(__GLIBC__) && !defined(__UCLIBC__)
return 0;
-#endif // ndef __GLIBC__
+#endif // defined(__GLIBC__) && !defined(__UCLIBC__)
}
@@ -666,6 +648,12 @@ bool VirtualMemory::Uncommit(void* address, size_t size) {
}
+bool VirtualMemory::Guard(void* address) {
+ OS::Guard(address, OS::CommitPageSize());
+ return true;
+}
+
+
void* VirtualMemory::ReserveRegion(size_t size) {
void* result = mmap(OS::GetRandomMmapAddr(),
size,
@@ -720,15 +708,8 @@ class Thread::PlatformData : public Malloced {
Thread::Thread(const Options& options)
: data_(new PlatformData()),
- stack_size_(options.stack_size) {
- set_name(options.name);
-}
-
-
-Thread::Thread(const char* name)
- : data_(new PlatformData()),
- stack_size_(0) {
- set_name(name);
+ stack_size_(options.stack_size()) {
+ set_name(options.name());
}
@@ -768,7 +749,8 @@ void Thread::Start() {
pthread_attr_setstacksize(&attr, static_cast<size_t>(stack_size_));
attr_ptr = &attr;
}
- pthread_create(&data_->thread_, attr_ptr, ThreadEntry, this);
+ int result = pthread_create(&data_->thread_, attr_ptr, ThreadEntry, this);
+ CHECK_EQ(0, result);
ASSERT(data_->thread_ != kNoThread);
}
@@ -950,6 +932,38 @@ typedef struct ucontext {
} ucontext_t;
enum ArmRegisters {R15 = 15, R13 = 13, R11 = 11};
+#elif !defined(__GLIBC__) && defined(__mips__)
+// MIPS version of sigcontext, for Android bionic.
+struct sigcontext {
+ uint32_t regmask;
+ uint32_t status;
+ uint64_t pc;
+ uint64_t gregs[32];
+ uint64_t fpregs[32];
+ uint32_t acx;
+ uint32_t fpc_csr;
+ uint32_t fpc_eir;
+ uint32_t used_math;
+ uint32_t dsp;
+ uint64_t mdhi;
+ uint64_t mdlo;
+ uint32_t hi1;
+ uint32_t lo1;
+ uint32_t hi2;
+ uint32_t lo2;
+ uint32_t hi3;
+ uint32_t lo3;
+};
+typedef uint32_t __sigset_t;
+typedef struct sigcontext mcontext_t;
+typedef struct ucontext {
+ uint32_t uc_flags;
+ struct ucontext* uc_link;
+ stack_t uc_stack;
+ mcontext_t uc_mcontext;
+ __sigset_t uc_sigmask;
+} ucontext_t;
+
#endif
@@ -964,7 +978,6 @@ static int GetThreadID() {
static void ProfilerSignalHandler(int signal, siginfo_t* info, void* context) {
-#ifndef V8_HOST_ARCH_MIPS
USE(info);
if (signal != SIGPROF) return;
Isolate* isolate = Isolate::UncheckedCurrent();
@@ -1006,15 +1019,14 @@ static void ProfilerSignalHandler(int signal, siginfo_t* info, void* context) {
sample->pc = reinterpret_cast<Address>(mcontext.arm_pc);
sample->sp = reinterpret_cast<Address>(mcontext.arm_sp);
sample->fp = reinterpret_cast<Address>(mcontext.arm_fp);
-#endif
+#endif // (__GLIBC__ < 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ <= 3))
#elif V8_HOST_ARCH_MIPS
- sample.pc = reinterpret_cast<Address>(mcontext.pc);
- sample.sp = reinterpret_cast<Address>(mcontext.gregs[29]);
- sample.fp = reinterpret_cast<Address>(mcontext.gregs[30]);
-#endif
+ sample->pc = reinterpret_cast<Address>(mcontext.pc);
+ sample->sp = reinterpret_cast<Address>(mcontext.gregs[29]);
+ sample->fp = reinterpret_cast<Address>(mcontext.gregs[30]);
+#endif // V8_HOST_ARCH_*
sampler->SampleStack(sample);
sampler->Tick(sample);
-#endif
}
@@ -1036,11 +1048,19 @@ class SignalSender : public Thread {
FULL_INTERVAL
};
+ static const int kSignalSenderStackSize = 64 * KB;
+
explicit SignalSender(int interval)
- : Thread("SignalSender"),
+ : Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),
vm_tgid_(getpid()),
interval_(interval) {}
+ static void SetUp() {
+ if (!mutex_) {
+ mutex_ = OS::CreateMutex();
+ }
+ }
+
static void InstallSignalHandler() {
struct sigaction sa;
sa.sa_sigaction = ProfilerSignalHandler;
@@ -1152,6 +1172,9 @@ class SignalSender : public Thread {
// occuring during signal delivery.
useconds_t interval = interval_ * 1000 - 100;
if (full_or_half == HALF_INTERVAL) interval /= 2;
+#if defined(ANDROID)
+ usleep(interval);
+#else
int result = usleep(interval);
#ifdef DEBUG
if (result != 0 && errno != EINTR) {
@@ -1161,8 +1184,9 @@ class SignalSender : public Thread {
errno);
ASSERT(result == 0 || errno == EINTR);
}
-#endif
+#endif // DEBUG
USE(result);
+#endif // ANDROID
}
const int vm_tgid_;
@@ -1175,16 +1199,45 @@ class SignalSender : public Thread {
static bool signal_handler_installed_;
static struct sigaction old_signal_handler_;
+ private:
DISALLOW_COPY_AND_ASSIGN(SignalSender);
};
-Mutex* SignalSender::mutex_ = OS::CreateMutex();
+Mutex* SignalSender::mutex_ = NULL;
SignalSender* SignalSender::instance_ = NULL;
struct sigaction SignalSender::old_signal_handler_;
bool SignalSender::signal_handler_installed_ = false;
+void OS::SetUp() {
+ // Seed the random number generator. We preserve microsecond resolution.
+ uint64_t seed = Ticks() ^ (getpid() << 16);
+ srandom(static_cast<unsigned int>(seed));
+ limit_mutex = CreateMutex();
+
+#ifdef __arm__
+ // When running on ARM hardware check that the EABI used by V8 and
+ // by the C code is the same.
+ bool hard_float = OS::ArmUsingHardFloat();
+ if (hard_float) {
+#if !USE_EABI_HARDFLOAT
+ PrintF("ERROR: Binary compiled with -mfloat-abi=hard but without "
+ "-DUSE_EABI_HARDFLOAT\n");
+ exit(1);
+#endif
+ } else {
+#if USE_EABI_HARDFLOAT
+ PrintF("ERROR: Binary not compiled with -mfloat-abi=hard but with "
+ "-DUSE_EABI_HARDFLOAT\n");
+ exit(1);
+#endif
+ }
+#endif
+ SignalSender::SetUp();
+}
+
+
Sampler::Sampler(Isolate* isolate, int interval)
: isolate_(isolate),
interval_(interval),
diff --git a/src/3rdparty/v8/src/platform-macos.cc b/src/3rdparty/v8/src/platform-macos.cc
index 6e5d29d..afcd80a 100644
--- a/src/3rdparty/v8/src/platform-macos.cc
+++ b/src/3rdparty/v8/src/platform-macos.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -58,6 +58,7 @@
#include "v8.h"
+#include "platform-posix.h"
#include "platform.h"
#include "vm-state-inl.h"
@@ -75,7 +76,7 @@ extern "C" {
namespace v8 {
namespace internal {
-// 0 is never a valid thread id on MacOSX since a ptread_t is
+// 0 is never a valid thread id on MacOSX since a pthread_t is
// a pointer.
static const pthread_t kNoThread = (pthread_t) 0;
@@ -93,17 +94,14 @@ double ceiling(double x) {
static Mutex* limit_mutex = NULL;
-void OS::Setup() {
- // Seed the random number generator. We preserve microsecond resolution.
- uint64_t seed = Ticks() ^ (getpid() << 16);
- srandom(static_cast<unsigned int>(seed));
- limit_mutex = CreateMutex();
+void OS::PostSetUp() {
+ POSIXPostSetUp();
}
// We keep the lowest and highest addresses mapped as a quick way of
// determining that pointers are outside the heap (used mostly in assertions
-// and verification). The estimate is conservative, ie, not all addresses in
+// and verification). The estimate is conservative, i.e., not all addresses in
// 'allocated' space are actually allocated to our heap. The range is
// [lowest, highest), inclusive on the low and and exclusive on the high end.
static void* lowest_ever_allocated = reinterpret_cast<void*>(-1);
@@ -429,6 +427,12 @@ bool VirtualMemory::Commit(void* address, size_t size, bool is_executable) {
}
+bool VirtualMemory::Guard(void* address) {
+ OS::Guard(address, OS::CommitPageSize());
+ return true;
+}
+
+
bool VirtualMemory::CommitRegion(void* address,
size_t size,
bool is_executable) {
@@ -473,17 +477,11 @@ class Thread::PlatformData : public Malloced {
pthread_t thread_; // Thread handle for pthread.
};
-Thread::Thread(const Options& options)
- : data_(new PlatformData),
- stack_size_(options.stack_size) {
- set_name(options.name);
-}
-
-Thread::Thread(const char* name)
+Thread::Thread(const Options& options)
: data_(new PlatformData),
- stack_size_(0) {
- set_name(name);
+ stack_size_(options.stack_size()) {
+ set_name(options.name());
}
@@ -736,12 +734,21 @@ class Sampler::PlatformData : public Malloced {
thread_act_t profiled_thread_;
};
+
class SamplerThread : public Thread {
public:
+ static const int kSamplerThreadStackSize = 64 * KB;
+
explicit SamplerThread(int interval)
- : Thread("SamplerThread"),
+ : Thread(Thread::Options("SamplerThread", kSamplerThreadStackSize)),
interval_(interval) {}
+ static void SetUp() {
+ if (!mutex_) {
+ mutex_ = OS::CreateMutex();
+ }
+ }
+
static void AddActiveSampler(Sampler* sampler) {
ScopedLock lock(mutex_);
SamplerRegistry::AddActiveSampler(sampler);
@@ -854,16 +861,26 @@ class SamplerThread : public Thread {
static Mutex* mutex_;
static SamplerThread* instance_;
+ private:
DISALLOW_COPY_AND_ASSIGN(SamplerThread);
};
#undef REGISTER_FIELD
-Mutex* SamplerThread::mutex_ = OS::CreateMutex();
+Mutex* SamplerThread::mutex_ = NULL;
SamplerThread* SamplerThread::instance_ = NULL;
+void OS::SetUp() {
+ // Seed the random number generator. We preserve microsecond resolution.
+ uint64_t seed = Ticks() ^ (getpid() << 16);
+ srandom(static_cast<unsigned int>(seed));
+ limit_mutex = CreateMutex();
+ SamplerThread::SetUp();
+}
+
+
Sampler::Sampler(Isolate* isolate, int interval)
: isolate_(isolate),
interval_(interval),
diff --git a/src/3rdparty/v8/src/platform-nullos.cc b/src/3rdparty/v8/src/platform-nullos.cc
index 8c2a863..42799db 100644
--- a/src/3rdparty/v8/src/platform-nullos.cc
+++ b/src/3rdparty/v8/src/platform-nullos.cc
@@ -55,13 +55,42 @@ double modulo(double x, double y) {
}
+double fast_sin(double x) {
+ UNIMPLEMENTED();
+ return 0;
+}
+
+
+double fast_cos(double x) {
+ UNIMPLEMENTED();
+ return 0;
+}
+
+
+double fast_tan(double x) {
+ UNIMPLEMENTED();
+ return 0;
+}
+
+
+double fast_log(double x) {
+ UNIMPLEMENTED();
+ return 0;
+}
+
+
// Initialize OS class early in the V8 startup.
-void OS::Setup() {
+void OS::SetUp() {
// Seed the random number generator.
UNIMPLEMENTED();
}
+void OS::PostSetUp() {
+ UNIMPLEMENTED();
+}
+
+
// Returns the accumulated user time for thread.
int OS::GetUserTime(uint32_t* secs, uint32_t* usecs) {
UNIMPLEMENTED();
@@ -295,6 +324,12 @@ bool VirtualMemory::Uncommit(void* address, size_t size) {
}
+bool VirtualMemory::Guard(void* address) {
+ UNIMPLEMENTED();
+ return false;
+}
+
+
class Thread::PlatformData : public Malloced {
public:
PlatformData() {
diff --git a/src/3rdparty/v8/src/platform-openbsd.cc b/src/3rdparty/v8/src/platform-openbsd.cc
index 3151d18..2b2d530 100644
--- a/src/3rdparty/v8/src/platform-openbsd.cc
+++ b/src/3rdparty/v8/src/platform-openbsd.cc
@@ -1,4 +1,4 @@
-// Copyright 2006-2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -25,87 +25,105 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Platform specific code for OpenBSD goes here. For the POSIX comaptible parts
-// the implementation is in platform-posix.cc.
+// Platform specific code for OpenBSD and NetBSD goes here. For the POSIX
+// comaptible parts the implementation is in platform-posix.cc.
#include <pthread.h>
#include <semaphore.h>
#include <signal.h>
#include <sys/time.h>
#include <sys/resource.h>
+#include <sys/syscall.h>
#include <sys/types.h>
#include <stdlib.h>
#include <sys/types.h> // mmap & munmap
#include <sys/mman.h> // mmap & munmap
#include <sys/stat.h> // open
-#include <sys/fcntl.h> // open
-#include <unistd.h> // getpagesize
+#include <fcntl.h> // open
+#include <unistd.h> // sysconf
#include <execinfo.h> // backtrace, backtrace_symbols
#include <strings.h> // index
#include <errno.h>
#include <stdarg.h>
-#include <limits.h>
#undef MAP_TYPE
#include "v8.h"
-#include "v8threads.h"
+#include "platform-posix.h"
#include "platform.h"
+#include "v8threads.h"
#include "vm-state-inl.h"
namespace v8 {
namespace internal {
-// 0 is never a valid thread id on OpenBSD since tids and pids share a
-// name space and pid 0 is used to kill the group (see man 2 kill).
+// 0 is never a valid thread id on Linux and OpenBSD since tids and pids share a
+// name space and pid 0 is reserved (see man 2 kill).
static const pthread_t kNoThread = (pthread_t) 0;
double ceiling(double x) {
- // Correct as on OS X
- if (-1.0 < x && x < 0.0) {
- return -0.0;
- } else {
- return ceil(x);
- }
+ return ceil(x);
}
static Mutex* limit_mutex = NULL;
-void OS::Setup() {
- // Seed the random number generator.
- // Convert the current time to a 64-bit integer first, before converting it
- // to an unsigned. Going directly can cause an overflow and the seed to be
- // set to all ones. The seed will be identical for different instances that
- // call this setup code within the same millisecond.
- uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
- srandom(static_cast<unsigned int>(seed));
- limit_mutex = CreateMutex();
+static void* GetRandomMmapAddr() {
+ Isolate* isolate = Isolate::UncheckedCurrent();
+ // Note that the current isolate isn't set up in a call path via
+ // CpuFeatures::Probe. We don't care about randomization in this case because
+ // the code page is immediately freed.
+ if (isolate != NULL) {
+#ifdef V8_TARGET_ARCH_X64
+ uint64_t rnd1 = V8::RandomPrivate(isolate);
+ uint64_t rnd2 = V8::RandomPrivate(isolate);
+ uint64_t raw_addr = (rnd1 << 32) ^ rnd2;
+ // Currently available CPUs have 48 bits of virtual addressing. Truncate
+ // the hint address to 46 bits to give the kernel a fighting chance of
+ // fulfilling our placement request.
+ raw_addr &= V8_UINT64_C(0x3ffffffff000);
+#else
+ uint32_t raw_addr = V8::RandomPrivate(isolate);
+ // The range 0x20000000 - 0x60000000 is relatively unpopulated across a
+ // variety of ASLR modes (PAE kernel, NX compat mode, etc).
+ raw_addr &= 0x3ffff000;
+ raw_addr += 0x20000000;
+#endif
+ return reinterpret_cast<void*>(raw_addr);
+ }
+ return NULL;
}
-void OS::ReleaseStore(volatile AtomicWord* ptr, AtomicWord value) {
- __asm__ __volatile__("" : : : "memory");
- *ptr = value;
+void OS::PostSetUp() {
+ POSIXPostSetUp();
}
uint64_t OS::CpuFeaturesImpliedByPlatform() {
- return 0; // OpenBSD runs on anything.
+ return 0;
}
int OS::ActivationFrameAlignment() {
- // 16 byte alignment on OpenBSD
+ // With gcc 4.4 the tree vectorization optimizer can generate code
+ // that requires 16 byte alignment such as movdqa on x86.
return 16;
}
+void OS::ReleaseStore(volatile AtomicWord* ptr, AtomicWord value) {
+ __asm__ __volatile__("" : : : "memory");
+ // An x86 store acts as a release barrier.
+ *ptr = value;
+}
+
+
const char* OS::LocalTimezone(double time) {
if (isnan(time)) return "";
time_t tv = static_cast<time_t>(floor(time/msPerSecond));
@@ -126,7 +144,7 @@ double OS::LocalTimeOffset() {
// We keep the lowest and highest addresses mapped as a quick way of
// determining that pointers are outside the heap (used mostly in assertions
-// and verification). The estimate is conservative, ie, not all addresses in
+// and verification). The estimate is conservative, i.e., not all addresses in
// 'allocated' space are actually allocated to our heap. The range is
// [lowest, highest), inclusive on the low and and exclusive on the high end.
static void* lowest_ever_allocated = reinterpret_cast<void*>(-1);
@@ -150,19 +168,20 @@ bool OS::IsOutsideAllocatedSpace(void* address) {
size_t OS::AllocateAlignment() {
- return getpagesize();
+ return sysconf(_SC_PAGESIZE);
}
void* OS::Allocate(const size_t requested,
size_t* allocated,
- bool executable) {
- const size_t msize = RoundUp(requested, getpagesize());
- int prot = PROT_READ | PROT_WRITE | (executable ? PROT_EXEC : 0);
- void* mbase = mmap(NULL, msize, prot, MAP_PRIVATE | MAP_ANON, -1, 0);
-
+ bool is_executable) {
+ const size_t msize = RoundUp(requested, AllocateAlignment());
+ int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
+ void* addr = GetRandomMmapAddr();
+ void* mbase = mmap(addr, msize, prot, MAP_PRIVATE | MAP_ANON, -1, 0);
if (mbase == MAP_FAILED) {
- LOG(ISOLATE, StringEvent("OS::Allocate", "mmap failed"));
+ LOG(i::Isolate::Current(),
+ StringEvent("OS::Allocate", "mmap failed"));
return NULL;
}
*allocated = msize;
@@ -171,9 +190,9 @@ void* OS::Allocate(const size_t requested,
}
-void OS::Free(void* buf, const size_t length) {
+void OS::Free(void* address, const size_t size) {
// TODO(1240712): munmap has a return value which is ignored here.
- int result = munmap(buf, length);
+ int result = munmap(address, size);
USE(result);
ASSERT(result == 0);
}
@@ -192,13 +211,7 @@ void OS::Abort() {
void OS::DebugBreak() {
-#if (defined(__arm__) || defined(__thumb__))
-# if defined(CAN_USE_ARMV5_INSTRUCTIONS)
- asm("bkpt 0");
-# endif
-#else
asm("int $3");
-#endif
}
@@ -250,56 +263,90 @@ PosixMemoryMappedFile::~PosixMemoryMappedFile() {
}
-static unsigned StringToLong(char* buffer) {
- return static_cast<unsigned>(strtol(buffer, NULL, 16)); // NOLINT
-}
-
-
void OS::LogSharedLibraryAddresses() {
- static const int MAP_LENGTH = 1024;
- int fd = open("/proc/self/maps", O_RDONLY);
- if (fd < 0) return;
+ // This function assumes that the layout of the file is as follows:
+ // hex_start_addr-hex_end_addr rwxp <unused data> [binary_file_name]
+ // If we encounter an unexpected situation we abort scanning further entries.
+ FILE* fp = fopen("/proc/self/maps", "r");
+ if (fp == NULL) return;
+
+ // Allocate enough room to be able to store a full file name.
+ const int kLibNameLen = FILENAME_MAX + 1;
+ char* lib_name = reinterpret_cast<char*>(malloc(kLibNameLen));
+
+ i::Isolate* isolate = ISOLATE;
+ // This loop will terminate once the scanning hits an EOF.
while (true) {
- char addr_buffer[11];
- addr_buffer[0] = '0';
- addr_buffer[1] = 'x';
- addr_buffer[10] = 0;
- int result = read(fd, addr_buffer + 2, 8);
- if (result < 8) break;
- unsigned start = StringToLong(addr_buffer);
- result = read(fd, addr_buffer + 2, 1);
- if (result < 1) break;
- if (addr_buffer[2] != '-') break;
- result = read(fd, addr_buffer + 2, 8);
- if (result < 8) break;
- unsigned end = StringToLong(addr_buffer);
- char buffer[MAP_LENGTH];
- int bytes_read = -1;
- do {
- bytes_read++;
- if (bytes_read >= MAP_LENGTH - 1)
- break;
- result = read(fd, buffer + bytes_read, 1);
- if (result < 1) break;
- } while (buffer[bytes_read] != '\n');
- buffer[bytes_read] = 0;
- // Ignore mappings that are not executable.
- if (buffer[3] != 'x') continue;
- char* start_of_path = index(buffer, '/');
- // There may be no filename in this line. Skip to next.
- if (start_of_path == NULL) continue;
- buffer[bytes_read] = 0;
- LOG(i::Isolate::Current(), SharedLibraryEvent(start_of_path, start, end));
+ uintptr_t start, end;
+ char attr_r, attr_w, attr_x, attr_p;
+ // Parse the addresses and permission bits at the beginning of the line.
+ if (fscanf(fp, "%" V8PRIxPTR "-%" V8PRIxPTR, &start, &end) != 2) break;
+ if (fscanf(fp, " %c%c%c%c", &attr_r, &attr_w, &attr_x, &attr_p) != 4) break;
+
+ int c;
+ if (attr_r == 'r' && attr_w != 'w' && attr_x == 'x') {
+ // Found a read-only executable entry. Skip characters until we reach
+ // the beginning of the filename or the end of the line.
+ do {
+ c = getc(fp);
+ } while ((c != EOF) && (c != '\n') && (c != '/'));
+ if (c == EOF) break; // EOF: Was unexpected, just exit.
+
+ // Process the filename if found.
+ if (c == '/') {
+ ungetc(c, fp); // Push the '/' back into the stream to be read below.
+
+ // Read to the end of the line. Exit if the read fails.
+ if (fgets(lib_name, kLibNameLen, fp) == NULL) break;
+
+ // Drop the newline character read by fgets. We do not need to check
+ // for a zero-length string because we know that we at least read the
+ // '/' character.
+ lib_name[strlen(lib_name) - 1] = '\0';
+ } else {
+ // No library name found, just record the raw address range.
+ snprintf(lib_name, kLibNameLen,
+ "%08" V8PRIxPTR "-%08" V8PRIxPTR, start, end);
+ }
+ LOG(isolate, SharedLibraryEvent(lib_name, start, end));
+ } else {
+ // Entry not describing executable data. Skip to end of line to set up
+ // reading the next entry.
+ do {
+ c = getc(fp);
+ } while ((c != EOF) && (c != '\n'));
+ if (c == EOF) break;
+ }
}
- close(fd);
+ free(lib_name);
+ fclose(fp);
}
+static const char kGCFakeMmap[] = "/tmp/__v8_gc__";
+
+
void OS::SignalCodeMovingGC() {
+ // Support for ll_prof.py.
+ //
+ // The Linux profiler built into the kernel logs all mmap's with
+ // PROT_EXEC so that analysis tools can properly attribute ticks. We
+ // do a mmap with a name known by ll_prof.py and immediately munmap
+ // it. This injects a GC marker into the stream of events generated
+ // by the kernel and allows us to synchronize V8 code log and the
+ // kernel log.
+ int size = sysconf(_SC_PAGESIZE);
+ FILE* f = fopen(kGCFakeMmap, "w+");
+ void* addr = mmap(NULL, size, PROT_READ | PROT_EXEC, MAP_PRIVATE,
+ fileno(f), 0);
+ ASSERT(addr != MAP_FAILED);
+ OS::Free(addr, size);
+ fclose(f);
}
int OS::StackWalk(Vector<OS::StackFrame> frames) {
+ // backtrace is a glibc extension.
int frames_size = frames.length();
ScopedVector<void*> addresses(frames_size);
@@ -331,65 +378,146 @@ int OS::StackWalk(Vector<OS::StackFrame> frames) {
static const int kMmapFd = -1;
static const int kMmapFdOffset = 0;
+VirtualMemory::VirtualMemory() : address_(NULL), size_(0) { }
VirtualMemory::VirtualMemory(size_t size) {
- address_ = mmap(NULL, size, PROT_NONE,
- MAP_PRIVATE | MAP_ANON | MAP_NORESERVE,
- kMmapFd, kMmapFdOffset);
+ address_ = ReserveRegion(size);
size_ = size;
}
+VirtualMemory::VirtualMemory(size_t size, size_t alignment)
+ : address_(NULL), size_(0) {
+ ASSERT(IsAligned(alignment, static_cast<intptr_t>(OS::AllocateAlignment())));
+ size_t request_size = RoundUp(size + alignment,
+ static_cast<intptr_t>(OS::AllocateAlignment()));
+ void* reservation = mmap(GetRandomMmapAddr(),
+ request_size,
+ PROT_NONE,
+ MAP_PRIVATE | MAP_ANON | MAP_NORESERVE,
+ kMmapFd,
+ kMmapFdOffset);
+ if (reservation == MAP_FAILED) return;
+
+ Address base = static_cast<Address>(reservation);
+ Address aligned_base = RoundUp(base, alignment);
+ ASSERT_LE(base, aligned_base);
+
+ // Unmap extra memory reserved before and after the desired block.
+ if (aligned_base != base) {
+ size_t prefix_size = static_cast<size_t>(aligned_base - base);
+ OS::Free(base, prefix_size);
+ request_size -= prefix_size;
+ }
+
+ size_t aligned_size = RoundUp(size, OS::AllocateAlignment());
+ ASSERT_LE(aligned_size, request_size);
+
+ if (aligned_size != request_size) {
+ size_t suffix_size = request_size - aligned_size;
+ OS::Free(aligned_base + aligned_size, suffix_size);
+ request_size -= suffix_size;
+ }
+
+ ASSERT(aligned_size == request_size);
+
+ address_ = static_cast<void*>(aligned_base);
+ size_ = aligned_size;
+}
+
+
VirtualMemory::~VirtualMemory() {
if (IsReserved()) {
- OS::Free(address(), size());
- address_ = MAP_FAILED
+ bool result = ReleaseRegion(address(), size());
+ ASSERT(result);
+ USE(result);
}
}
bool VirtualMemory::IsReserved() {
- return address_ != MAP_FAILED;
+ return address_ != NULL;
+}
+
+
+void VirtualMemory::Reset() {
+ address_ = NULL;
+ size_ = 0;
}
-bool VirtualMemory::Commit(void* address, size_t size, bool executable) {
- int prot = PROT_READ | PROT_WRITE | (executable ? PROT_EXEC : 0);
- if (MAP_FAILED == mmap(address, size, prot,
+bool VirtualMemory::Commit(void* address, size_t size, bool is_executable) {
+ return CommitRegion(address, size, is_executable);
+}
+
+
+bool VirtualMemory::Uncommit(void* address, size_t size) {
+ return UncommitRegion(address, size);
+}
+
+
+bool VirtualMemory::Guard(void* address) {
+ OS::Guard(address, OS::CommitPageSize());
+ return true;
+}
+
+
+void* VirtualMemory::ReserveRegion(size_t size) {
+ void* result = mmap(GetRandomMmapAddr(),
+ size,
+ PROT_NONE,
+ MAP_PRIVATE | MAP_ANON | MAP_NORESERVE,
+ kMmapFd,
+ kMmapFdOffset);
+
+ if (result == MAP_FAILED) return NULL;
+
+ return result;
+}
+
+
+bool VirtualMemory::CommitRegion(void* base, size_t size, bool is_executable) {
+ int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
+ if (MAP_FAILED == mmap(base,
+ size,
+ prot,
MAP_PRIVATE | MAP_ANON | MAP_FIXED,
- kMmapFd, kMmapFdOffset)) {
+ kMmapFd,
+ kMmapFdOffset)) {
return false;
}
- UpdateAllocatedSpaceLimits(address, size);
+ UpdateAllocatedSpaceLimits(base, size);
return true;
}
-bool VirtualMemory::Uncommit(void* address, size_t size) {
- return mmap(address, size, PROT_NONE,
+bool VirtualMemory::UncommitRegion(void* base, size_t size) {
+ return mmap(base,
+ size,
+ PROT_NONE,
MAP_PRIVATE | MAP_ANON | MAP_NORESERVE | MAP_FIXED,
- kMmapFd, kMmapFdOffset) != MAP_FAILED;
+ kMmapFd,
+ kMmapFdOffset) != MAP_FAILED;
+}
+
+
+bool VirtualMemory::ReleaseRegion(void* base, size_t size) {
+ return munmap(base, size) == 0;
}
class Thread::PlatformData : public Malloced {
public:
+ PlatformData() : thread_(kNoThread) {}
+
pthread_t thread_; // Thread handle for pthread.
};
-
Thread::Thread(const Options& options)
- : data_(new PlatformData),
- stack_size_(options.stack_size) {
- set_name(options.name);
-}
-
-
-Thread::Thread(const char* name)
- : data_(new PlatformData),
- stack_size_(0) {
- set_name(name);
+ : data_(new PlatformData()),
+ stack_size_(options.stack_size()) {
+ set_name(options.name());
}
@@ -403,6 +531,11 @@ static void* ThreadEntry(void* arg) {
// This is also initialized by the first argument to pthread_create() but we
// don't know which thread will run first (the original thread or the new
// one) so we initialize it here too.
+#ifdef PR_SET_NAME
+ prctl(PR_SET_NAME,
+ reinterpret_cast<unsigned long>(thread->name()), // NOLINT
+ 0, 0, 0);
+#endif
thread->data()->thread_ = pthread_self();
ASSERT(thread->data()->thread_ != kNoThread);
thread->Run();
@@ -478,6 +611,7 @@ class OpenBSDMutex : public Mutex {
ASSERT(result == 0);
result = pthread_mutex_init(&mutex_, &attrs);
ASSERT(result == 0);
+ USE(result);
}
virtual ~OpenBSDMutex() { pthread_mutex_destroy(&mutex_); }
@@ -534,6 +668,14 @@ void OpenBSDSemaphore::Wait() {
}
+#ifndef TIMEVAL_TO_TIMESPEC
+#define TIMEVAL_TO_TIMESPEC(tv, ts) do { \
+ (ts)->tv_sec = (tv)->tv_sec; \
+ (ts)->tv_nsec = (tv)->tv_usec * 1000; \
+} while (false)
+#endif
+
+
bool OpenBSDSemaphore::Wait(int timeout) {
const long kOneSecondMicros = 1000000; // NOLINT
@@ -567,29 +709,15 @@ bool OpenBSDSemaphore::Wait(int timeout) {
}
}
-
Semaphore* OS::CreateSemaphore(int count) {
return new OpenBSDSemaphore(count);
}
static pthread_t GetThreadID() {
- pthread_t thread_id = pthread_self();
- return thread_id;
+ return pthread_self();
}
-
-class Sampler::PlatformData : public Malloced {
- public:
- PlatformData() : vm_tid_(GetThreadID()) {}
-
- pthread_t vm_tid() const { return vm_tid_; }
-
- private:
- pthread_t vm_tid_;
-};
-
-
static void ProfilerSignalHandler(int signal, siginfo_t* info, void* context) {
USE(info);
if (signal != SIGPROF) return;
@@ -611,8 +739,20 @@ static void ProfilerSignalHandler(int signal, siginfo_t* info, void* context) {
if (sample == NULL) sample = &sample_obj;
// Extracting the sample from the context is extremely machine dependent.
- ucontext_t* ucontext = reinterpret_cast<ucontext_t*>(context);
sample->state = isolate->current_vm_state();
+ ucontext_t* ucontext = reinterpret_cast<ucontext_t*>(context);
+#ifdef __NetBSD__
+ mcontext_t& mcontext = ucontext->uc_mcontext;
+#if V8_HOST_ARCH_IA32
+ sample->pc = reinterpret_cast<Address>(mcontext.__gregs[_REG_EIP]);
+ sample->sp = reinterpret_cast<Address>(mcontext.__gregs[_REG_ESP]);
+ sample->fp = reinterpret_cast<Address>(mcontext.__gregs[_REG_EBP]);
+#elif V8_HOST_ARCH_X64
+ sample->pc = reinterpret_cast<Address>(mcontext.__gregs[_REG_RIP]);
+ sample->sp = reinterpret_cast<Address>(mcontext.__gregs[_REG_RSP]);
+ sample->fp = reinterpret_cast<Address>(mcontext.__gregs[_REG_RBP]);
+#endif // V8_HOST_ARCH
+#else // OpenBSD
#if V8_HOST_ARCH_IA32
sample->pc = reinterpret_cast<Address>(ucontext->sc_eip);
sample->sp = reinterpret_cast<Address>(ucontext->sc_esp);
@@ -621,16 +761,24 @@ static void ProfilerSignalHandler(int signal, siginfo_t* info, void* context) {
sample->pc = reinterpret_cast<Address>(ucontext->sc_rip);
sample->sp = reinterpret_cast<Address>(ucontext->sc_rsp);
sample->fp = reinterpret_cast<Address>(ucontext->sc_rbp);
-#elif V8_HOST_ARCH_ARM
- sample->pc = reinterpret_cast<Address>(ucontext->sc_r15);
- sample->sp = reinterpret_cast<Address>(ucontext->sc_r13);
- sample->fp = reinterpret_cast<Address>(ucontext->sc_r11);
-#endif
+#endif // V8_HOST_ARCH
+#endif // __NetBSD__
sampler->SampleStack(sample);
sampler->Tick(sample);
}
+class Sampler::PlatformData : public Malloced {
+ public:
+ PlatformData() : vm_tid_(GetThreadID()) {}
+
+ pthread_t vm_tid() const { return vm_tid_; }
+
+ private:
+ pthread_t vm_tid_;
+};
+
+
class SignalSender : public Thread {
public:
enum SleepInterval {
@@ -638,23 +786,41 @@ class SignalSender : public Thread {
FULL_INTERVAL
};
+ static const int kSignalSenderStackSize = 64 * KB;
+
explicit SignalSender(int interval)
- : Thread("SignalSender"),
+ : Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),
+ vm_tgid_(getpid()),
interval_(interval) {}
+ static void SetUp() {
+ if (!mutex_) {
+ mutex_ = OS::CreateMutex();
+ }
+ }
+
+ static void InstallSignalHandler() {
+ struct sigaction sa;
+ sa.sa_sigaction = ProfilerSignalHandler;
+ sigemptyset(&sa.sa_mask);
+ sa.sa_flags = SA_RESTART | SA_SIGINFO;
+ signal_handler_installed_ =
+ (sigaction(SIGPROF, &sa, &old_signal_handler_) == 0);
+ }
+
+ static void RestoreSignalHandler() {
+ if (signal_handler_installed_) {
+ sigaction(SIGPROF, &old_signal_handler_, 0);
+ signal_handler_installed_ = false;
+ }
+ }
+
static void AddActiveSampler(Sampler* sampler) {
ScopedLock lock(mutex_);
SamplerRegistry::AddActiveSampler(sampler);
if (instance_ == NULL) {
- // Install a signal handler.
- struct sigaction sa;
- sa.sa_sigaction = ProfilerSignalHandler;
- sigemptyset(&sa.sa_mask);
- sa.sa_flags = SA_RESTART | SA_SIGINFO;
- signal_handler_installed_ =
- (sigaction(SIGPROF, &sa, &old_signal_handler_) == 0);
-
- // Start a thread that sends SIGPROF signal to VM threads.
+ // Start a thread that will send SIGPROF signal to VM threads,
+ // when CPU profiling will be enabled.
instance_ = new SignalSender(sampler->interval());
instance_->Start();
} else {
@@ -669,12 +835,7 @@ class SignalSender : public Thread {
RuntimeProfiler::StopRuntimeProfilerThreadBeforeShutdown(instance_);
delete instance_;
instance_ = NULL;
-
- // Restore the old signal handler.
- if (signal_handler_installed_) {
- sigaction(SIGPROF, &old_signal_handler_, 0);
- signal_handler_installed_ = false;
- }
+ RestoreSignalHandler();
}
}
@@ -686,6 +847,11 @@ class SignalSender : public Thread {
bool cpu_profiling_enabled =
(state == SamplerRegistry::HAS_CPU_PROFILING_SAMPLERS);
bool runtime_profiler_enabled = RuntimeProfiler::IsEnabled();
+ if (cpu_profiling_enabled && !signal_handler_installed_) {
+ InstallSignalHandler();
+ } else if (!cpu_profiling_enabled && signal_handler_installed_) {
+ RestoreSignalHandler();
+ }
// When CPU profiling is enabled both JavaScript and C++ code is
// profiled. We must not suspend.
if (!cpu_profiling_enabled) {
@@ -752,6 +918,7 @@ class SignalSender : public Thread {
USE(result);
}
+ const int vm_tgid_;
const int interval_;
RuntimeProfilerRateLimiter rate_limiter_;
@@ -761,15 +928,26 @@ class SignalSender : public Thread {
static bool signal_handler_installed_;
static struct sigaction old_signal_handler_;
+ private:
DISALLOW_COPY_AND_ASSIGN(SignalSender);
};
-Mutex* SignalSender::mutex_ = OS::CreateMutex();
+
+Mutex* SignalSender::mutex_ = NULL;
SignalSender* SignalSender::instance_ = NULL;
struct sigaction SignalSender::old_signal_handler_;
bool SignalSender::signal_handler_installed_ = false;
+void OS::SetUp() {
+ // Seed the random number generator. We preserve microsecond resolution.
+ uint64_t seed = Ticks() ^ (getpid() << 16);
+ srandom(static_cast<unsigned int>(seed));
+ limit_mutex = CreateMutex();
+ SignalSender::SetUp();
+}
+
+
Sampler::Sampler(Isolate* isolate, int interval)
: isolate_(isolate),
interval_(interval),
diff --git a/src/3rdparty/v8/src/platform-posix.cc b/src/3rdparty/v8/src/platform-posix.cc
index 78fece3..6631659 100644
--- a/src/3rdparty/v8/src/platform-posix.cc
+++ b/src/3rdparty/v8/src/platform-posix.cc
@@ -29,6 +29,8 @@
// own but contains the parts which are the same across POSIX platforms Linux,
// Mac OS, FreeBSD and OpenBSD.
+#include "platform-posix.h"
+
#include <unistd.h>
#include <errno.h>
#include <time.h>
@@ -46,13 +48,14 @@
#undef MAP_TYPE
-#if defined(ANDROID)
+#if defined(ANDROID) && !defined(V8_ANDROID_LOG_STDOUT)
#define LOG_TAG "v8"
-#include <utils/Log.h> // LOG_PRI_VA
+#include <android/log.h>
#endif
#include "v8.h"
+#include "codegen.h"
#include "platform.h"
namespace v8 {
@@ -70,6 +73,12 @@ intptr_t OS::MaxVirtualMemory() {
}
+intptr_t OS::CommitPageSize() {
+ static intptr_t page_size = getpagesize();
+ return page_size;
+}
+
+
#ifndef __CYGWIN__
// Get rid of writable permission on code allocations.
void OS::ProtectCode(void* address, const size_t size) {
@@ -120,6 +129,24 @@ double modulo(double x, double y) {
}
+#define UNARY_MATH_FUNCTION(name, generator) \
+static UnaryMathFunction fast_##name##_function = NULL; \
+void init_fast_##name##_function() { \
+ fast_##name##_function = generator; \
+} \
+double fast_##name(double x) { \
+ return (*fast_##name##_function)(x); \
+}
+
+UNARY_MATH_FUNCTION(sin, CreateTranscendentalFunction(TranscendentalCache::SIN))
+UNARY_MATH_FUNCTION(cos, CreateTranscendentalFunction(TranscendentalCache::COS))
+UNARY_MATH_FUNCTION(tan, CreateTranscendentalFunction(TranscendentalCache::TAN))
+UNARY_MATH_FUNCTION(log, CreateTranscendentalFunction(TranscendentalCache::LOG))
+UNARY_MATH_FUNCTION(sqrt, CreateSqrtFunction())
+
+#undef MATH_FUNCTION
+
+
double OS::nan_value() {
// NAN from math.h is defined in C99 and not in POSIX.
return NAN;
@@ -210,7 +237,7 @@ void OS::Print(const char* format, ...) {
void OS::VPrint(const char* format, va_list args) {
#if defined(ANDROID) && !defined(V8_ANDROID_LOG_STDOUT)
- LOG_PRI_VA(ANDROID_LOG_INFO, LOG_TAG, format, args);
+ __android_log_vprint(ANDROID_LOG_INFO, LOG_TAG, format, args);
#else
vprintf(format, args);
#endif
@@ -227,7 +254,7 @@ void OS::FPrint(FILE* out, const char* format, ...) {
void OS::VFPrint(FILE* out, const char* format, va_list args) {
#if defined(ANDROID) && !defined(V8_ANDROID_LOG_STDOUT)
- LOG_PRI_VA(ANDROID_LOG_INFO, LOG_TAG, format, args);
+ __android_log_vprint(ANDROID_LOG_INFO, LOG_TAG, format, args);
#else
vfprintf(out, format, args);
#endif
@@ -244,7 +271,7 @@ void OS::PrintError(const char* format, ...) {
void OS::VPrintError(const char* format, va_list args) {
#if defined(ANDROID) && !defined(V8_ANDROID_LOG_STDOUT)
- LOG_PRI_VA(ANDROID_LOG_ERROR, LOG_TAG, format, args);
+ __android_log_vprint(ANDROID_LOG_ERROR, LOG_TAG, format, args);
#else
vfprintf(stderr, format, args);
#endif
@@ -277,20 +304,11 @@ int OS::VSNPrintF(Vector<char> str,
#if defined(V8_TARGET_ARCH_IA32)
static OS::MemCopyFunction memcopy_function = NULL;
-static Mutex* memcopy_function_mutex = OS::CreateMutex();
// Defined in codegen-ia32.cc.
OS::MemCopyFunction CreateMemCopyFunction();
// Copy memory area to disjoint memory area.
void OS::MemCopy(void* dest, const void* src, size_t size) {
- if (memcopy_function == NULL) {
- ScopedLock lock(memcopy_function_mutex);
- if (memcopy_function == NULL) {
- OS::MemCopyFunction temp = CreateMemCopyFunction();
- MemoryBarrier();
- memcopy_function = temp;
- }
- }
// Note: here we rely on dependent reads being ordered. This is true
// on all architectures we currently support.
(*memcopy_function)(dest, src, size);
@@ -300,6 +318,18 @@ void OS::MemCopy(void* dest, const void* src, size_t size) {
}
#endif // V8_TARGET_ARCH_IA32
+
+void POSIXPostSetUp() {
+#if defined(V8_TARGET_ARCH_IA32)
+ memcopy_function = CreateMemCopyFunction();
+#endif
+ init_fast_sin_function();
+ init_fast_cos_function();
+ init_fast_tan_function();
+ init_fast_log_function();
+ init_fast_sqrt_function();
+}
+
// ----------------------------------------------------------------------------
// POSIX string support.
//
@@ -455,7 +485,7 @@ bool POSIXSocket::SetReuseAddress(bool reuse_address) {
}
-bool Socket::Setup() {
+bool Socket::SetUp() {
// Nothing to do on POSIX.
return true;
}
diff --git a/src/3rdparty/v8/src/extensions/experimental/i18n-natives.h b/src/3rdparty/v8/src/platform-posix.h
index 37362d0..7a982ed 100644
--- a/src/3rdparty/v8/src/extensions/experimental/i18n-natives.h
+++ b/src/3rdparty/v8/src/platform-posix.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -25,19 +25,15 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#ifndef V8_EXTENSIONS_EXPERIMENTAL_I18N_NATIVES_H_
-#define V8_EXTENSIONS_EXPERIMENTAL_I18N_NATIVES_H_
+#ifndef V8_PLATFORM_POSIX_H_
+#define V8_PLATFORM_POSIX_H_
namespace v8 {
namespace internal {
-class I18Natives {
- public:
- // Gets script source from generated file.
- // Source is statically allocated string.
- static const char* GetScriptSource();
-};
+// Used by platform implementation files during OS::PostSetUp().
+void POSIXPostSetUp();
} } // namespace v8::internal
-#endif // V8_EXTENSIONS_EXPERIMENTAL_I18N_NATIVES_H_
+#endif // V8_PLATFORM_POSIX_H_
diff --git a/src/3rdparty/v8/src/platform-qnx.cc b/src/3rdparty/v8/src/platform-qnx.cc
deleted file mode 100644
index 95cb120..0000000
--- a/src/3rdparty/v8/src/platform-qnx.cc
+++ /dev/null
@@ -1,1072 +0,0 @@
-// Copyright 2012 Research in Motion. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Platform specific code for QNX goes here. For the POSIX comaptible parts
-// the implementation is in platform-posix.cc.
-
-#include <pthread.h>
-#include <semaphore.h>
-#include <signal.h>
-#include <sys/time.h>
-#include <sys/resource.h>
-#include <sys/types.h>
-#include <stdlib.h>
-#include <ucontext.h>
-#include <backtrace.h>
-
-// QNX requires memory pages to be marked as
-// executable. Otherwise, OS raises an exception when executing code
-// in that page.
-#include <sys/types.h> // mmap & munmap
-#include <sys/mman.h> // mmap & munmap
-#include <sys/stat.h> // open
-#include <fcntl.h> // open
-#include <unistd.h> // sysconf
-#include <strings.h> // index
-#include <errno.h>
-#include <stdarg.h>
-#include <sys/procfs.h>
-#include <sys/syspage.h>
-
-#undef MAP_TYPE
-
-#include "v8.h"
-
-#include "platform.h"
-#include "v8threads.h"
-#include "vm-state-inl.h"
-
-
-namespace v8 {
-namespace internal {
-
-// 0 is never a valid thread id on QNX since tids and pids share a
-// name space and pid 0 is reserved (see man 2 kill).
-static const pthread_t kNoThread = (pthread_t) 0;
-
-
-double ceiling(double x) {
- return ceil(x);
-}
-
-
-static Mutex* limit_mutex = NULL;
-
-
-void OS::Setup() {
- // Seed the random number generator. We preserve microsecond resolution.
- uint64_t seed = Ticks() ^ (getpid() << 16);
- srandom(static_cast<unsigned int>(seed));
- limit_mutex = CreateMutex();
-
-#ifdef __arm__
- // When running on ARM hardware check that the EABI used by V8 and
- // by the C code is the same.
- bool hard_float = OS::ArmUsingHardFloat();
- if (hard_float) {
-#if !USE_EABI_HARDFLOAT
- PrintF("ERROR: Binary compiled with -mfloat-abi=hard but without "
- "-DUSE_EABI_HARDFLOAT\n");
- exit(1);
-#endif
- } else {
-#if USE_EABI_HARDFLOAT
- PrintF("ERROR: Binary not compiled with -mfloat-abi=hard but with "
- "-DUSE_EABI_HARDFLOAT\n");
- exit(1);
-#endif
- }
-#endif
-}
-
-
-uint64_t OS::CpuFeaturesImpliedByPlatform() {
- return 0; // QNX runs on anything.
-}
-
-
-#ifdef __arm__
-static bool CPUInfoContainsString(const char * search_string) {
- const char* file_name = "/proc/cpuinfo";
- // This is written as a straight shot one pass parser
- // and not using STL string and ifstream because,
- // on QNX, it's reading from a (non-mmap-able)
- // character special device.
- FILE* f = NULL;
- const char* what = search_string;
-
- if (NULL == (f = fopen(file_name, "r")))
- return false;
-
- int k;
- while (EOF != (k = fgetc(f))) {
- if (k == *what) {
- ++what;
- while ((*what != '\0') && (*what == fgetc(f))) {
- ++what;
- }
- if (*what == '\0') {
- fclose(f);
- return true;
- } else {
- what = search_string;
- }
- }
- }
- fclose(f);
-
- // Did not find string in the proc file.
- return false;
-}
-
-
-bool OS::ArmCpuHasFeature(CpuFeature feature) {
- switch (feature) {
- case VFP3:
- // All shipping devices currently support this and QNX has no easy way to
- // determine this at runtime.
- return true;
- case ARMv7:
- return (SYSPAGE_ENTRY(cpuinfo)->flags & ARM_CPU_FLAG_V7) != 0;
- default:
- UNREACHABLE();
- }
-
- return false;
-}
-
-
-// Simple helper function to detect whether the C code is compiled with
-// option -mfloat-abi=hard. The register d0 is loaded with 1.0 and the register
-// pair r0, r1 is loaded with 0.0. If -mfloat-abi=hard is pased to GCC then
-// calling this will return 1.0 and otherwise 0.0.
-static void ArmUsingHardFloatHelper() {
- asm("mov r0, #0");
-#if defined(__VFP_FP__) && !defined(__SOFTFP__)
- // Load 0x3ff00000 into r1 using instructions available in both ARM
- // and Thumb mode.
- asm("mov r1, #3");
- asm("mov r2, #255");
- asm("lsl r1, r1, #8");
- asm("orr r1, r1, r2");
- asm("lsl r1, r1, #20");
- // For vmov d0, r0, r1 use ARM mode.
-#ifdef __thumb__
- asm volatile(
- "@ Enter ARM Mode \n\t"
- " adr r3, 1f \n\t"
- " bx r3 \n\t"
- " .ALIGN 4 \n\t"
- " .ARM \n"
- "1: vmov d0, r0, r1 \n\t"
- "@ Enter THUMB Mode\n\t"
- " adr r3, 2f+1 \n\t"
- " bx r3 \n\t"
- " .THUMB \n"
- "2: \n\t");
-#else
- asm("vmov d0, r0, r1");
-#endif // __thumb__
-#endif // defined(__VFP_FP__) && !defined(__SOFTFP__)
- asm("mov r1, #0");
-}
-
-
-bool OS::ArmUsingHardFloat() {
- // Cast helper function from returning void to returning double.
- typedef double (*F)();
- F f = FUNCTION_CAST<F>(FUNCTION_ADDR(ArmUsingHardFloatHelper));
- return f() == 1.0;
-}
-#endif // def __arm__
-
-
-int OS::ActivationFrameAlignment() {
-#ifdef V8_TARGET_ARCH_ARM
- // On EABI ARM targets this is required for fp correctness in the
- // runtime system.
- return 8;
-#endif
- // With gcc 4.4 the tree vectorization optimizer can generate code
- // that requires 16 byte alignment such as movdqa on x86.
- return 16;
-}
-
-
-void OS::ReleaseStore(volatile AtomicWord* ptr, AtomicWord value) {
-#if defined(V8_TARGET_ARCH_ARM) && defined(__arm__)
- // Only use on ARM hardware.
- MemoryBarrier();
-#else
- __asm__ __volatile__("" : : : "memory");
- // An x86 store acts as a release barrier.
-#endif
- *ptr = value;
-}
-
-
-const char* OS::LocalTimezone(double time) {
- if (isnan(time)) return "";
- time_t tv = static_cast<time_t>(floor(time/msPerSecond));
- struct tm* t = localtime(&tv);
- if (NULL == t) return "";
- return t->tm_zone;
-}
-
-
-double OS::LocalTimeOffset() {
- time_t tv = time(NULL);
- struct tm* t = localtime(&tv);
- // tm_gmtoff includes any daylight savings offset, so subtract it.
- return static_cast<double>(t->tm_gmtoff * msPerSecond -
- (t->tm_isdst > 0 ? 3600 * msPerSecond : 0));
-}
-
-
-// We keep the lowest and highest addresses mapped as a quick way of
-// determining that pointers are outside the heap (used mostly in assertions
-// and verification). The estimate is conservative, ie, not all addresses in
-// 'allocated' space are actually allocated to our heap. The range is
-// [lowest, highest), inclusive on the low and and exclusive on the high end.
-static void* lowest_ever_allocated = reinterpret_cast<void*>(-1);
-static void* highest_ever_allocated = reinterpret_cast<void*>(0);
-
-
-static void UpdateAllocatedSpaceLimits(void* address, int size) {
- ASSERT(limit_mutex != NULL);
- ScopedLock lock(limit_mutex);
-
- lowest_ever_allocated = Min(lowest_ever_allocated, address);
- highest_ever_allocated =
- Max(highest_ever_allocated,
- reinterpret_cast<void*>(reinterpret_cast<char*>(address) + size));
-}
-
-
-bool OS::IsOutsideAllocatedSpace(void* address) {
- return address < lowest_ever_allocated || address >= highest_ever_allocated;
-}
-
-
-size_t OS::AllocateAlignment() {
- return sysconf(_SC_PAGESIZE);
-}
-
-
-void* OS::Allocate(const size_t requested,
- size_t* allocated,
- bool is_executable) {
- const size_t msize = RoundUp(requested, sysconf(_SC_PAGESIZE));
- int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
- void* addr = GetRandomMmapAddr();
- void* mbase = mmap(addr, msize, prot, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
- if (mbase == MAP_FAILED) {
- LOG(i::Isolate::Current(),
- StringEvent("OS::Allocate", "mmap failed"));
- return NULL;
- }
- *allocated = msize;
- UpdateAllocatedSpaceLimits(mbase, msize);
- return mbase;
-}
-
-
-void OS::Free(void* address, const size_t size) {
- // TODO(1240712): munmap has a return value which is ignored here.
- int result = munmap(address, size);
- USE(result);
- ASSERT(result == 0);
-}
-
-
-void OS::Sleep(int milliseconds) {
- unsigned int ms = static_cast<unsigned int>(milliseconds);
- usleep(1000 * ms);
-}
-
-
-void OS::Abort() {
- // Redirect to std abort to signal abnormal program termination.
- abort();
-}
-
-
-void OS::DebugBreak() {
-// TODO(lrn): Introduce processor define for runtime system (!= V8_ARCH_x,
-// which is the architecture of generated code).
-#if (defined(__arm__) || defined(__thumb__))
-# if defined(CAN_USE_ARMV5_INSTRUCTIONS)
- asm("bkpt 0");
-# endif
-#else
- asm("int $3");
-#endif
-}
-
-
-class PosixMemoryMappedFile : public OS::MemoryMappedFile {
- public:
- PosixMemoryMappedFile(FILE* file, void* memory, int size)
- : file_(file), memory_(memory), size_(size) { }
- virtual ~PosixMemoryMappedFile();
- virtual void* memory() { return memory_; }
- virtual int size() { return size_; }
- private:
- FILE* file_;
- void* memory_;
- int size_;
-};
-
-
-OS::MemoryMappedFile* OS::MemoryMappedFile::open(const char* name) {
- FILE* file = fopen(name, "r+");
- if (file == NULL) return NULL;
-
- fseek(file, 0, SEEK_END);
- int size = ftell(file);
-
- void* memory =
- mmap(OS::GetRandomMmapAddr(),
- size,
- PROT_READ | PROT_WRITE,
- MAP_SHARED,
- fileno(file),
- 0);
- return new PosixMemoryMappedFile(file, memory, size);
-}
-
-
-OS::MemoryMappedFile* OS::MemoryMappedFile::create(const char* name, int size,
- void* initial) {
- FILE* file = fopen(name, "w+");
- if (file == NULL) return NULL;
- int result = fwrite(initial, size, 1, file);
- if (result < 1) {
- fclose(file);
- return NULL;
- }
- void* memory =
- mmap(OS::GetRandomMmapAddr(),
- size,
- PROT_READ | PROT_WRITE,
- MAP_SHARED,
- fileno(file),
- 0);
- return new PosixMemoryMappedFile(file, memory, size);
-}
-
-
-PosixMemoryMappedFile::~PosixMemoryMappedFile() {
- if (memory_) munmap(memory_, size_);
- fclose(file_);
-}
-
-
-void OS::LogSharedLibraryAddresses() {
- procfs_mapinfo *mapinfos = NULL, *mapinfo;
- int proc_fd, num, i;
-
- struct {
- procfs_debuginfo info;
- char buff[PATH_MAX];
- } map;
-
- char buf[PATH_MAX + 1];
- sprintf(buf, "/proc/%d/as", getpid());
-
- if ((proc_fd = open(buf, O_RDONLY)) == -1) {
- close(proc_fd);
- return;
- }
-
- /* Get the number of map entrys. */
- if (devctl(proc_fd, DCMD_PROC_MAPINFO, NULL, 0, &num) != EOK) {
- close(proc_fd);
- return;
- }
-
- mapinfos =(procfs_mapinfo *)malloc(num * sizeof(procfs_mapinfo));
- if (mapinfos == NULL) {
- close(proc_fd);
- return;
- }
-
- /* Fill the map entrys. */
- if (devctl(proc_fd, DCMD_PROC_PAGEDATA, mapinfos, num * sizeof(procfs_mapinfo), &num) != EOK) {
- free(mapinfos);
- close(proc_fd);
- return;
- }
-
- i::Isolate* isolate = ISOLATE;
-
- for (i = 0; i < num; i++) {
- mapinfo = mapinfos + i;
- if (mapinfo->flags & MAP_ELF) {
- map.info.vaddr = mapinfo->vaddr;
- if (devctl(proc_fd, DCMD_PROC_MAPDEBUG, &map, sizeof(map), 0) != EOK)
- continue;
-
- LOG(isolate, SharedLibraryEvent(map.info.path, mapinfo->vaddr, mapinfo->vaddr + mapinfo->size));
- }
- }
- free(mapinfos);
- close(proc_fd);
-}
-
-
-static const char kGCFakeMmap[] = "/tmp/__v8_gc__";
-
-
-void OS::SignalCodeMovingGC() {
- // Support for ll_prof.py.
- //
- // The QNX profiler built into the kernel logs all mmap's with
- // PROT_EXEC so that analysis tools can properly attribute ticks. We
- // do a mmap with a name known by ll_prof.py and immediately munmap
- // it. This injects a GC marker into the stream of events generated
- // by the kernel and allows us to synchronize V8 code log and the
- // kernel log.
- int size = sysconf(_SC_PAGESIZE);
- FILE* f = fopen(kGCFakeMmap, "w+");
- void* addr = mmap(OS::GetRandomMmapAddr(),
- size,
- PROT_READ | PROT_EXEC,
- MAP_PRIVATE,
- fileno(f),
- 0);
- ASSERT(addr != MAP_FAILED);
- munmap(addr, size);
- fclose(f);
-}
-
-
-int OS::StackWalk(Vector<OS::StackFrame> frames) {
- int frames_size = frames.length();
- bt_addr_t addresses[frames_size];
- bt_accessor_t acc;
- bt_memmap_t memmap;
- bt_init_accessor(&acc, BT_SELF);
- bt_load_memmap(&acc, &memmap);
- int frames_count = bt_get_backtrace(&acc, addresses, frames_size);
- bt_addr_t temp_addr[1];
- for (int i = 0; i < frames_count; i++) {
- frames[i].address = reinterpret_cast<void*>(addresses[i]);
- temp_addr[0] = addresses[i];
- // Format a text representation of the frame based on the information
- // available.
- bt_sprnf_addrs(&memmap, temp_addr, 1, "%a", frames[i].text, kStackWalkMaxTextLen, 0);
- // Make sure line termination is in place.
- frames[i].text[kStackWalkMaxTextLen - 1] = '\0';
- }
- bt_unload_memmap(&memmap);
- bt_release_accessor(&acc);
- return 0;
-}
-
-
-// Constants used for mmap.
-static const int kMmapFd = -1;
-static const int kMmapFdOffset = 0;
-
-VirtualMemory::VirtualMemory() : address_(NULL), size_(0) { }
-
-VirtualMemory::VirtualMemory(size_t size) {
- address_ = ReserveRegion(size);
- size_ = size;
-}
-
-
-VirtualMemory::VirtualMemory(size_t size, size_t alignment)
- : address_(NULL), size_(0) {
- ASSERT(IsAligned(alignment, static_cast<intptr_t>(OS::AllocateAlignment())));
- size_t request_size = RoundUp(size + alignment,
- static_cast<intptr_t>(OS::AllocateAlignment()));
- void* reservation = mmap(OS::GetRandomMmapAddr(),
- request_size,
- PROT_NONE,
- MAP_PRIVATE | MAP_ANONYMOUS,
- kMmapFd,
- kMmapFdOffset);
- if (reservation == MAP_FAILED) return;
-
- Address base = static_cast<Address>(reservation);
- Address aligned_base = RoundUp(base, alignment);
- ASSERT_LE(base, aligned_base);
-
- // Unmap extra memory reserved before and after the desired block.
- if (aligned_base != base) {
- size_t prefix_size = static_cast<size_t>(aligned_base - base);
- OS::Free(base, prefix_size);
- request_size -= prefix_size;
- }
-
- size_t aligned_size = RoundUp(size, OS::AllocateAlignment());
- ASSERT_LE(aligned_size, request_size);
-
- if (aligned_size != request_size) {
- size_t suffix_size = request_size - aligned_size;
- OS::Free(aligned_base + aligned_size, suffix_size);
- request_size -= suffix_size;
- }
-
- ASSERT(aligned_size == request_size);
-
- address_ = static_cast<void*>(aligned_base);
- size_ = aligned_size;
-}
-
-
-VirtualMemory::~VirtualMemory() {
- if (IsReserved()) {
- bool result = ReleaseRegion(address(), size());
- ASSERT(result);
- USE(result);
- }
-}
-
-
-bool VirtualMemory::IsReserved() {
- return address_ != NULL;
-}
-
-
-void VirtualMemory::Reset() {
- address_ = NULL;
- size_ = 0;
-}
-
-
-bool VirtualMemory::Commit(void* address, size_t size, bool is_executable) {
- return CommitRegion(address, size, is_executable);
-}
-
-
-bool VirtualMemory::Uncommit(void* address, size_t size) {
- return UncommitRegion(address, size);
-}
-
-
-void* VirtualMemory::ReserveRegion(size_t size) {
- void* result = mmap(OS::GetRandomMmapAddr(),
- size,
- PROT_NONE,
- MAP_PRIVATE | MAP_ANONYMOUS,
- kMmapFd,
- kMmapFdOffset);
-
- if (result == MAP_FAILED) return NULL;
-
- return result;
-}
-
-
-bool VirtualMemory::CommitRegion(void* base, size_t size, bool is_executable) {
- int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
- if (MAP_FAILED == mmap(base,
- size,
- prot,
- MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED,
- kMmapFd,
- kMmapFdOffset)) {
- return false;
- }
-
- UpdateAllocatedSpaceLimits(base, size);
- return true;
-}
-
-
-bool VirtualMemory::UncommitRegion(void* base, size_t size) {
- return mmap(base,
- size,
- PROT_NONE,
- MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED,
- kMmapFd,
- kMmapFdOffset) != MAP_FAILED;
-}
-
-
-bool VirtualMemory::ReleaseRegion(void* base, size_t size) {
- return munmap(base, size) == 0;
-}
-
-
-class Thread::PlatformData : public Malloced {
- public:
- PlatformData() : thread_(kNoThread) {}
-
- pthread_t thread_; // Thread handle for pthread.
-};
-
-Thread::Thread(const Options& options)
- : data_(new PlatformData()),
- stack_size_(options.stack_size) {
- set_name(options.name);
-}
-
-Thread::Thread(const char* name)
- : data_(new PlatformData()),
- stack_size_(0) {
- set_name(name);
-}
-
-
-Thread::~Thread() {
- delete data_;
-}
-
-
-static void* ThreadEntry(void* arg) {
- Thread* thread = reinterpret_cast<Thread*>(arg);
- // This is also initialized by the first argument to pthread_create() but we
- // don't know which thread will run first (the original thread or the new
- // one) so we initialize it here too.
-#ifdef PR_SET_NAME
- prctl(PR_SET_NAME,
- reinterpret_cast<unsigned long>(thread->name()), // NOLINT
- 0, 0, 0);
-#endif
- thread->data()->thread_ = pthread_self();
- ASSERT(thread->data()->thread_ != kNoThread);
- thread->Run();
- return NULL;
-}
-
-
-void Thread::set_name(const char* name) {
- strncpy(name_, name, sizeof(name_));
- name_[sizeof(name_) - 1] = '\0';
-}
-
-
-void Thread::Start() {
- pthread_attr_t* attr_ptr = NULL;
- pthread_attr_t attr;
- if (stack_size_ > 0) {
- pthread_attr_init(&attr);
- pthread_attr_setstacksize(&attr, static_cast<size_t>(stack_size_));
- attr_ptr = &attr;
- }
- int result = pthread_create(&data_->thread_, attr_ptr, ThreadEntry, this);
- CHECK_EQ(0, result);
- ASSERT(data_->thread_ != kNoThread);
-}
-
-
-void Thread::Join() {
- pthread_join(data_->thread_, NULL);
-}
-
-
-Thread::LocalStorageKey Thread::CreateThreadLocalKey() {
- pthread_key_t key;
- int result = pthread_key_create(&key, NULL);
- USE(result);
- ASSERT(result == 0);
- return static_cast<LocalStorageKey>(key);
-}
-
-
-void Thread::DeleteThreadLocalKey(LocalStorageKey key) {
- pthread_key_t pthread_key = static_cast<pthread_key_t>(key);
- int result = pthread_key_delete(pthread_key);
- USE(result);
- ASSERT(result == 0);
-}
-
-
-void* Thread::GetThreadLocal(LocalStorageKey key) {
- pthread_key_t pthread_key = static_cast<pthread_key_t>(key);
- return pthread_getspecific(pthread_key);
-}
-
-
-void Thread::SetThreadLocal(LocalStorageKey key, void* value) {
- pthread_key_t pthread_key = static_cast<pthread_key_t>(key);
- pthread_setspecific(pthread_key, value);
-}
-
-
-void Thread::YieldCPU() {
- sched_yield();
-}
-
-
-class QNXMutex : public Mutex {
- public:
- QNXMutex() {
- pthread_mutexattr_t attrs;
- int result = pthread_mutexattr_init(&attrs);
- ASSERT(result == 0);
- result = pthread_mutexattr_settype(&attrs, PTHREAD_MUTEX_RECURSIVE);
- ASSERT(result == 0);
- result = pthread_mutex_init(&mutex_, &attrs);
- ASSERT(result == 0);
- USE(result);
- }
-
- virtual ~QNXMutex() { pthread_mutex_destroy(&mutex_); }
-
- virtual int Lock() {
- int result = pthread_mutex_lock(&mutex_);
- return result;
- }
-
- virtual int Unlock() {
- int result = pthread_mutex_unlock(&mutex_);
- return result;
- }
-
- virtual bool TryLock() {
- int result = pthread_mutex_trylock(&mutex_);
- // Return false if the lock is busy and locking failed.
- if (result == EBUSY) {
- return false;
- }
- ASSERT(result == 0); // Verify no other errors.
- return true;
- }
-
- private:
- pthread_mutex_t mutex_; // Pthread mutex for POSIX platforms.
-};
-
-
-Mutex* OS::CreateMutex() {
- return new QNXMutex();
-}
-
-
-class QNXSemaphore : public Semaphore {
- public:
- explicit QNXSemaphore(int count) { sem_init(&sem_, 0, count); }
- virtual ~QNXSemaphore() { sem_destroy(&sem_); }
-
- virtual void Wait();
- virtual bool Wait(int timeout);
- virtual void Signal() { sem_post(&sem_); }
- private:
- sem_t sem_;
-};
-
-
-void QNXSemaphore::Wait() {
- while (true) {
- int result = sem_wait(&sem_);
- if (result == 0) return; // Successfully got semaphore.
- CHECK(result == -1 && errno == EINTR); // Signal caused spurious wakeup.
- }
-}
-
-
-#ifndef TIMEVAL_TO_TIMESPEC
-#define TIMEVAL_TO_TIMESPEC(tv, ts) do { \
- (ts)->tv_sec = (tv)->tv_sec; \
- (ts)->tv_nsec = (tv)->tv_usec * 1000; \
-} while (false)
-#endif
-
-
-bool QNXSemaphore::Wait(int timeout) {
- const long kOneSecondMicros = 1000000; // NOLINT
-
- // Split timeout into second and nanosecond parts.
- struct timeval delta;
- delta.tv_usec = timeout % kOneSecondMicros;
- delta.tv_sec = timeout / kOneSecondMicros;
-
- struct timeval current_time;
- // Get the current time.
- if (gettimeofday(&current_time, NULL) == -1) {
- return false;
- }
-
- // Calculate time for end of timeout.
- struct timeval end_time;
- timeradd(&current_time, &delta, &end_time);
-
- struct timespec ts;
- TIMEVAL_TO_TIMESPEC(&end_time, &ts);
- // Wait for semaphore signalled or timeout.
- while (true) {
- int result = sem_timedwait(&sem_, &ts);
- if (result == 0) return true; // Successfully got semaphore.
- if (result == -1 && errno == ETIMEDOUT) return false; // Timeout.
- CHECK(result == -1 && errno == EINTR); // Signal caused spurious wakeup.
- }
-}
-
-
-Semaphore* OS::CreateSemaphore(int count) {
- return new QNXSemaphore(count);
-}
-
-
-static int GetThreadID() {
- pthread_t thread_id = pthread_self();
- return thread_id;
-}
-
-
-static void ProfilerSignalHandler(int signal, siginfo_t* info, void* context) {
- USE(info);
- if (signal != SIGPROF) return;
- Isolate* isolate = Isolate::UncheckedCurrent();
- if (isolate == NULL || !isolate->IsInitialized() || !isolate->IsInUse()) {
- // We require a fully initialized and entered isolate.
- return;
- }
- if (v8::Locker::IsActive() &&
- !isolate->thread_manager()->IsLockedByCurrentThread()) {
- return;
- }
-
- Sampler* sampler = isolate->logger()->sampler();
- if (sampler == NULL || !sampler->IsActive()) return;
-
- TickSample sample_obj;
- TickSample* sample = CpuProfiler::TickSampleEvent(isolate);
- if (sample == NULL) sample = &sample_obj;
-
- // Extracting the sample from the context is extremely machine dependent.
- ucontext_t* ucontext = reinterpret_cast<ucontext_t*>(context);
- mcontext_t& mcontext = ucontext->uc_mcontext;
- sample->state = isolate->current_vm_state();
-#if V8_HOST_ARCH_IA32
- sample->pc = reinterpret_cast<Address>(mcontext.cpu.eip);
- sample->sp = reinterpret_cast<Address>(mcontext.cpu.esp);
- sample->fp = reinterpret_cast<Address>(mcontext.cpu.ebp);
-#elif V8_HOST_ARCH_X64
- sample->pc = reinterpret_cast<Address>(mcontext.cpu.rip);
- sample->sp = reinterpret_cast<Address>(mcontext.cpu.rsp);
- sample->fp = reinterpret_cast<Address>(mcontext.cpu.rbp);
-#elif V8_HOST_ARCH_ARM
- sample->pc = reinterpret_cast<Address>(mcontext.cpu.gpr[ARM_REG_PC]);
- sample->sp = reinterpret_cast<Address>(mcontext.cpu.gpr[ARM_REG_SP]);
- sample->fp = reinterpret_cast<Address>(mcontext.cpu.gpr[ARM_REG_FP]);
-#endif
- sampler->SampleStack(sample);
- sampler->Tick(sample);
-}
-
-
-class Sampler::PlatformData : public Malloced {
- public:
- PlatformData() : vm_tid_(GetThreadID()) {}
-
- int vm_tid() const { return vm_tid_; }
-
- private:
- const int vm_tid_;
-};
-
-
-class SignalSender : public Thread {
- public:
- enum SleepInterval {
- HALF_INTERVAL,
- FULL_INTERVAL
- };
-
- static const int kSignalSenderStackSize = 32 * KB;
-
- explicit SignalSender(int interval)
- : Thread("SignalSender"),
- vm_tgid_(getpid()),
- interval_(interval) {}
-
- static void InstallSignalHandler() {
- struct sigaction sa;
- sa.sa_sigaction = ProfilerSignalHandler;
- sigemptyset(&sa.sa_mask);
- sa.sa_flags = SA_SIGINFO;
- signal_handler_installed_ =
- (sigaction(SIGPROF, &sa, &old_signal_handler_) == 0);
- }
-
- static void RestoreSignalHandler() {
- if (signal_handler_installed_) {
- sigaction(SIGPROF, &old_signal_handler_, 0);
- signal_handler_installed_ = false;
- }
- }
-
- static void AddActiveSampler(Sampler* sampler) {
- ScopedLock lock(mutex_);
- SamplerRegistry::AddActiveSampler(sampler);
- if (instance_ == NULL) {
- // Start a thread that will send SIGPROF signal to VM threads,
- // when CPU profiling will be enabled.
- instance_ = new SignalSender(sampler->interval());
- instance_->Start();
- } else {
- ASSERT(instance_->interval_ == sampler->interval());
- }
- }
-
- static void RemoveActiveSampler(Sampler* sampler) {
- ScopedLock lock(mutex_);
- SamplerRegistry::RemoveActiveSampler(sampler);
- if (SamplerRegistry::GetState() == SamplerRegistry::HAS_NO_SAMPLERS) {
- RuntimeProfiler::StopRuntimeProfilerThreadBeforeShutdown(instance_);
- delete instance_;
- instance_ = NULL;
- RestoreSignalHandler();
- }
- }
-
- // Implement Thread::Run().
- virtual void Run() {
- SamplerRegistry::State state;
- while ((state = SamplerRegistry::GetState()) !=
- SamplerRegistry::HAS_NO_SAMPLERS) {
- bool cpu_profiling_enabled =
- (state == SamplerRegistry::HAS_CPU_PROFILING_SAMPLERS);
- bool runtime_profiler_enabled = RuntimeProfiler::IsEnabled();
- if (cpu_profiling_enabled && !signal_handler_installed_) {
- InstallSignalHandler();
- } else if (!cpu_profiling_enabled && signal_handler_installed_) {
- RestoreSignalHandler();
- }
- // When CPU profiling is enabled both JavaScript and C++ code is
- // profiled. We must not suspend.
- if (!cpu_profiling_enabled) {
- if (rate_limiter_.SuspendIfNecessary()) continue;
- }
- if (cpu_profiling_enabled && runtime_profiler_enabled) {
- if (!SamplerRegistry::IterateActiveSamplers(&DoCpuProfile, this)) {
- return;
- }
- Sleep(HALF_INTERVAL);
- if (!SamplerRegistry::IterateActiveSamplers(&DoRuntimeProfile, NULL)) {
- return;
- }
- Sleep(HALF_INTERVAL);
- } else {
- if (cpu_profiling_enabled) {
- if (!SamplerRegistry::IterateActiveSamplers(&DoCpuProfile,
- this)) {
- return;
- }
- }
- if (runtime_profiler_enabled) {
- if (!SamplerRegistry::IterateActiveSamplers(&DoRuntimeProfile,
- NULL)) {
- return;
- }
- }
- Sleep(FULL_INTERVAL);
- }
- }
- }
-
- static void DoCpuProfile(Sampler* sampler, void* raw_sender) {
- if (!sampler->IsProfiling()) return;
- SignalSender* sender = reinterpret_cast<SignalSender*>(raw_sender);
- sender->SendProfilingSignal(sampler->platform_data()->vm_tid());
- }
-
- static void DoRuntimeProfile(Sampler* sampler, void* ignored) {
- if (!sampler->isolate()->IsInitialized()) return;
- sampler->isolate()->runtime_profiler()->NotifyTick();
- }
-
- void SendProfilingSignal(int tid) {
- if (!signal_handler_installed_) return;
- pthread_kill(tid, SIGPROF);
- }
-
- void Sleep(SleepInterval full_or_half) {
- // Convert ms to us and subtract 100 us to compensate delays
- // occuring during signal delivery.
- useconds_t interval = interval_ * 1000 - 100;
- if (full_or_half == HALF_INTERVAL) interval /= 2;
- int result = usleep(interval);
-#ifdef DEBUG
- if (result != 0 && errno != EINTR) {
- fprintf(stderr,
- "SignalSender usleep error; interval = %u, errno = %d\n",
- interval,
- errno);
- ASSERT(result == 0 || errno == EINTR);
- }
-#endif
- USE(result);
- }
-
- const int vm_tgid_;
- const int interval_;
- RuntimeProfilerRateLimiter rate_limiter_;
-
- // Protects the process wide state below.
- static Mutex* mutex_;
- static SignalSender* instance_;
- static bool signal_handler_installed_;
- static struct sigaction old_signal_handler_;
-
- DISALLOW_COPY_AND_ASSIGN(SignalSender);
-};
-
-
-Mutex* SignalSender::mutex_ = OS::CreateMutex();
-SignalSender* SignalSender::instance_ = NULL;
-struct sigaction SignalSender::old_signal_handler_;
-bool SignalSender::signal_handler_installed_ = false;
-
-
-Sampler::Sampler(Isolate* isolate, int interval)
- : isolate_(isolate),
- interval_(interval),
- profiling_(false),
- active_(false),
- samples_taken_(0) {
- data_ = new PlatformData;
-}
-
-
-Sampler::~Sampler() {
- ASSERT(!IsActive());
- delete data_;
-}
-
-
-void Sampler::Start() {
- ASSERT(!IsActive());
- SetActive(true);
- SignalSender::AddActiveSampler(this);
-}
-
-
-void Sampler::Stop() {
- ASSERT(IsActive());
- SignalSender::RemoveActiveSampler(this);
- SetActive(false);
-}
-
-
-} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/platform-solaris.cc b/src/3rdparty/v8/src/platform-solaris.cc
index 035d394..be8bbfc 100644
--- a/src/3rdparty/v8/src/platform-solaris.cc
+++ b/src/3rdparty/v8/src/platform-solaris.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -52,7 +52,9 @@
#include "v8.h"
+#include "platform-posix.h"
#include "platform.h"
+#include "v8threads.h"
#include "vm-state-inl.h"
@@ -89,15 +91,10 @@ double ceiling(double x) {
static Mutex* limit_mutex = NULL;
-void OS::Setup() {
- // Seed the random number generator.
- // Convert the current time to a 64-bit integer first, before converting it
- // to an unsigned. Going directly will cause an overflow and the seed to be
- // set to all ones. The seed will be identical for different instances that
- // call this setup code within the same millisecond.
- uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
- srandom(static_cast<unsigned int>(seed));
- limit_mutex = CreateMutex();
+
+
+void OS::PostSetUp() {
+ POSIXPostSetUp();
}
@@ -139,7 +136,7 @@ double OS::LocalTimeOffset() {
// We keep the lowest and highest addresses mapped as a quick way of
// determining that pointers are outside the heap (used mostly in assertions
-// and verification). The estimate is conservative, ie, not all addresses in
+// and verification). The estimate is conservative, i.e., not all addresses in
// 'allocated' space are actually allocated to our heap. The range is
// [lowest, highest), inclusive on the low and and exclusive on the high end.
static void* lowest_ever_allocated = reinterpret_cast<void*>(-1);
@@ -322,43 +319,132 @@ static const int kMmapFd = -1;
static const int kMmapFdOffset = 0;
+VirtualMemory::VirtualMemory() : address_(NULL), size_(0) { }
+
VirtualMemory::VirtualMemory(size_t size) {
- address_ = mmap(NULL, size, PROT_NONE,
- MAP_PRIVATE | MAP_ANON | MAP_NORESERVE,
- kMmapFd, kMmapFdOffset);
+ address_ = ReserveRegion(size);
size_ = size;
}
+VirtualMemory::VirtualMemory(size_t size, size_t alignment)
+ : address_(NULL), size_(0) {
+ ASSERT(IsAligned(alignment, static_cast<intptr_t>(OS::AllocateAlignment())));
+ size_t request_size = RoundUp(size + alignment,
+ static_cast<intptr_t>(OS::AllocateAlignment()));
+ void* reservation = mmap(OS::GetRandomMmapAddr(),
+ request_size,
+ PROT_NONE,
+ MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE,
+ kMmapFd,
+ kMmapFdOffset);
+ if (reservation == MAP_FAILED) return;
+
+ Address base = static_cast<Address>(reservation);
+ Address aligned_base = RoundUp(base, alignment);
+ ASSERT_LE(base, aligned_base);
+
+ // Unmap extra memory reserved before and after the desired block.
+ if (aligned_base != base) {
+ size_t prefix_size = static_cast<size_t>(aligned_base - base);
+ OS::Free(base, prefix_size);
+ request_size -= prefix_size;
+ }
+
+ size_t aligned_size = RoundUp(size, OS::AllocateAlignment());
+ ASSERT_LE(aligned_size, request_size);
+
+ if (aligned_size != request_size) {
+ size_t suffix_size = request_size - aligned_size;
+ OS::Free(aligned_base + aligned_size, suffix_size);
+ request_size -= suffix_size;
+ }
+
+ ASSERT(aligned_size == request_size);
+
+ address_ = static_cast<void*>(aligned_base);
+ size_ = aligned_size;
+}
+
+
VirtualMemory::~VirtualMemory() {
if (IsReserved()) {
- if (0 == munmap(address(), size())) address_ = MAP_FAILED;
+ bool result = ReleaseRegion(address(), size());
+ ASSERT(result);
+ USE(result);
}
}
bool VirtualMemory::IsReserved() {
- return address_ != MAP_FAILED;
+ return address_ != NULL;
+}
+
+
+void VirtualMemory::Reset() {
+ address_ = NULL;
+ size_ = 0;
+}
+
+
+bool VirtualMemory::Commit(void* address, size_t size, bool is_executable) {
+ return CommitRegion(address, size, is_executable);
+}
+
+
+bool VirtualMemory::Uncommit(void* address, size_t size) {
+ return UncommitRegion(address, size);
+}
+
+
+bool VirtualMemory::Guard(void* address) {
+ OS::Guard(address, OS::CommitPageSize());
+ return true;
}
-bool VirtualMemory::Commit(void* address, size_t size, bool executable) {
- int prot = PROT_READ | PROT_WRITE | (executable ? PROT_EXEC : 0);
- if (MAP_FAILED == mmap(address, size, prot,
- MAP_PRIVATE | MAP_ANON | MAP_FIXED,
- kMmapFd, kMmapFdOffset)) {
+void* VirtualMemory::ReserveRegion(size_t size) {
+ void* result = mmap(OS::GetRandomMmapAddr(),
+ size,
+ PROT_NONE,
+ MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE,
+ kMmapFd,
+ kMmapFdOffset);
+
+ if (result == MAP_FAILED) return NULL;
+
+ return result;
+}
+
+
+bool VirtualMemory::CommitRegion(void* base, size_t size, bool is_executable) {
+ int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
+ if (MAP_FAILED == mmap(base,
+ size,
+ prot,
+ MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED,
+ kMmapFd,
+ kMmapFdOffset)) {
return false;
}
- UpdateAllocatedSpaceLimits(address, size);
+ UpdateAllocatedSpaceLimits(base, size);
return true;
}
-bool VirtualMemory::Uncommit(void* address, size_t size) {
- return mmap(address, size, PROT_NONE,
- MAP_PRIVATE | MAP_ANON | MAP_NORESERVE | MAP_FIXED,
- kMmapFd, kMmapFdOffset) != MAP_FAILED;
+bool VirtualMemory::UncommitRegion(void* base, size_t size) {
+ return mmap(base,
+ size,
+ PROT_NONE,
+ MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE | MAP_FIXED,
+ kMmapFd,
+ kMmapFdOffset) != MAP_FAILED;
+}
+
+
+bool VirtualMemory::ReleaseRegion(void* base, size_t size) {
+ return munmap(base, size) == 0;
}
@@ -369,17 +455,11 @@ class Thread::PlatformData : public Malloced {
pthread_t thread_; // Thread handle for pthread.
};
-Thread::Thread(const Options& options)
- : data_(new PlatformData()),
- stack_size_(options.stack_size) {
- set_name(options.name);
-}
-
-Thread::Thread(const char* name)
+Thread::Thread(const Options& options)
: data_(new PlatformData()),
- stack_size_(0) {
- set_name(name);
+ stack_size_(options.stack_size()) {
+ set_name(options.name());
}
@@ -626,10 +706,18 @@ class SignalSender : public Thread {
FULL_INTERVAL
};
+ static const int kSignalSenderStackSize = 64 * KB;
+
explicit SignalSender(int interval)
- : Thread("SignalSender"),
+ : Thread(Thread::Options("SignalSender", kSignalSenderStackSize)),
interval_(interval) {}
+ static void SetUp() {
+ if (!mutex_) {
+ mutex_ = OS::CreateMutex();
+ }
+ }
+
static void InstallSignalHandler() {
struct sigaction sa;
sa.sa_sigaction = ProfilerSignalHandler;
@@ -759,15 +847,29 @@ class SignalSender : public Thread {
static bool signal_handler_installed_;
static struct sigaction old_signal_handler_;
+ private:
DISALLOW_COPY_AND_ASSIGN(SignalSender);
};
-Mutex* SignalSender::mutex_ = OS::CreateMutex();
+Mutex* SignalSender::mutex_ = NULL;
SignalSender* SignalSender::instance_ = NULL;
struct sigaction SignalSender::old_signal_handler_;
bool SignalSender::signal_handler_installed_ = false;
+void OS::SetUp() {
+ // Seed the random number generator.
+ // Convert the current time to a 64-bit integer first, before converting it
+ // to an unsigned. Going directly will cause an overflow and the seed to be
+ // set to all ones. The seed will be identical for different instances that
+ // call this setup code within the same millisecond.
+ uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
+ srandom(static_cast<unsigned int>(seed));
+ limit_mutex = CreateMutex();
+ SignalSender::SetUp();
+}
+
+
Sampler::Sampler(Isolate* isolate, int interval)
: isolate_(isolate),
interval_(interval),
diff --git a/src/3rdparty/v8/src/platform-win32.cc b/src/3rdparty/v8/src/platform-win32.cc
index 537440b..e36fc87 100644
--- a/src/3rdparty/v8/src/platform-win32.cc
+++ b/src/3rdparty/v8/src/platform-win32.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -32,6 +32,7 @@
#include "v8.h"
+#include "codegen.h"
#include "platform.h"
#include "vm-state-inl.h"
@@ -140,20 +141,11 @@ static Mutex* limit_mutex = NULL;
#if defined(V8_TARGET_ARCH_IA32)
static OS::MemCopyFunction memcopy_function = NULL;
-static Mutex* memcopy_function_mutex = OS::CreateMutex();
// Defined in codegen-ia32.cc.
OS::MemCopyFunction CreateMemCopyFunction();
// Copy memory area to disjoint memory area.
void OS::MemCopy(void* dest, const void* src, size_t size) {
- if (memcopy_function == NULL) {
- ScopedLock lock(memcopy_function_mutex);
- if (memcopy_function == NULL) {
- OS::MemCopyFunction temp = CreateMemCopyFunction();
- MemoryBarrier();
- memcopy_function = temp;
- }
- }
// Note: here we rely on dependent reads being ordered. This is true
// on all architectures we currently support.
(*memcopy_function)(dest, src, size);
@@ -166,19 +158,14 @@ void OS::MemCopy(void* dest, const void* src, size_t size) {
#ifdef _WIN64
typedef double (*ModuloFunction)(double, double);
static ModuloFunction modulo_function = NULL;
-static Mutex* modulo_function_mutex = OS::CreateMutex();
// Defined in codegen-x64.cc.
ModuloFunction CreateModuloFunction();
+void init_modulo_function() {
+ modulo_function = CreateModuloFunction();
+}
+
double modulo(double x, double y) {
- if (modulo_function == NULL) {
- ScopedLock lock(modulo_function_mutex);
- if (modulo_function == NULL) {
- ModuloFunction temp = CreateModuloFunction();
- MemoryBarrier();
- modulo_function = temp;
- }
- }
// Note: here we rely on dependent reads being ordered. This is true
// on all architectures we currently support.
return (*modulo_function)(x, y);
@@ -198,9 +185,40 @@ double modulo(double x, double y) {
#endif // _WIN64
+
+#define UNARY_MATH_FUNCTION(name, generator) \
+static UnaryMathFunction fast_##name##_function = NULL; \
+void init_fast_##name##_function() { \
+ fast_##name##_function = generator; \
+} \
+double fast_##name(double x) { \
+ return (*fast_##name##_function)(x); \
+}
+
+UNARY_MATH_FUNCTION(sin, CreateTranscendentalFunction(TranscendentalCache::SIN))
+UNARY_MATH_FUNCTION(cos, CreateTranscendentalFunction(TranscendentalCache::COS))
+UNARY_MATH_FUNCTION(tan, CreateTranscendentalFunction(TranscendentalCache::TAN))
+UNARY_MATH_FUNCTION(log, CreateTranscendentalFunction(TranscendentalCache::LOG))
+UNARY_MATH_FUNCTION(sqrt, CreateSqrtFunction())
+
+#undef MATH_FUNCTION
+
+
+void MathSetup() {
+#ifdef _WIN64
+ init_modulo_function();
+#endif
+ init_fast_sin_function();
+ init_fast_cos_function();
+ init_fast_tan_function();
+ init_fast_log_function();
+ init_fast_sqrt_function();
+}
+
+
// ----------------------------------------------------------------------------
// The Time class represents time on win32. A timestamp is represented as
-// a 64-bit integer in 100 nano-seconds since January 1, 1601 (UTC). JavaScript
+// a 64-bit integer in 100 nanoseconds since January 1, 1601 (UTC). JavaScript
// timestamps are represented as a doubles in milliseconds since 00:00:00 UTC,
// January 1, 1970.
@@ -442,6 +460,9 @@ void Time::SetToCurrentTime() {
// Check if we need to resync due to elapsed time.
needs_resync |= (time_now.t_ - init_time.t_) > kMaxClockElapsedTime;
+ // Check if we need to resync due to backwards time change.
+ needs_resync |= time_now.t_ < init_time.t_;
+
// Resync the clock if necessary.
if (needs_resync) {
GetSystemTimeAsFileTime(&init_time.ft_);
@@ -484,12 +505,13 @@ int64_t Time::LocalOffset() {
tm posix_local_time_struct;
if (localtime_s(&posix_local_time_struct, &posix_time)) return 0;
- if (posix_local_time_struct.tm_isdst > 0)
- return (tzinfo_.Bias + tzinfo_.DaylightBias) * -kMsPerMinute;
- else if (posix_local_time_struct.tm_isdst == 0)
- return (tzinfo_.Bias + tzinfo_.StandardBias) * -kMsPerMinute;
- else
- return tzinfo_.Bias * -kMsPerMinute;
+ if (posix_local_time_struct.tm_isdst > 0) {
+ return (tzinfo_.Bias + tzinfo_.DaylightBias) * -kMsPerMinute;
+ } else if (posix_local_time_struct.tm_isdst == 0) {
+ return (tzinfo_.Bias + tzinfo_.StandardBias) * -kMsPerMinute;
+ } else {
+ return tzinfo_.Bias * -kMsPerMinute;
+ }
}
@@ -532,15 +554,13 @@ char* Time::LocalTimezone() {
}
-void OS::Setup() {
- // Seed the random number generator.
- // Convert the current time to a 64-bit integer first, before converting it
- // to an unsigned. Going directly can cause an overflow and the seed to be
- // set to all ones. The seed will be identical for different instances that
- // call this setup code within the same millisecond.
- uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
- srand(static_cast<unsigned int>(seed));
- limit_mutex = CreateMutex();
+void OS::PostSetUp() {
+ // Math functions depend on CPU features therefore they are initialized after
+ // CPU.
+ MathSetup();
+#if defined(V8_TARGET_ARCH_IA32)
+ memcopy_function = CreateMemCopyFunction();
+#endif
}
@@ -780,7 +800,7 @@ void OS::StrNCpy(Vector<char> dest, const char* src, size_t n) {
// We keep the lowest and highest addresses mapped as a quick way of
// determining that pointers are outside the heap (used mostly in assertions
-// and verification). The estimate is conservative, ie, not all addresses in
+// and verification). The estimate is conservative, i.e., not all addresses in
// 'allocated' space are actually allocated to our heap. The range is
// [lowest, highest), inclusive on the low and and exclusive on the high end.
static void* lowest_ever_allocated = reinterpret_cast<void*>(-1);
@@ -835,43 +855,62 @@ size_t OS::AllocateAlignment() {
}
-void* OS::Allocate(const size_t requested,
- size_t* allocated,
- bool is_executable) {
- // The address range used to randomize RWX allocations in OS::Allocate
- // Try not to map pages into the default range that windows loads DLLs
- // Use a multiple of 64k to prevent committing unused memory.
- // Note: This does not guarantee RWX regions will be within the
- // range kAllocationRandomAddressMin to kAllocationRandomAddressMax
+static void* GetRandomAddr() {
+ Isolate* isolate = Isolate::UncheckedCurrent();
+ // Note that the current isolate isn't set up in a call path via
+ // CpuFeatures::Probe. We don't care about randomization in this case because
+ // the code page is immediately freed.
+ if (isolate != NULL) {
+ // The address range used to randomize RWX allocations in OS::Allocate
+ // Try not to map pages into the default range that windows loads DLLs
+ // Use a multiple of 64k to prevent committing unused memory.
+ // Note: This does not guarantee RWX regions will be within the
+ // range kAllocationRandomAddressMin to kAllocationRandomAddressMax
#ifdef V8_HOST_ARCH_64_BIT
- static const intptr_t kAllocationRandomAddressMin = 0x0000000080000000;
- static const intptr_t kAllocationRandomAddressMax = 0x000003FFFFFF0000;
+ static const intptr_t kAllocationRandomAddressMin = 0x0000000080000000;
+ static const intptr_t kAllocationRandomAddressMax = 0x000003FFFFFF0000;
#else
- static const intptr_t kAllocationRandomAddressMin = 0x04000000;
- static const intptr_t kAllocationRandomAddressMax = 0x3FFF0000;
+ static const intptr_t kAllocationRandomAddressMin = 0x04000000;
+ static const intptr_t kAllocationRandomAddressMax = 0x3FFF0000;
#endif
+ uintptr_t address = (V8::RandomPrivate(isolate) << kPageSizeBits)
+ | kAllocationRandomAddressMin;
+ address &= kAllocationRandomAddressMax;
+ return reinterpret_cast<void *>(address);
+ }
+ return NULL;
+}
+
+
+static void* RandomizedVirtualAlloc(size_t size, int action, int protection) {
+ LPVOID base = NULL;
+ if (protection == PAGE_EXECUTE_READWRITE || protection == PAGE_NOACCESS) {
+ // For executable pages try and randomize the allocation address
+ for (size_t attempts = 0; base == NULL && attempts < 3; ++attempts) {
+ base = VirtualAlloc(GetRandomAddr(), size, action, protection);
+ }
+ }
+
+ // After three attempts give up and let the OS find an address to use.
+ if (base == NULL) base = VirtualAlloc(NULL, size, action, protection);
+
+ return base;
+}
+
+
+void* OS::Allocate(const size_t requested,
+ size_t* allocated,
+ bool is_executable) {
// VirtualAlloc rounds allocated size to page size automatically.
size_t msize = RoundUp(requested, static_cast<int>(GetPageSize()));
- intptr_t address = 0;
// Windows XP SP2 allows Data Excution Prevention (DEP).
int prot = is_executable ? PAGE_EXECUTE_READWRITE : PAGE_READWRITE;
- // For exectutable pages try and randomize the allocation address
- if (prot == PAGE_EXECUTE_READWRITE &&
- msize >= static_cast<size_t>(Page::kPageSize)) {
- address = (V8::RandomPrivate(Isolate::Current()) << kPageSizeBits)
- | kAllocationRandomAddressMin;
- address &= kAllocationRandomAddressMax;
- }
-
- LPVOID mbase = VirtualAlloc(reinterpret_cast<void *>(address),
- msize,
- MEM_COMMIT | MEM_RESERVE,
- prot);
- if (mbase == NULL && address != 0)
- mbase = VirtualAlloc(NULL, msize, MEM_COMMIT | MEM_RESERVE, prot);
+ LPVOID mbase = RandomizedVirtualAlloc(msize,
+ MEM_COMMIT | MEM_RESERVE,
+ prot);
if (mbase == NULL) {
LOG(ISOLATE, StringEvent("OS::Allocate", "VirtualAlloc failed"));
@@ -893,6 +932,11 @@ void OS::Free(void* address, const size_t size) {
}
+intptr_t OS::CommitPageSize() {
+ return 4096;
+}
+
+
void OS::ProtectCode(void* address, const size_t size) {
DWORD old_protect;
VirtualProtect(address, size, PAGE_EXECUTE_READ, &old_protect);
@@ -911,11 +955,11 @@ void OS::Sleep(int milliseconds) {
void OS::Abort() {
- if (!IsDebuggerPresent()) {
+ if (IsDebuggerPresent() || FLAG_break_on_abort) {
+ DebugBreak();
+ } else {
// Make the MSVCRT do a silent abort.
raise(SIGABRT);
- } else {
- DebugBreak();
}
}
@@ -1466,7 +1510,7 @@ bool VirtualMemory::Uncommit(void* address, size_t size) {
void* VirtualMemory::ReserveRegion(size_t size) {
- return VirtualAlloc(NULL, size, MEM_RESERVE, PAGE_NOACCESS);
+ return RandomizedVirtualAlloc(size, MEM_RESERVE, PAGE_NOACCESS);
}
@@ -1481,6 +1525,17 @@ bool VirtualMemory::CommitRegion(void* base, size_t size, bool is_executable) {
}
+bool VirtualMemory::Guard(void* address) {
+ if (NULL == VirtualAlloc(address,
+ OS::CommitPageSize(),
+ MEM_COMMIT,
+ PAGE_READONLY | PAGE_GUARD)) {
+ return false;
+ }
+ return true;
+}
+
+
bool VirtualMemory::UncommitRegion(void* base, size_t size) {
return VirtualFree(base, size, MEM_DECOMMIT) != 0;
}
@@ -1491,7 +1546,6 @@ bool VirtualMemory::ReleaseRegion(void* base, size_t size) {
}
-
// ----------------------------------------------------------------------------
// Win32 thread support.
@@ -1521,16 +1575,9 @@ class Thread::PlatformData : public Malloced {
// handle until it is started.
Thread::Thread(const Options& options)
- : stack_size_(options.stack_size) {
- data_ = new PlatformData(kNoThread);
- set_name(options.name);
-}
-
-
-Thread::Thread(const char* name)
- : stack_size_(0) {
+ : stack_size_(options.stack_size()) {
data_ = new PlatformData(kNoThread);
- set_name(name);
+ set_name(options.name());
}
@@ -1820,7 +1867,7 @@ bool Win32Socket::SetReuseAddress(bool reuse_address) {
}
-bool Socket::Setup() {
+bool Socket::SetUp() {
// Initialize Winsock32
int err;
WSADATA winsock_data;
@@ -1896,10 +1943,18 @@ class Sampler::PlatformData : public Malloced {
class SamplerThread : public Thread {
public:
+ static const int kSamplerThreadStackSize = 64 * KB;
+
explicit SamplerThread(int interval)
- : Thread("SamplerThread"),
+ : Thread(Thread::Options("SamplerThread", kSamplerThreadStackSize)),
interval_(interval) {}
+ static void SetUp() {
+ if (!mutex_) {
+ mutex_ = OS::CreateMutex();
+ }
+ }
+
static void AddActiveSampler(Sampler* sampler) {
ScopedLock lock(mutex_);
SamplerRegistry::AddActiveSampler(sampler);
@@ -2001,14 +2056,28 @@ class SamplerThread : public Thread {
static Mutex* mutex_;
static SamplerThread* instance_;
+ private:
DISALLOW_COPY_AND_ASSIGN(SamplerThread);
};
-Mutex* SamplerThread::mutex_ = OS::CreateMutex();
+Mutex* SamplerThread::mutex_ = NULL;
SamplerThread* SamplerThread::instance_ = NULL;
+void OS::SetUp() {
+ // Seed the random number generator.
+ // Convert the current time to a 64-bit integer first, before converting it
+ // to an unsigned. Going directly can cause an overflow and the seed to be
+ // set to all ones. The seed will be identical for different instances that
+ // call this setup code within the same millisecond.
+ uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
+ srand(static_cast<unsigned int>(seed));
+ limit_mutex = CreateMutex();
+ SamplerThread::SetUp();
+}
+
+
Sampler::Sampler(Isolate* isolate, int interval)
: isolate_(isolate),
interval_(interval),
diff --git a/src/3rdparty/v8/src/platform.h b/src/3rdparty/v8/src/platform.h
index f84b6b1..3b2aa3c 100644
--- a/src/3rdparty/v8/src/platform.h
+++ b/src/3rdparty/v8/src/platform.h
@@ -79,6 +79,7 @@ int random();
#endif // WIN32
#include "atomicops.h"
+#include "lazy-instance.h"
#include "platform-tls.h"
#include "utils.h"
#include "v8globals.h"
@@ -96,6 +97,13 @@ class Mutex;
double ceiling(double x);
double modulo(double x, double y);
+// Custom implementation of sin, cos, tan and log.
+double fast_sin(double input);
+double fast_cos(double input);
+double fast_tan(double input);
+double fast_log(double input);
+double fast_sqrt(double input);
+
// Forward declarations.
class Socket;
@@ -109,7 +117,11 @@ class Socket;
class OS {
public:
// Initializes the platform OS support. Called once at VM startup.
- static void Setup();
+ static void SetUp();
+
+ // Initializes the platform OS support that depend on CPU features. This is
+ // called after CPU initialization.
+ static void PostSetUp();
// Returns the accumulated user time for thread. This routine
// can be used for profiling. The implementation should
@@ -172,6 +184,10 @@ class OS {
bool is_executable);
static void Free(void* address, const size_t size);
+ // This is the granularity at which the ProtectCode(...) call can set page
+ // permissions.
+ static intptr_t CommitPageSize();
+
// Mark code segments non-writable.
static void ProtectCode(void* address, const size_t size);
@@ -352,6 +368,9 @@ class VirtualMemory {
// Uncommit real memory. Returns whether the operation succeeded.
bool Uncommit(void* address, size_t size);
+ // Creates a single guard page at the given address.
+ bool Guard(void* address);
+
void Release() {
ASSERT(IsReserved());
// Notice: Order is important here. The VirtualMemory object might live
@@ -408,16 +427,22 @@ class Thread {
LOCAL_STORAGE_KEY_MAX_VALUE = kMaxInt
};
- struct Options {
- Options() : name("v8:<unknown>"), stack_size(0) {}
+ class Options {
+ public:
+ Options() : name_("v8:<unknown>"), stack_size_(0) {}
+ Options(const char* name, int stack_size = 0)
+ : name_(name), stack_size_(stack_size) {}
+
+ const char* name() const { return name_; }
+ int stack_size() const { return stack_size_; }
- const char* name;
- int stack_size;
+ private:
+ const char* name_;
+ int stack_size_;
};
// Create new thread.
explicit Thread(const Options& options);
- explicit Thread(const char* name);
virtual ~Thread();
// Start new thread by calling the Run() method in the new thread.
@@ -473,7 +498,7 @@ class Thread {
PlatformData* data() { return data_; }
private:
- void set_name(const char *name);
+ void set_name(const char* name);
PlatformData* data_;
@@ -509,6 +534,25 @@ class Mutex {
virtual bool TryLock() = 0;
};
+struct CreateMutexTrait {
+ static Mutex* Create() {
+ return OS::CreateMutex();
+ }
+};
+
+// POD Mutex initialized lazily (i.e. the first time Pointer() is called).
+// Usage:
+// static LazyMutex my_mutex = LAZY_MUTEX_INITIALIZER;
+//
+// void my_function() {
+// ScopedLock my_lock(my_mutex.Pointer());
+// // Do something.
+// }
+//
+typedef LazyDynamicInstance<
+ Mutex, CreateMutexTrait, ThreadSafeInitOnceTrait>::type LazyMutex;
+
+#define LAZY_MUTEX_INITIALIZER LAZY_DYNAMIC_INSTANCE_INITIALIZER
// ----------------------------------------------------------------------------
// ScopedLock
@@ -549,7 +593,7 @@ class Semaphore {
virtual void Wait() = 0;
// Suspends the calling thread until the counter is non zero or the timeout
- // time has passsed. If timeout happens the return value is false and the
+ // time has passed. If timeout happens the return value is false and the
// counter is unchanged. Otherwise the semaphore counter is decremented and
// true is returned. The timeout value is specified in microseconds.
virtual bool Wait(int timeout) = 0;
@@ -558,6 +602,31 @@ class Semaphore {
virtual void Signal() = 0;
};
+template <int InitialValue>
+struct CreateSemaphoreTrait {
+ static Semaphore* Create() {
+ return OS::CreateSemaphore(InitialValue);
+ }
+};
+
+// POD Semaphore initialized lazily (i.e. the first time Pointer() is called).
+// Usage:
+// // The following semaphore starts at 0.
+// static LazySemaphore<0>::type my_semaphore = LAZY_SEMAPHORE_INITIALIZER;
+//
+// void my_function() {
+// // Do something with my_semaphore.Pointer().
+// }
+//
+template <int InitialValue>
+struct LazySemaphore {
+ typedef typename LazyDynamicInstance<
+ Semaphore, CreateSemaphoreTrait<InitialValue>,
+ ThreadSafeInitOnceTrait>::type type;
+};
+
+#define LAZY_SEMAPHORE_INITIALIZER LAZY_DYNAMIC_INSTANCE_INITIALIZER
+
// ----------------------------------------------------------------------------
// Socket
@@ -589,7 +658,7 @@ class Socket {
virtual bool IsValid() const = 0;
- static bool Setup();
+ static bool SetUp();
static int LastError();
static uint16_t HToN(uint16_t value);
static uint16_t NToH(uint16_t value);
diff --git a/src/3rdparty/v8/src/preparse-data.h b/src/3rdparty/v8/src/preparse-data.h
index c4ddecd..f347430 100644
--- a/src/3rdparty/v8/src/preparse-data.h
+++ b/src/3rdparty/v8/src/preparse-data.h
@@ -49,11 +49,11 @@ class ParserRecorder {
int end,
int literals,
int properties,
- StrictModeFlag strict_mode) = 0;
+ LanguageMode language_mode) = 0;
// Logs a symbol creation of a literal or identifier.
virtual void LogAsciiSymbol(int start, Vector<const char> literal) { }
- virtual void LogUC16Symbol(int start, Vector<const uc16> literal) { }
+ virtual void LogUtf16Symbol(int start, Vector<const uc16> literal) { }
// Logs an error message and marks the log as containing an error.
// Further logging will be ignored, and ExtractData will return a vector
@@ -89,12 +89,12 @@ class FunctionLoggingParserRecorder : public ParserRecorder {
int end,
int literals,
int properties,
- StrictModeFlag strict_mode) {
+ LanguageMode language_mode) {
function_store_.Add(start);
function_store_.Add(end);
function_store_.Add(literals);
function_store_.Add(properties);
- function_store_.Add(strict_mode);
+ function_store_.Add(language_mode);
}
// Logs an error message and marks the log as containing an error.
@@ -149,7 +149,7 @@ class PartialParserRecorder : public FunctionLoggingParserRecorder {
public:
PartialParserRecorder() : FunctionLoggingParserRecorder() { }
virtual void LogAsciiSymbol(int start, Vector<const char> literal) { }
- virtual void LogUC16Symbol(int start, Vector<const uc16> literal) { }
+ virtual void LogUtf16Symbol(int start, Vector<const uc16> literal) { }
virtual ~PartialParserRecorder() { }
virtual Vector<unsigned> ExtractData();
virtual int symbol_position() { return 0; }
@@ -171,7 +171,7 @@ class CompleteParserRecorder: public FunctionLoggingParserRecorder {
LogSymbol(start, hash, true, Vector<const byte>::cast(literal));
}
- virtual void LogUC16Symbol(int start, Vector<const uc16> literal) {
+ virtual void LogUtf16Symbol(int start, Vector<const uc16> literal) {
if (!is_recording_) return;
int hash = vector_hash(literal);
LogSymbol(start, hash, false, Vector<const byte>::cast(literal));
diff --git a/src/3rdparty/v8/src/preparser-api.cc b/src/3rdparty/v8/src/preparser-api.cc
index 1bca9a3..6e8556a 100644
--- a/src/3rdparty/v8/src/preparser-api.cc
+++ b/src/3rdparty/v8/src/preparser-api.cc
@@ -46,10 +46,10 @@ namespace v8 {
namespace internal {
// UTF16Buffer based on a v8::UnicodeInputStream.
-class InputStreamUTF16Buffer : public UC16CharacterStream {
+class InputStreamUtf16Buffer : public Utf16CharacterStream {
public:
- /* The InputStreamUTF16Buffer maintains an internal buffer
- * that is filled in chunks from the UC16CharacterStream.
+ /* The InputStreamUtf16Buffer maintains an internal buffer
+ * that is filled in chunks from the Utf16CharacterStream.
* It also maintains unlimited pushback capability, but optimized
* for small pushbacks.
* The pushback_buffer_ pointer points to the limit of pushbacks
@@ -60,8 +60,8 @@ class InputStreamUTF16Buffer : public UC16CharacterStream {
* new buffer. When this buffer is read to the end again, the cursor is
* switched back to the internal buffer
*/
- explicit InputStreamUTF16Buffer(v8::UnicodeInputStream* stream)
- : UC16CharacterStream(),
+ explicit InputStreamUtf16Buffer(v8::UnicodeInputStream* stream)
+ : Utf16CharacterStream(),
stream_(stream),
pushback_buffer_(buffer_),
pushback_buffer_end_cache_(NULL),
@@ -70,7 +70,7 @@ class InputStreamUTF16Buffer : public UC16CharacterStream {
buffer_cursor_ = buffer_end_ = buffer_ + kPushBackSize;
}
- virtual ~InputStreamUTF16Buffer() {
+ virtual ~InputStreamUtf16Buffer() {
if (pushback_buffer_backing_ != NULL) {
DeleteArray(pushback_buffer_backing_);
}
@@ -127,12 +127,18 @@ class InputStreamUTF16Buffer : public UC16CharacterStream {
uc16* buffer_start = buffer_ + kPushBackSize;
buffer_cursor_ = buffer_end_ = buffer_start;
while ((value = stream_->Next()) >= 0) {
- if (value > static_cast<int32_t>(unibrow::Utf8::kMaxThreeByteChar)) {
- value = unibrow::Utf8::kBadChar;
+ if (value >
+ static_cast<int32_t>(unibrow::Utf16::kMaxNonSurrogateCharCode)) {
+ buffer_start[buffer_end_++ - buffer_start] =
+ unibrow::Utf16::LeadSurrogate(value);
+ buffer_start[buffer_end_++ - buffer_start] =
+ unibrow::Utf16::TrailSurrogate(value);
+ } else {
+ // buffer_end_ is a const pointer, but buffer_ is writable.
+ buffer_start[buffer_end_++ - buffer_start] = static_cast<uc16>(value);
}
- // buffer_end_ is a const pointer, but buffer_ is writable.
- buffer_start[buffer_end_++ - buffer_start] = static_cast<uc16>(value);
- if (buffer_end_ == buffer_ + kPushBackSize + kBufferSize) break;
+ // Stop one before the end of the buffer in case we get a surrogate pair.
+ if (buffer_end_ <= buffer_ + 1 + kPushBackSize + kBufferSize) break;
}
return buffer_end_ > buffer_start;
}
@@ -179,7 +185,7 @@ UnicodeInputStream::~UnicodeInputStream() { }
PreParserData Preparse(UnicodeInputStream* input, size_t max_stack) {
- internal::InputStreamUTF16Buffer buffer(input);
+ internal::InputStreamUtf16Buffer buffer(input);
uintptr_t stack_limit = reinterpret_cast<uintptr_t>(&buffer) - max_stack;
internal::UnicodeCache unicode_cache;
internal::Scanner scanner(&unicode_cache);
diff --git a/src/3rdparty/v8/src/preparser.cc b/src/3rdparty/v8/src/preparser.cc
index 291dfd3..20d3b9c 100644
--- a/src/3rdparty/v8/src/preparser.cc
+++ b/src/3rdparty/v8/src/preparser.cc
@@ -52,6 +52,34 @@ int isfinite(double value);
namespace preparser {
+PreParser::PreParseResult PreParser::PreParseLazyFunction(
+ i::LanguageMode mode, i::ParserRecorder* log) {
+ log_ = log;
+ // Lazy functions always have trivial outer scopes (no with/catch scopes).
+ Scope top_scope(&scope_, kTopLevelScope);
+ set_language_mode(mode);
+ Scope function_scope(&scope_, kFunctionScope);
+ ASSERT_EQ(i::Token::LBRACE, scanner_->current_token());
+ bool ok = true;
+ int start_position = scanner_->peek_location().beg_pos;
+ ParseLazyFunctionLiteralBody(&ok);
+ if (stack_overflow_) return kPreParseStackOverflow;
+ if (!ok) {
+ ReportUnexpectedToken(scanner_->current_token());
+ } else {
+ ASSERT_EQ(i::Token::RBRACE, scanner_->peek());
+ if (!is_classic_mode()) {
+ int end_pos = scanner_->location().end_pos;
+ CheckOctalLiteral(start_position, end_pos, &ok);
+ if (ok) {
+ CheckDelayedStrictModeViolation(start_position, end_pos, &ok);
+ }
+ }
+ }
+ return kPreParseSuccess;
+}
+
+
// Preparsing checks a JavaScript program and emits preparse-data that helps
// a later parsing to be faster.
// See preparser-data.h for the data.
@@ -149,7 +177,8 @@ PreParser::SourceElements PreParser::ParseSourceElements(int end_token,
Statement statement = ParseSourceElement(CHECK_OK);
if (allow_directive_prologue) {
if (statement.IsUseStrictLiteral()) {
- set_strict_mode();
+ set_language_mode(harmony_scoping_ ?
+ i::EXTENDED_MODE : i::STRICT_MODE);
} else if (!statement.IsStringLiteral()) {
allow_directive_prologue = false;
}
@@ -198,6 +227,7 @@ PreParser::Statement PreParser::ParseStatement(bool* ok) {
return ParseBlock(ok);
case i::Token::CONST:
+ case i::Token::LET:
case i::Token::VAR:
return ParseVariableStatement(kStatement, ok);
@@ -242,7 +272,7 @@ PreParser::Statement PreParser::ParseStatement(bool* ok) {
i::Scanner::Location start_location = scanner_->peek_location();
Statement statement = ParseFunctionDeclaration(CHECK_OK);
i::Scanner::Location end_location = scanner_->location();
- if (strict_mode() || harmony_scoping_) {
+ if (!is_classic_mode()) {
ReportMessageAt(start_location.beg_pos, end_location.end_pos,
"strict_function", NULL);
*ok = false;
@@ -295,7 +325,7 @@ PreParser::Statement PreParser::ParseBlock(bool* ok) {
//
Expect(i::Token::LBRACE, CHECK_OK);
while (peek() != i::Token::RBRACE) {
- if (harmony_scoping_) {
+ if (is_extended_mode()) {
ParseSourceElement(CHECK_OK);
} else {
ParseStatement(CHECK_OK);
@@ -348,24 +378,53 @@ PreParser::Statement PreParser::ParseVariableDeclarations(
if (peek() == i::Token::VAR) {
Consume(i::Token::VAR);
} else if (peek() == i::Token::CONST) {
- if (harmony_scoping_) {
- if (var_context != kSourceElement &&
- var_context != kForStatement) {
+ // TODO(ES6): The ES6 Draft Rev4 section 12.2.2 reads:
+ //
+ // ConstDeclaration : const ConstBinding (',' ConstBinding)* ';'
+ //
+ // * It is a Syntax Error if the code that matches this production is not
+ // contained in extended code.
+ //
+ // However disallowing const in classic mode will break compatibility with
+ // existing pages. Therefore we keep allowing const with the old
+ // non-harmony semantics in classic mode.
+ Consume(i::Token::CONST);
+ switch (language_mode()) {
+ case i::CLASSIC_MODE:
+ break;
+ case i::STRICT_MODE: {
i::Scanner::Location location = scanner_->peek_location();
- ReportMessageAt(location.beg_pos, location.end_pos,
- "unprotected_const", NULL);
+ ReportMessageAt(location, "strict_const", NULL);
*ok = false;
return Statement::Default();
}
- require_initializer = true;
- } else if (strict_mode()) {
+ case i::EXTENDED_MODE:
+ if (var_context != kSourceElement &&
+ var_context != kForStatement) {
+ i::Scanner::Location location = scanner_->peek_location();
+ ReportMessageAt(location.beg_pos, location.end_pos,
+ "unprotected_const", NULL);
+ *ok = false;
+ return Statement::Default();
+ }
+ require_initializer = true;
+ break;
+ }
+ } else if (peek() == i::Token::LET) {
+ // ES6 Draft Rev4 section 12.2.1:
+ //
+ // LetDeclaration : let LetBindingList ;
+ //
+ // * It is a Syntax Error if the code that matches this production is not
+ // contained in extended code.
+ if (!is_extended_mode()) {
i::Scanner::Location location = scanner_->peek_location();
- ReportMessageAt(location, "strict_const", NULL);
+ ReportMessageAt(location.beg_pos, location.end_pos,
+ "illegal_let", NULL);
*ok = false;
return Statement::Default();
}
- Consume(i::Token::CONST);
- } else if (peek() == i::Token::LET) {
+ Consume(i::Token::LET);
if (var_context != kSourceElement &&
var_context != kForStatement) {
i::Scanner::Location location = scanner_->peek_location();
@@ -374,7 +433,6 @@ PreParser::Statement PreParser::ParseVariableDeclarations(
*ok = false;
return Statement::Default();
}
- Consume(i::Token::LET);
} else {
*ok = false;
return Statement::Default();
@@ -389,7 +447,7 @@ PreParser::Statement PreParser::ParseVariableDeclarations(
// Parse variable name.
if (nvars > 0) Consume(i::Token::COMMA);
Identifier identifier = ParseIdentifier(CHECK_OK);
- if (strict_mode() && !identifier.IsValidStrictVariable()) {
+ if (!is_classic_mode() && !identifier.IsValidStrictVariable()) {
StrictModeIdentifierViolation(scanner_->location(),
"strict_var_name",
identifier,
@@ -417,7 +475,7 @@ PreParser::Statement PreParser::ParseExpressionOrLabelledStatement(bool* ok) {
Expression expr = ParseExpression(true, CHECK_OK);
if (expr.IsRawIdentifier()) {
ASSERT(!expr.AsIdentifier().IsFutureReserved());
- ASSERT(!strict_mode() || !expr.AsIdentifier().IsFutureStrictReserved());
+ ASSERT(is_classic_mode() || !expr.AsIdentifier().IsFutureStrictReserved());
if (peek() == i::Token::COLON) {
Consume(i::Token::COLON);
return ParseStatement(ok);
@@ -513,7 +571,7 @@ PreParser::Statement PreParser::ParseWithStatement(bool* ok) {
// WithStatement ::
// 'with' '(' Expression ')' Statement
Expect(i::Token::WITH, CHECK_OK);
- if (strict_mode()) {
+ if (!is_classic_mode()) {
i::Scanner::Location location = scanner_->location();
ReportMessageAt(location, "strict_mode_with", NULL);
*ok = false;
@@ -569,6 +627,7 @@ PreParser::Statement PreParser::ParseDoWhileStatement(bool* ok) {
Expect(i::Token::LPAREN, CHECK_OK);
ParseExpression(true, CHECK_OK);
Expect(i::Token::RPAREN, ok);
+ if (peek() == i::Token::SEMICOLON) Consume(i::Token::SEMICOLON);
return Statement::Default();
}
@@ -682,7 +741,7 @@ PreParser::Statement PreParser::ParseTryStatement(bool* ok) {
Consume(i::Token::CATCH);
Expect(i::Token::LPAREN, CHECK_OK);
Identifier id = ParseIdentifier(CHECK_OK);
- if (strict_mode() && !id.IsValidStrictVariable()) {
+ if (!is_classic_mode() && !id.IsValidStrictVariable()) {
StrictModeIdentifierViolation(scanner_->location(),
"strict_catch_variable",
id,
@@ -760,7 +819,8 @@ PreParser::Expression PreParser::ParseAssignmentExpression(bool accept_IN,
return expression;
}
- if (strict_mode() && expression.IsIdentifier() &&
+ if (!is_classic_mode() &&
+ expression.IsIdentifier() &&
expression.AsIdentifier().IsEvalOrArguments()) {
i::Scanner::Location after = scanner_->location();
ReportMessageAt(before.beg_pos, after.end_pos,
@@ -848,7 +908,8 @@ PreParser::Expression PreParser::ParseUnaryExpression(bool* ok) {
op = Next();
i::Scanner::Location before = scanner_->peek_location();
Expression expression = ParseUnaryExpression(CHECK_OK);
- if (strict_mode() && expression.IsIdentifier() &&
+ if (!is_classic_mode() &&
+ expression.IsIdentifier() &&
expression.AsIdentifier().IsEvalOrArguments()) {
i::Scanner::Location after = scanner_->location();
ReportMessageAt(before.beg_pos, after.end_pos,
@@ -870,7 +931,8 @@ PreParser::Expression PreParser::ParsePostfixExpression(bool* ok) {
Expression expression = ParseLeftHandSideExpression(CHECK_OK);
if (!scanner_->HasAnyLineTerminatorBeforeNext() &&
i::Token::IsCountOp(peek())) {
- if (strict_mode() && expression.IsIdentifier() &&
+ if (!is_classic_mode() &&
+ expression.IsIdentifier() &&
expression.AsIdentifier().IsEvalOrArguments()) {
i::Scanner::Location after = scanner_->location();
ReportMessageAt(before.beg_pos, after.end_pos,
@@ -1057,7 +1119,7 @@ PreParser::Expression PreParser::ParsePrimaryExpression(bool* ok) {
}
case i::Token::FUTURE_STRICT_RESERVED_WORD:
- if (strict_mode()) {
+ if (!is_classic_mode()) {
Next();
i::Scanner::Location location = scanner_->location();
ReportMessageAt(location, "strict_reserved_word", NULL);
@@ -1152,12 +1214,12 @@ void PreParser::CheckDuplicate(DuplicateFinder* finder,
old_type = finder->AddAsciiSymbol(scanner_->literal_ascii_string(),
type);
} else {
- old_type = finder->AddUC16Symbol(scanner_->literal_uc16_string(), type);
+ old_type = finder->AddUtf16Symbol(scanner_->literal_utf16_string(), type);
}
if (HasConflict(old_type, type)) {
if (IsDataDataConflict(old_type, type)) {
// Both are data properties.
- if (!strict_mode()) return;
+ if (is_classic_mode()) return;
ReportMessageAt(scanner_->location(),
"strict_duplicate_property", NULL);
} else if (IsDataAccessorConflict(old_type, type)) {
@@ -1325,7 +1387,7 @@ PreParser::Expression PreParser::ParseFunctionLiteral(bool* ok) {
duplicate_finder.AddAsciiSymbol(scanner_->literal_ascii_string(), 1);
} else {
prev_value =
- duplicate_finder.AddUC16Symbol(scanner_->literal_uc16_string(), 1);
+ duplicate_finder.AddUtf16Symbol(scanner_->literal_utf16_string(), 1);
}
if (prev_value != 0) {
@@ -1340,9 +1402,6 @@ PreParser::Expression PreParser::ParseFunctionLiteral(bool* ok) {
}
Expect(i::Token::RPAREN, CHECK_OK);
- Expect(i::Token::LBRACE, CHECK_OK);
- int function_block_pos = scanner_->location().beg_pos;
-
// Determine if the function will be lazily compiled.
// Currently only happens to top-level functions.
// Optimistically assume that all top-level functions are lazily compiled.
@@ -1351,26 +1410,15 @@ PreParser::Expression PreParser::ParseFunctionLiteral(bool* ok) {
!parenthesized_function_);
parenthesized_function_ = false;
+ Expect(i::Token::LBRACE, CHECK_OK);
if (is_lazily_compiled) {
- log_->PauseRecording();
- ParseSourceElements(i::Token::RBRACE, ok);
- log_->ResumeRecording();
- if (!*ok) Expression::Default();
-
- Expect(i::Token::RBRACE, CHECK_OK);
-
- // Position right after terminal '}'.
- int end_pos = scanner_->location().end_pos;
- log_->LogFunction(function_block_pos, end_pos,
- function_scope.materialized_literal_count(),
- function_scope.expected_properties(),
- strict_mode_flag());
+ ParseLazyFunctionLiteralBody(CHECK_OK);
} else {
- ParseSourceElements(i::Token::RBRACE, CHECK_OK);
- Expect(i::Token::RBRACE, CHECK_OK);
+ ParseSourceElements(i::Token::RBRACE, ok);
}
+ Expect(i::Token::RBRACE, CHECK_OK);
- if (strict_mode()) {
+ if (!is_classic_mode()) {
int end_position = scanner_->location().end_pos;
CheckOctalLiteral(start_position, end_position, CHECK_OK);
CheckDelayedStrictModeViolation(start_position, end_position, CHECK_OK);
@@ -1381,6 +1429,23 @@ PreParser::Expression PreParser::ParseFunctionLiteral(bool* ok) {
}
+void PreParser::ParseLazyFunctionLiteralBody(bool* ok) {
+ int body_start = scanner_->location().beg_pos;
+ log_->PauseRecording();
+ ParseSourceElements(i::Token::RBRACE, ok);
+ log_->ResumeRecording();
+ if (!*ok) return;
+
+ // Position right after terminal '}'.
+ ASSERT_EQ(i::Token::RBRACE, scanner_->peek());
+ int body_end = scanner_->peek_location().end_pos;
+ log_->LogFunction(body_start, body_end,
+ scope_->materialized_literal_count(),
+ scope_->expected_properties(),
+ language_mode());
+}
+
+
PreParser::Expression PreParser::ParseV8Intrinsic(bool* ok) {
// CallRuntime ::
// '%' Identifier Arguments
@@ -1420,7 +1485,7 @@ void PreParser::LogSymbol() {
if (scanner_->is_literal_ascii()) {
log_->LogAsciiSymbol(identifier_pos, scanner_->literal_ascii_string());
} else {
- log_->LogUC16Symbol(identifier_pos, scanner_->literal_uc16_string());
+ log_->LogUtf16Symbol(identifier_pos, scanner_->literal_utf16_string());
}
}
@@ -1474,7 +1539,7 @@ PreParser::Identifier PreParser::ParseIdentifier(bool* ok) {
return GetIdentifierSymbol();
}
case i::Token::FUTURE_STRICT_RESERVED_WORD:
- if (strict_mode()) {
+ if (!is_classic_mode()) {
i::Scanner::Location location = scanner_->location();
ReportMessageAt(location.beg_pos, location.end_pos,
"strict_reserved_word", NULL);
@@ -1493,7 +1558,7 @@ PreParser::Identifier PreParser::ParseIdentifier(bool* ok) {
void PreParser::SetStrictModeViolation(i::Scanner::Location location,
const char* type,
bool* ok) {
- if (strict_mode()) {
+ if (!is_classic_mode()) {
ReportMessageAt(location, type, NULL);
*ok = false;
return;
@@ -1533,7 +1598,7 @@ void PreParser::StrictModeIdentifierViolation(i::Scanner::Location location,
} else if (identifier.IsFutureStrictReserved()) {
type = "strict_reserved_word";
}
- if (strict_mode()) {
+ if (!is_classic_mode()) {
ReportMessageAt(location, type, NULL);
*ok = false;
return;
@@ -1592,7 +1657,7 @@ int DuplicateFinder::AddAsciiSymbol(i::Vector<const char> key, int value) {
return AddSymbol(i::Vector<const byte>::cast(key), true, value);
}
-int DuplicateFinder::AddUC16Symbol(i::Vector<const uint16_t> key, int value) {
+int DuplicateFinder::AddUtf16Symbol(i::Vector<const uint16_t> key, int value) {
return AddSymbol(i::Vector<const byte>::cast(key), false, value);
}
diff --git a/src/3rdparty/v8/src/preparser.h b/src/3rdparty/v8/src/preparser.h
index 647a142..f3a4347 100644
--- a/src/3rdparty/v8/src/preparser.h
+++ b/src/3rdparty/v8/src/preparser.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -28,6 +28,7 @@
#ifndef V8_PREPARSER_H
#define V8_PREPARSER_H
+#include "hashmap.h"
#include "token.h"
#include "scanner.h"
@@ -64,7 +65,7 @@ class DuplicateFinder {
map_(&Match) { }
int AddAsciiSymbol(i::Vector<const char> key, int value);
- int AddUC16Symbol(i::Vector<const uint16_t> key, int value);
+ int AddUtf16Symbol(i::Vector<const uint16_t> key, int value);
// Add a a number literal by converting it (if necessary)
// to the string that ToString(ToNumber(literal)) would generate.
// and then adding that string with AddAsciiSymbol.
@@ -110,6 +111,26 @@ class PreParser {
kPreParseSuccess
};
+
+ PreParser(i::Scanner* scanner,
+ i::ParserRecorder* log,
+ uintptr_t stack_limit,
+ bool allow_lazy,
+ bool allow_natives_syntax,
+ bool allow_modules)
+ : scanner_(scanner),
+ log_(log),
+ scope_(NULL),
+ stack_limit_(stack_limit),
+ strict_mode_violation_location_(i::Scanner::Location::invalid()),
+ strict_mode_violation_type_(NULL),
+ stack_overflow_(false),
+ allow_lazy_(allow_lazy),
+ allow_modules_(allow_modules),
+ allow_natives_syntax_(allow_natives_syntax),
+ parenthesized_function_(false),
+ harmony_scoping_(scanner->HarmonyScoping()) { }
+
~PreParser() {}
// Pre-parse the program from the character stream; returns true on
@@ -122,10 +143,22 @@ class PreParser {
uintptr_t stack_limit) {
bool allow_lazy = (flags & i::kAllowLazy) != 0;
bool allow_natives_syntax = (flags & i::kAllowNativesSyntax) != 0;
- return PreParser(scanner, log, stack_limit,
- allow_lazy, allow_natives_syntax).PreParse();
+ bool allow_modules = (flags & i::kAllowModules) != 0;
+ return PreParser(scanner, log, stack_limit, allow_lazy,
+ allow_natives_syntax, allow_modules).PreParse();
}
+ // Parses a single function literal, from the opening parentheses before
+ // parameters to the closing brace after the body.
+ // Returns a FunctionEntry describing the body of the funciton in enough
+ // detail that it can be lazily compiled.
+ // The scanner is expected to have matched the "function" keyword and
+ // parameters, and have consumed the initial '{'.
+ // At return, unless an error occured, the scanner is positioned before the
+ // the final '}'.
+ PreParseResult PreParseLazyFunction(i::LanguageMode mode,
+ i::ParserRecorder* log);
+
private:
// Used to detect duplicates in object literals. Each of the values
// kGetterProperty, kSetterProperty and kValueProperty represents
@@ -417,8 +450,8 @@ class PreParser {
materialized_literal_count_(0),
expected_properties_(0),
with_nesting_count_(0),
- strict_mode_flag_((prev_ != NULL) ? prev_->strict_mode_flag()
- : i::kNonStrictMode) {
+ language_mode_(
+ (prev_ != NULL) ? prev_->language_mode() : i::CLASSIC_MODE) {
*variable = this;
}
~Scope() { *variable_ = prev_; }
@@ -428,12 +461,14 @@ class PreParser {
int expected_properties() { return expected_properties_; }
int materialized_literal_count() { return materialized_literal_count_; }
bool IsInsideWith() { return with_nesting_count_ != 0; }
- bool is_strict_mode() { return strict_mode_flag_ == i::kStrictMode; }
- i::StrictModeFlag strict_mode_flag() {
- return strict_mode_flag_;
+ bool is_classic_mode() {
+ return language_mode_ == i::CLASSIC_MODE;
+ }
+ i::LanguageMode language_mode() {
+ return language_mode_;
}
- void set_strict_mode_flag(i::StrictModeFlag strict_mode_flag) {
- strict_mode_flag_ = strict_mode_flag;
+ void set_language_mode(i::LanguageMode language_mode) {
+ language_mode_ = language_mode;
}
void EnterWith() { with_nesting_count_++; }
void LeaveWith() { with_nesting_count_--; }
@@ -445,27 +480,9 @@ class PreParser {
int materialized_literal_count_;
int expected_properties_;
int with_nesting_count_;
- i::StrictModeFlag strict_mode_flag_;
+ i::LanguageMode language_mode_;
};
- // Private constructor only used in PreParseProgram.
- PreParser(i::Scanner* scanner,
- i::ParserRecorder* log,
- uintptr_t stack_limit,
- bool allow_lazy,
- bool allow_natives_syntax)
- : scanner_(scanner),
- log_(log),
- scope_(NULL),
- stack_limit_(stack_limit),
- strict_mode_violation_location_(i::Scanner::Location::invalid()),
- strict_mode_violation_type_(NULL),
- stack_overflow_(false),
- allow_lazy_(allow_lazy),
- allow_natives_syntax_(allow_natives_syntax),
- parenthesized_function_(false),
- harmony_scoping_(scanner->HarmonyScoping()) { }
-
// Preparse the program. Only called in PreParseProgram after creating
// the instance.
PreParseResult PreParse() {
@@ -476,7 +493,7 @@ class PreParser {
if (stack_overflow_) return kPreParseStackOverflow;
if (!ok) {
ReportUnexpectedToken(scanner_->current_token());
- } else if (scope_->is_strict_mode()) {
+ } else if (!scope_->is_classic_mode()) {
CheckOctalLiteral(start_position, scanner_->location().end_pos, &ok);
}
return kPreParseSuccess;
@@ -545,6 +562,7 @@ class PreParser {
Arguments ParseArguments(bool* ok);
Expression ParseFunctionLiteral(bool* ok);
+ void ParseLazyFunctionLiteralBody(bool* ok);
Identifier ParseIdentifier(bool* ok);
Identifier ParseIdentifierName(bool* ok);
@@ -580,13 +598,19 @@ class PreParser {
bool peek_any_identifier();
- void set_strict_mode() {
- scope_->set_strict_mode_flag(i::kStrictMode);
+ void set_language_mode(i::LanguageMode language_mode) {
+ scope_->set_language_mode(language_mode);
+ }
+
+ bool is_classic_mode() {
+ return scope_->language_mode() == i::CLASSIC_MODE;
}
- bool strict_mode() { return scope_->strict_mode_flag() == i::kStrictMode; }
+ bool is_extended_mode() {
+ return scope_->language_mode() == i::EXTENDED_MODE;
+ }
- i::StrictModeFlag strict_mode_flag() { return scope_->strict_mode_flag(); }
+ i::LanguageMode language_mode() { return scope_->language_mode(); }
void Consume(i::Token::Value token) { Next(); }
@@ -610,7 +634,7 @@ class PreParser {
void SetStrictModeViolation(i::Scanner::Location,
const char* type,
- bool *ok);
+ bool* ok);
void CheckDelayedStrictModeViolation(int beg_pos, int end_pos, bool* ok);
@@ -627,6 +651,7 @@ class PreParser {
const char* strict_mode_violation_type_;
bool stack_overflow_;
bool allow_lazy_;
+ bool allow_modules_;
bool allow_natives_syntax_;
bool parenthesized_function_;
bool harmony_scoping_;
diff --git a/src/3rdparty/v8/src/prettyprinter.cc b/src/3rdparty/v8/src/prettyprinter.cc
index 73812fd..0d8dadc 100644
--- a/src/3rdparty/v8/src/prettyprinter.cc
+++ b/src/3rdparty/v8/src/prettyprinter.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -58,17 +58,70 @@ void PrettyPrinter::VisitBlock(Block* node) {
}
-void PrettyPrinter::VisitDeclaration(Declaration* node) {
+void PrettyPrinter::VisitVariableDeclaration(VariableDeclaration* node) {
Print("var ");
PrintLiteral(node->proxy()->name(), false);
- if (node->fun() != NULL) {
- Print(" = ");
- PrintFunctionLiteral(node->fun());
- }
Print(";");
}
+void PrettyPrinter::VisitFunctionDeclaration(FunctionDeclaration* node) {
+ Print("function ");
+ PrintLiteral(node->proxy()->name(), false);
+ Print(" = ");
+ PrintFunctionLiteral(node->fun());
+ Print(";");
+}
+
+
+void PrettyPrinter::VisitModuleDeclaration(ModuleDeclaration* node) {
+ Print("module ");
+ PrintLiteral(node->proxy()->name(), false);
+ Print(" = ");
+ Visit(node->module());
+ Print(";");
+}
+
+
+void PrettyPrinter::VisitImportDeclaration(ImportDeclaration* node) {
+ Print("import ");
+ PrintLiteral(node->proxy()->name(), false);
+ Print(" from ");
+ Visit(node->module());
+ Print(";");
+}
+
+
+void PrettyPrinter::VisitExportDeclaration(ExportDeclaration* node) {
+ Print("export ");
+ PrintLiteral(node->proxy()->name(), false);
+ Print(";");
+}
+
+
+void PrettyPrinter::VisitModuleLiteral(ModuleLiteral* node) {
+ VisitBlock(node->body());
+}
+
+
+void PrettyPrinter::VisitModuleVariable(ModuleVariable* node) {
+ Visit(node->proxy());
+}
+
+
+void PrettyPrinter::VisitModulePath(ModulePath* node) {
+ Visit(node->module());
+ Print(".");
+ PrintLiteral(node->name(), false);
+}
+
+
+void PrettyPrinter::VisitModuleUrl(ModuleUrl* node) {
+ Print("at ");
+ PrintLiteral(node->url(), true);
+}
+
+
void PrettyPrinter::VisitExpressionStatement(ExpressionStatement* node) {
Visit(node->expression());
Print(";");
@@ -447,6 +500,7 @@ void PrettyPrinter::Print(const char* format, ...) {
void PrettyPrinter::PrintStatements(ZoneList<Statement*>* statements) {
+ if (statements == NULL) return;
for (int i = 0; i < statements->length(); i++) {
if (i != 0) Print(" ");
Visit(statements->at(i));
@@ -618,9 +672,6 @@ void AstPrinter::PrintLiteralWithModeIndented(const char* info,
EmbeddedVector<char, 256> buf;
int pos = OS::SNPrintF(buf, "%s (mode = %s", info,
Variable::Mode2String(var->mode()));
- if (var->is_qml_global()) {
- pos += OS::SNPrintF(buf + pos, ":QML");
- }
OS::SNPrintF(buf + pos, ")");
PrintLiteralIndented(buf.start(), value, true);
}
@@ -713,20 +764,61 @@ void AstPrinter::VisitBlock(Block* node) {
}
-void AstPrinter::VisitDeclaration(Declaration* node) {
- if (node->fun() == NULL) {
- // var or const declarations
- PrintLiteralWithModeIndented(Variable::Mode2String(node->mode()),
- node->proxy()->var(),
- node->proxy()->name());
- } else {
- // function declarations
- PrintIndented("FUNCTION ");
- PrintLiteral(node->proxy()->name(), true);
- Print(" = function ");
- PrintLiteral(node->fun()->name(), false);
- Print("\n");
- }
+void AstPrinter::VisitVariableDeclaration(VariableDeclaration* node) {
+ PrintLiteralWithModeIndented(Variable::Mode2String(node->mode()),
+ node->proxy()->var(),
+ node->proxy()->name());
+}
+
+
+void AstPrinter::VisitFunctionDeclaration(FunctionDeclaration* node) {
+ PrintIndented("FUNCTION ");
+ PrintLiteral(node->proxy()->name(), true);
+ Print(" = function ");
+ PrintLiteral(node->fun()->name(), false);
+ Print("\n");
+}
+
+
+void AstPrinter::VisitModuleDeclaration(ModuleDeclaration* node) {
+ IndentedScope indent(this, "MODULE");
+ PrintLiteralIndented("NAME", node->proxy()->name(), true);
+ Visit(node->module());
+}
+
+
+void AstPrinter::VisitImportDeclaration(ImportDeclaration* node) {
+ IndentedScope indent(this, "IMPORT");
+ PrintLiteralIndented("NAME", node->proxy()->name(), true);
+ Visit(node->module());
+}
+
+
+void AstPrinter::VisitExportDeclaration(ExportDeclaration* node) {
+ IndentedScope indent(this, "EXPORT ");
+ PrintLiteral(node->proxy()->name(), true);
+}
+
+
+void AstPrinter::VisitModuleLiteral(ModuleLiteral* node) {
+ VisitBlock(node->body());
+}
+
+
+void AstPrinter::VisitModuleVariable(ModuleVariable* node) {
+ Visit(node->proxy());
+}
+
+
+void AstPrinter::VisitModulePath(ModulePath* node) {
+ IndentedScope indent(this, "PATH");
+ PrintIndentedVisit("MODULE", node->module());
+ PrintLiteralIndented("NAME", node->name(), false);
+}
+
+
+void AstPrinter::VisitModuleUrl(ModuleUrl* node) {
+ PrintLiteralIndented("URL", node->url(), true);
}
@@ -1020,393 +1112,6 @@ void AstPrinter::VisitThisFunction(ThisFunction* node) {
IndentedScope indent(this, "THIS-FUNCTION");
}
-
-TagScope::TagScope(JsonAstBuilder* builder, const char* name)
- : builder_(builder), next_(builder->tag()), has_body_(false) {
- if (next_ != NULL) {
- next_->use();
- builder->Print(",\n");
- }
- builder->set_tag(this);
- builder->PrintIndented("[");
- builder->Print("\"%s\"", name);
- builder->increase_indent(JsonAstBuilder::kTagIndentSize);
-}
-
-
-TagScope::~TagScope() {
- builder_->decrease_indent(JsonAstBuilder::kTagIndentSize);
- if (has_body_) {
- builder_->Print("\n");
- builder_->PrintIndented("]");
- } else {
- builder_->Print("]");
- }
- builder_->set_tag(next_);
-}
-
-
-AttributesScope::AttributesScope(JsonAstBuilder* builder)
- : builder_(builder), attribute_count_(0) {
- builder->set_attributes(this);
- builder->tag()->use();
- builder->Print(",\n");
- builder->PrintIndented("{");
- builder->increase_indent(JsonAstBuilder::kAttributesIndentSize);
-}
-
-
-AttributesScope::~AttributesScope() {
- builder_->decrease_indent(JsonAstBuilder::kAttributesIndentSize);
- if (attribute_count_ > 1) {
- builder_->Print("\n");
- builder_->PrintIndented("}");
- } else {
- builder_->Print("}");
- }
- builder_->set_attributes(NULL);
-}
-
-
-const char* JsonAstBuilder::BuildProgram(FunctionLiteral* program) {
- Init();
- Visit(program);
- Print("\n");
- return Output();
-}
-
-
-void JsonAstBuilder::AddAttributePrefix(const char* name) {
- if (attributes()->is_used()) {
- Print(",\n");
- PrintIndented("\"");
- } else {
- Print("\"");
- }
- Print("%s\":", name);
- attributes()->use();
-}
-
-
-void JsonAstBuilder::AddAttribute(const char* name, Handle<String> value) {
- SmartArrayPointer<char> value_string = value->ToCString();
- AddAttributePrefix(name);
- Print("\"%s\"", *value_string);
-}
-
-
-void JsonAstBuilder::AddAttribute(const char* name, const char* value) {
- AddAttributePrefix(name);
- Print("\"%s\"", value);
-}
-
-
-void JsonAstBuilder::AddAttribute(const char* name, int value) {
- AddAttributePrefix(name);
- Print("%d", value);
-}
-
-
-void JsonAstBuilder::AddAttribute(const char* name, bool value) {
- AddAttributePrefix(name);
- Print(value ? "true" : "false");
-}
-
-
-void JsonAstBuilder::VisitBlock(Block* stmt) {
- TagScope tag(this, "Block");
- VisitStatements(stmt->statements());
-}
-
-
-void JsonAstBuilder::VisitExpressionStatement(ExpressionStatement* stmt) {
- TagScope tag(this, "ExpressionStatement");
- Visit(stmt->expression());
-}
-
-
-void JsonAstBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
- TagScope tag(this, "EmptyStatement");
-}
-
-
-void JsonAstBuilder::VisitIfStatement(IfStatement* stmt) {
- TagScope tag(this, "IfStatement");
- Visit(stmt->condition());
- Visit(stmt->then_statement());
- Visit(stmt->else_statement());
-}
-
-
-void JsonAstBuilder::VisitContinueStatement(ContinueStatement* stmt) {
- TagScope tag(this, "ContinueStatement");
-}
-
-
-void JsonAstBuilder::VisitBreakStatement(BreakStatement* stmt) {
- TagScope tag(this, "BreakStatement");
-}
-
-
-void JsonAstBuilder::VisitReturnStatement(ReturnStatement* stmt) {
- TagScope tag(this, "ReturnStatement");
- Visit(stmt->expression());
-}
-
-
-void JsonAstBuilder::VisitWithStatement(WithStatement* stmt) {
- TagScope tag(this, "WithStatement");
- Visit(stmt->expression());
- Visit(stmt->statement());
-}
-
-
-void JsonAstBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
- TagScope tag(this, "SwitchStatement");
-}
-
-
-void JsonAstBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
- TagScope tag(this, "DoWhileStatement");
- Visit(stmt->body());
- Visit(stmt->cond());
-}
-
-
-void JsonAstBuilder::VisitWhileStatement(WhileStatement* stmt) {
- TagScope tag(this, "WhileStatement");
- Visit(stmt->cond());
- Visit(stmt->body());
-}
-
-
-void JsonAstBuilder::VisitForStatement(ForStatement* stmt) {
- TagScope tag(this, "ForStatement");
- if (stmt->init() != NULL) Visit(stmt->init());
- if (stmt->cond() != NULL) Visit(stmt->cond());
- Visit(stmt->body());
- if (stmt->next() != NULL) Visit(stmt->next());
-}
-
-
-void JsonAstBuilder::VisitForInStatement(ForInStatement* stmt) {
- TagScope tag(this, "ForInStatement");
- Visit(stmt->each());
- Visit(stmt->enumerable());
- Visit(stmt->body());
-}
-
-
-void JsonAstBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
- TagScope tag(this, "TryCatchStatement");
- { AttributesScope attributes(this);
- AddAttribute("variable", stmt->variable()->name());
- }
- Visit(stmt->try_block());
- Visit(stmt->catch_block());
-}
-
-
-void JsonAstBuilder::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
- TagScope tag(this, "TryFinallyStatement");
- Visit(stmt->try_block());
- Visit(stmt->finally_block());
-}
-
-
-void JsonAstBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
- TagScope tag(this, "DebuggerStatement");
-}
-
-
-void JsonAstBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
- TagScope tag(this, "FunctionLiteral");
- {
- AttributesScope attributes(this);
- AddAttribute("name", expr->name());
- }
- VisitDeclarations(expr->scope()->declarations());
- VisitStatements(expr->body());
-}
-
-
-void JsonAstBuilder::VisitSharedFunctionInfoLiteral(
- SharedFunctionInfoLiteral* expr) {
- TagScope tag(this, "SharedFunctionInfoLiteral");
-}
-
-
-void JsonAstBuilder::VisitConditional(Conditional* expr) {
- TagScope tag(this, "Conditional");
-}
-
-
-void JsonAstBuilder::VisitVariableProxy(VariableProxy* expr) {
- TagScope tag(this, "Variable");
- {
- AttributesScope attributes(this);
- Variable* var = expr->var();
- AddAttribute("name", var->name());
- switch (var->location()) {
- case Variable::UNALLOCATED:
- AddAttribute("location", "UNALLOCATED");
- break;
- case Variable::PARAMETER:
- AddAttribute("location", "PARAMETER");
- AddAttribute("index", var->index());
- break;
- case Variable::LOCAL:
- AddAttribute("location", "LOCAL");
- AddAttribute("index", var->index());
- break;
- case Variable::CONTEXT:
- AddAttribute("location", "CONTEXT");
- AddAttribute("index", var->index());
- break;
- case Variable::LOOKUP:
- AddAttribute("location", "LOOKUP");
- break;
- }
- }
-}
-
-
-void JsonAstBuilder::VisitLiteral(Literal* expr) {
- TagScope tag(this, "Literal");
- {
- AttributesScope attributes(this);
- Handle<Object> handle = expr->handle();
- if (handle->IsString()) {
- AddAttribute("handle", Handle<String>(String::cast(*handle)));
- } else if (handle->IsSmi()) {
- AddAttribute("handle", Smi::cast(*handle)->value());
- }
- }
-}
-
-
-void JsonAstBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
- TagScope tag(this, "RegExpLiteral");
-}
-
-
-void JsonAstBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
- TagScope tag(this, "ObjectLiteral");
-}
-
-
-void JsonAstBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
- TagScope tag(this, "ArrayLiteral");
-}
-
-
-void JsonAstBuilder::VisitAssignment(Assignment* expr) {
- TagScope tag(this, "Assignment");
- {
- AttributesScope attributes(this);
- AddAttribute("op", Token::Name(expr->op()));
- }
- Visit(expr->target());
- Visit(expr->value());
-}
-
-
-void JsonAstBuilder::VisitThrow(Throw* expr) {
- TagScope tag(this, "Throw");
- Visit(expr->exception());
-}
-
-
-void JsonAstBuilder::VisitProperty(Property* expr) {
- TagScope tag(this, "Property");
- Visit(expr->obj());
- Visit(expr->key());
-}
-
-
-void JsonAstBuilder::VisitCall(Call* expr) {
- TagScope tag(this, "Call");
- Visit(expr->expression());
- VisitExpressions(expr->arguments());
-}
-
-
-void JsonAstBuilder::VisitCallNew(CallNew* expr) {
- TagScope tag(this, "CallNew");
- Visit(expr->expression());
- VisitExpressions(expr->arguments());
-}
-
-
-void JsonAstBuilder::VisitCallRuntime(CallRuntime* expr) {
- TagScope tag(this, "CallRuntime");
- {
- AttributesScope attributes(this);
- AddAttribute("name", expr->name());
- }
- VisitExpressions(expr->arguments());
-}
-
-
-void JsonAstBuilder::VisitUnaryOperation(UnaryOperation* expr) {
- TagScope tag(this, "UnaryOperation");
- {
- AttributesScope attributes(this);
- AddAttribute("op", Token::Name(expr->op()));
- }
- Visit(expr->expression());
-}
-
-
-void JsonAstBuilder::VisitCountOperation(CountOperation* expr) {
- TagScope tag(this, "CountOperation");
- {
- AttributesScope attributes(this);
- AddAttribute("is_prefix", expr->is_prefix());
- AddAttribute("op", Token::Name(expr->op()));
- }
- Visit(expr->expression());
-}
-
-
-void JsonAstBuilder::VisitBinaryOperation(BinaryOperation* expr) {
- TagScope tag(this, "BinaryOperation");
- {
- AttributesScope attributes(this);
- AddAttribute("op", Token::Name(expr->op()));
- }
- Visit(expr->left());
- Visit(expr->right());
-}
-
-
-void JsonAstBuilder::VisitCompareOperation(CompareOperation* expr) {
- TagScope tag(this, "CompareOperation");
- {
- AttributesScope attributes(this);
- AddAttribute("op", Token::Name(expr->op()));
- }
- Visit(expr->left());
- Visit(expr->right());
-}
-
-
-void JsonAstBuilder::VisitThisFunction(ThisFunction* expr) {
- TagScope tag(this, "ThisFunction");
-}
-
-
-void JsonAstBuilder::VisitDeclaration(Declaration* decl) {
- TagScope tag(this, "Declaration");
- {
- AttributesScope attributes(this);
- AddAttribute("mode", Variable::Mode2String(decl->mode()));
- }
- Visit(decl->proxy());
- if (decl->fun() != NULL) Visit(decl->fun());
-}
-
-
#endif // DEBUG
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/prettyprinter.h b/src/3rdparty/v8/src/prettyprinter.h
index a26c48e..9ac7257 100644
--- a/src/3rdparty/v8/src/prettyprinter.h
+++ b/src/3rdparty/v8/src/prettyprinter.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -112,107 +112,6 @@ class AstPrinter: public PrettyPrinter {
int indent_;
};
-
-// Forward declaration of helper classes.
-class TagScope;
-class AttributesScope;
-
-// Build a C string containing a JSON representation of a function's
-// AST. The representation is based on JsonML (www.jsonml.org).
-class JsonAstBuilder: public PrettyPrinter {
- public:
- JsonAstBuilder()
- : indent_(0), top_tag_scope_(NULL), attributes_scope_(NULL) {
- }
- virtual ~JsonAstBuilder() {}
-
- // Controls the indentation of subsequent lines of a tag body after
- // the first line.
- static const int kTagIndentSize = 2;
-
- // Controls the indentation of subsequent lines of an attributes
- // blocks's body after the first line.
- static const int kAttributesIndentSize = 1;
-
- // Construct a JSON representation of a function literal.
- const char* BuildProgram(FunctionLiteral* program);
-
- // Print text indented by the current indentation level.
- void PrintIndented(const char* text) { Print("%*s%s", indent_, "", text); }
-
- // Change the indentation level.
- void increase_indent(int amount) { indent_ += amount; }
- void decrease_indent(int amount) { indent_ -= amount; }
-
- // The builder maintains a stack of opened AST node constructors.
- // Each node constructor corresponds to a JsonML tag.
- TagScope* tag() { return top_tag_scope_; }
- void set_tag(TagScope* scope) { top_tag_scope_ = scope; }
-
- // The builder maintains a pointer to the currently opened attributes
- // of current AST node or NULL if the attributes are not opened.
- AttributesScope* attributes() { return attributes_scope_; }
- void set_attributes(AttributesScope* scope) { attributes_scope_ = scope; }
-
- // Add an attribute to the currently opened attributes.
- void AddAttribute(const char* name, Handle<String> value);
- void AddAttribute(const char* name, const char* value);
- void AddAttribute(const char* name, int value);
- void AddAttribute(const char* name, bool value);
-
- // AST node visit functions.
-#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
- AST_NODE_LIST(DECLARE_VISIT)
-#undef DECLARE_VISIT
-
- private:
- int indent_;
- TagScope* top_tag_scope_;
- AttributesScope* attributes_scope_;
-
- // Utility function used by AddAttribute implementations.
- void AddAttributePrefix(const char* name);
-};
-
-
-// The JSON AST builder keeps a stack of open element tags (AST node
-// constructors from the current iteration point to the root of the
-// AST). TagScope is a helper class to manage the opening and closing
-// of tags, the indentation of their bodies, and comma separating their
-// contents.
-class TagScope BASE_EMBEDDED {
- public:
- TagScope(JsonAstBuilder* builder, const char* name);
- ~TagScope();
-
- void use() { has_body_ = true; }
-
- private:
- JsonAstBuilder* builder_;
- TagScope* next_;
- bool has_body_;
-};
-
-
-// AttributesScope is a helper class to manage the opening and closing
-// of attribute blocks, the indentation of their bodies, and comma
-// separating their contents. JsonAstBuilder::AddAttribute adds an
-// attribute to the currently open AttributesScope. They cannot be
-// nested so the builder keeps an optional single scope rather than a
-// stack.
-class AttributesScope BASE_EMBEDDED {
- public:
- explicit AttributesScope(JsonAstBuilder* builder);
- ~AttributesScope();
-
- bool is_used() { return attribute_count_ > 0; }
- void use() { ++attribute_count_; }
-
- private:
- JsonAstBuilder* builder_;
- int attribute_count_;
-};
-
#endif // DEBUG
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/profile-generator-inl.h b/src/3rdparty/v8/src/profile-generator-inl.h
index 88d6e87..65369be 100644
--- a/src/3rdparty/v8/src/profile-generator-inl.h
+++ b/src/3rdparty/v8/src/profile-generator-inl.h
@@ -95,12 +95,23 @@ CodeEntry* ProfileGenerator::EntryForVMState(StateTag tag) {
}
-uint64_t HeapEntry::id() {
- union {
- Id stored_id;
- uint64_t returned_id;
- } id_adaptor = {id_};
- return id_adaptor.returned_id;
+SnapshotObjectId HeapObjectsMap::GetNthGcSubrootId(int delta) {
+ return kGcRootsFirstSubrootId + delta * kObjectIdStep;
+}
+
+
+HeapObject* V8HeapExplorer::GetNthGcSubrootObject(int delta) {
+ return reinterpret_cast<HeapObject*>(
+ reinterpret_cast<char*>(kFirstGcSubrootObject) +
+ delta * HeapObjectsMap::kObjectIdStep);
+}
+
+
+int V8HeapExplorer::GetGcSubrootOrder(HeapObject* subroot) {
+ return static_cast<int>(
+ (reinterpret_cast<char*>(subroot) -
+ reinterpret_cast<char*>(kFirstGcSubrootObject)) /
+ HeapObjectsMap::kObjectIdStep);
}
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/profile-generator.cc b/src/3rdparty/v8/src/profile-generator.cc
index 9812c26..683fec1 100644
--- a/src/3rdparty/v8/src/profile-generator.cc
+++ b/src/3rdparty/v8/src/profile-generator.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -110,7 +110,8 @@ const char* StringsStorage::GetCopy(const char* src) {
Vector<char> dst = Vector<char>::New(len + 1);
OS::StrNCpy(dst, src, len);
dst[len] = '\0';
- uint32_t hash = HashSequentialString(dst.start(), len);
+ uint32_t hash =
+ HashSequentialString(dst.start(), len, HEAP->HashSeed());
return AddOrDisposeString(dst.start(), hash);
}
@@ -143,16 +144,20 @@ const char* StringsStorage::GetVFormatted(const char* format, va_list args) {
DeleteArray(str.start());
return format;
}
- uint32_t hash = HashSequentialString(str.start(), len);
+ uint32_t hash = HashSequentialString(
+ str.start(), len, HEAP->HashSeed());
return AddOrDisposeString(str.start(), hash);
}
const char* StringsStorage::GetName(String* name) {
if (name->IsString()) {
- return AddOrDisposeString(
- name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).Detach(),
- name->Hash());
+ int length = Min(kMaxNameSize, name->length());
+ SmartArrayPointer<char> data =
+ name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length);
+ uint32_t hash =
+ HashSequentialString(*data, length, name->GetHeap()->HashSeed());
+ return AddOrDisposeString(data.Detach(), hash);
}
return "";
}
@@ -176,18 +181,21 @@ void CodeEntry::CopyData(const CodeEntry& source) {
uint32_t CodeEntry::GetCallUid() const {
- uint32_t hash = ComputeIntegerHash(tag_);
+ uint32_t hash = ComputeIntegerHash(tag_, v8::internal::kZeroHashSeed);
if (shared_id_ != 0) {
- hash ^= ComputeIntegerHash(
- static_cast<uint32_t>(shared_id_));
+ hash ^= ComputeIntegerHash(static_cast<uint32_t>(shared_id_),
+ v8::internal::kZeroHashSeed);
} else {
hash ^= ComputeIntegerHash(
- static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_prefix_)));
+ static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_prefix_)),
+ v8::internal::kZeroHashSeed);
hash ^= ComputeIntegerHash(
- static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_)));
+ static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_)),
+ v8::internal::kZeroHashSeed);
hash ^= ComputeIntegerHash(
- static_cast<uint32_t>(reinterpret_cast<uintptr_t>(resource_name_)));
- hash ^= ComputeIntegerHash(line_number_);
+ static_cast<uint32_t>(reinterpret_cast<uintptr_t>(resource_name_)),
+ v8::internal::kZeroHashSeed);
+ hash ^= ComputeIntegerHash(line_number_, v8::internal::kZeroHashSeed);
}
return hash;
}
@@ -896,7 +904,7 @@ void ProfileGenerator::RecordTickSample(const TickSample& sample) {
entry++;
}
- for (const Address *stack_pos = sample.stack,
+ for (const Address* stack_pos = sample.stack,
*stack_end = stack_pos + sample.frames_count;
stack_pos != stack_end;
++stack_pos) {
@@ -936,7 +944,7 @@ void HeapGraphEdge::Init(
void HeapGraphEdge::Init(int child_index, Type type, int index, HeapEntry* to) {
- ASSERT(type == kElement || type == kHidden);
+ ASSERT(type == kElement || type == kHidden || type == kWeak);
child_index_ = child_index;
type_ = type;
index_ = index;
@@ -957,25 +965,21 @@ HeapEntry* HeapGraphEdge::From() {
void HeapEntry::Init(HeapSnapshot* snapshot,
Type type,
const char* name,
- uint64_t id,
+ SnapshotObjectId id,
int self_size,
int children_count,
int retainers_count) {
snapshot_ = snapshot;
type_ = type;
- painted_ = kUnpainted;
+ painted_ = false;
name_ = name;
self_size_ = self_size;
retained_size_ = 0;
+ entry_index_ = -1;
children_count_ = children_count;
retainers_count_ = retainers_count;
dominator_ = NULL;
-
- union {
- uint64_t set_id;
- Id stored_id;
- } id_adaptor = {id};
- id_ = id_adaptor.stored_id;
+ id_ = id;
}
@@ -984,8 +988,8 @@ void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
const char* name,
HeapEntry* entry,
int retainer_index) {
- children_arr()[child_index].Init(child_index, type, name, entry);
- entry->retainers_arr()[retainer_index] = children_arr() + child_index;
+ children()[child_index].Init(child_index, type, name, entry);
+ entry->retainers()[retainer_index] = children_arr() + child_index;
}
@@ -994,22 +998,14 @@ void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
int index,
HeapEntry* entry,
int retainer_index) {
- children_arr()[child_index].Init(child_index, type, index, entry);
- entry->retainers_arr()[retainer_index] = children_arr() + child_index;
+ children()[child_index].Init(child_index, type, index, entry);
+ entry->retainers()[retainer_index] = children_arr() + child_index;
}
void HeapEntry::SetUnidirElementReference(
int child_index, int index, HeapEntry* entry) {
- children_arr()[child_index].Init(child_index, index, entry);
-}
-
-
-int HeapEntry::RetainedSize(bool exact) {
- if (exact && (retained_size_ & kExactRetainedSizeTag) == 0) {
- CalculateExactRetainedSize();
- }
- return retained_size_ & (~kExactRetainedSizeTag);
+ children()[child_index].Init(child_index, index, entry);
}
@@ -1018,41 +1014,11 @@ Handle<HeapObject> HeapEntry::GetHeapObject() {
}
-template<class Visitor>
-void HeapEntry::ApplyAndPaintAllReachable(Visitor* visitor) {
- List<HeapEntry*> list(10);
- list.Add(this);
- this->paint_reachable();
- visitor->Apply(this);
- while (!list.is_empty()) {
- HeapEntry* entry = list.RemoveLast();
- Vector<HeapGraphEdge> children = entry->children();
- for (int i = 0; i < children.length(); ++i) {
- if (children[i].type() == HeapGraphEdge::kShortcut) continue;
- HeapEntry* child = children[i].to();
- if (!child->painted_reachable()) {
- list.Add(child);
- child->paint_reachable();
- visitor->Apply(child);
- }
- }
- }
-}
-
-
-class NullClass {
- public:
- void Apply(HeapEntry* entry) { }
-};
-
-void HeapEntry::PaintAllReachable() {
- NullClass null;
- ApplyAndPaintAllReachable(&null);
-}
-
-
-void HeapEntry::Print(int max_depth, int indent) {
- OS::Print("%6d %6d [%llu] ", self_size(), RetainedSize(false), id());
+void HeapEntry::Print(
+ const char* prefix, const char* edge_name, int max_depth, int indent) {
+ OS::Print("%6d %7d @%6llu %*c %s%s: ",
+ self_size(), retained_size(), id(),
+ indent, ' ', prefix, edge_name);
if (type() != kString) {
OS::Print("%s %.40s\n", TypeAsString(), name_);
} else {
@@ -1071,29 +1037,40 @@ void HeapEntry::Print(int max_depth, int indent) {
Vector<HeapGraphEdge> ch = children();
for (int i = 0; i < ch.length(); ++i) {
HeapGraphEdge& edge = ch[i];
+ const char* edge_prefix = "";
+ EmbeddedVector<char, 64> index;
+ const char* edge_name = index.start();
switch (edge.type()) {
case HeapGraphEdge::kContextVariable:
- OS::Print(" %*c #%s: ", indent, ' ', edge.name());
+ edge_prefix = "#";
+ edge_name = edge.name();
break;
case HeapGraphEdge::kElement:
- OS::Print(" %*c %d: ", indent, ' ', edge.index());
+ OS::SNPrintF(index, "%d", edge.index());
break;
case HeapGraphEdge::kInternal:
- OS::Print(" %*c $%s: ", indent, ' ', edge.name());
+ edge_prefix = "$";
+ edge_name = edge.name();
break;
case HeapGraphEdge::kProperty:
- OS::Print(" %*c %s: ", indent, ' ', edge.name());
+ edge_name = edge.name();
break;
case HeapGraphEdge::kHidden:
- OS::Print(" %*c $%d: ", indent, ' ', edge.index());
+ edge_prefix = "$";
+ OS::SNPrintF(index, "%d", edge.index());
break;
case HeapGraphEdge::kShortcut:
- OS::Print(" %*c ^%s: ", indent, ' ', edge.name());
+ edge_prefix = "^";
+ edge_name = edge.name();
+ break;
+ case HeapGraphEdge::kWeak:
+ edge_prefix = "w";
+ OS::SNPrintF(index, "%d", edge.index());
break;
default:
- OS::Print("!!! unknown edge type: %d ", edge.type());
+ OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
}
- edge.to()->Print(max_depth, indent + 2);
+ edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
}
}
@@ -1109,73 +1086,21 @@ const char* HeapEntry::TypeAsString() {
case kRegExp: return "/regexp/";
case kHeapNumber: return "/number/";
case kNative: return "/native/";
+ case kSynthetic: return "/synthetic/";
default: return "???";
}
}
-int HeapEntry::EntriesSize(int entries_count,
- int children_count,
- int retainers_count) {
+size_t HeapEntry::EntriesSize(int entries_count,
+ int children_count,
+ int retainers_count) {
return sizeof(HeapEntry) * entries_count // NOLINT
+ sizeof(HeapGraphEdge) * children_count // NOLINT
+ sizeof(HeapGraphEdge*) * retainers_count; // NOLINT
}
-class RetainedSizeCalculator {
- public:
- RetainedSizeCalculator()
- : retained_size_(0) {
- }
-
- int retained_size() const { return retained_size_; }
-
- void Apply(HeapEntry** entry_ptr) {
- if ((*entry_ptr)->painted_reachable()) {
- retained_size_ += (*entry_ptr)->self_size();
- }
- }
-
- private:
- int retained_size_;
-};
-
-void HeapEntry::CalculateExactRetainedSize() {
- // To calculate retained size, first we paint all reachable nodes in
- // one color, then we paint (or re-paint) all nodes reachable from
- // other nodes with a different color. Then we sum up self sizes of
- // nodes painted with the first color.
- snapshot()->ClearPaint();
- PaintAllReachable();
-
- List<HeapEntry*> list(10);
- HeapEntry* root = snapshot()->root();
- if (this != root) {
- list.Add(root);
- root->paint_reachable_from_others();
- }
- while (!list.is_empty()) {
- HeapEntry* curr = list.RemoveLast();
- Vector<HeapGraphEdge> children = curr->children();
- for (int i = 0; i < children.length(); ++i) {
- if (children[i].type() == HeapGraphEdge::kShortcut) continue;
- HeapEntry* child = children[i].to();
- if (child != this && child->not_painted_reachable_from_others()) {
- list.Add(child);
- child->paint_reachable_from_others();
- }
- }
- }
-
- RetainedSizeCalculator ret_size_calc;
- snapshot()->IterateEntries(&ret_size_calc);
- retained_size_ = ret_size_calc.retained_size();
- ASSERT((retained_size_ & kExactRetainedSizeTag) == 0);
- retained_size_ |= kExactRetainedSizeTag;
-}
-
-
// It is very important to keep objects that form a heap snapshot
// as small as possible.
namespace { // Avoid littering the global namespace.
@@ -1185,11 +1110,14 @@ template <size_t ptr_size> struct SnapshotSizeConstants;
template <> struct SnapshotSizeConstants<4> {
static const int kExpectedHeapGraphEdgeSize = 12;
static const int kExpectedHeapEntrySize = 36;
+ static const size_t kMaxSerializableSnapshotRawSize = 256 * MB;
};
template <> struct SnapshotSizeConstants<8> {
static const int kExpectedHeapGraphEdgeSize = 24;
static const int kExpectedHeapEntrySize = 48;
+ static const uint64_t kMaxSerializableSnapshotRawSize =
+ static_cast<uint64_t>(6000) * MB;
};
} // namespace
@@ -1206,15 +1134,19 @@ HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection,
gc_roots_entry_(NULL),
natives_root_entry_(NULL),
raw_entries_(NULL),
- entries_sorted_(false) {
- STATIC_ASSERT(
+ max_snapshot_js_object_id_(0) {
+ STATIC_CHECK(
sizeof(HeapGraphEdge) ==
- SnapshotSizeConstants<sizeof(void*)>::kExpectedHeapGraphEdgeSize); // NOLINT
- STATIC_ASSERT(
+ SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
+ STATIC_CHECK(
sizeof(HeapEntry) ==
- SnapshotSizeConstants<sizeof(void*)>::kExpectedHeapEntrySize); // NOLINT
+ SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
+ for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
+ gc_subroot_entries_[i] = NULL;
+ }
}
+
HeapSnapshot::~HeapSnapshot() {
DeleteArray(raw_entries_);
}
@@ -1226,6 +1158,11 @@ void HeapSnapshot::Delete() {
}
+void HeapSnapshot::RememberLastJSObjectId() {
+ max_snapshot_js_object_id_ = collection_->last_assigned_id();
+}
+
+
void HeapSnapshot::AllocateEntries(int entries_count,
int children_count,
int retainers_count) {
@@ -1240,6 +1177,7 @@ static void HeapEntryClearPaint(HeapEntry** entry_ptr) {
(*entry_ptr)->clear_paint();
}
+
void HeapSnapshot::ClearPaint() {
entries_.Iterate(HeapEntryClearPaint);
}
@@ -1247,6 +1185,7 @@ void HeapSnapshot::ClearPaint() {
HeapEntry* HeapSnapshot::AddRootEntry(int children_count) {
ASSERT(root_entry_ == NULL);
+ ASSERT(entries_.is_empty()); // Root entry must be the first one.
return (root_entry_ = AddEntry(HeapEntry::kObject,
"",
HeapObjectsMap::kInternalRootObjectId,
@@ -1268,13 +1207,15 @@ HeapEntry* HeapSnapshot::AddGcRootsEntry(int children_count,
}
-HeapEntry* HeapSnapshot::AddNativesRootEntry(int children_count,
- int retainers_count) {
- ASSERT(natives_root_entry_ == NULL);
- return (natives_root_entry_ = AddEntry(
+HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag,
+ int children_count,
+ int retainers_count) {
+ ASSERT(gc_subroot_entries_[tag] == NULL);
+ ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
+ return (gc_subroot_entries_[tag] = AddEntry(
HeapEntry::kObject,
- "(Native objects)",
- HeapObjectsMap::kNativesRootObjectId,
+ VisitorSynchronization::kTagNames[tag],
+ HeapObjectsMap::GetNthGcSubrootId(tag),
0,
children_count,
retainers_count));
@@ -1283,7 +1224,7 @@ HeapEntry* HeapSnapshot::AddNativesRootEntry(int children_count,
HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
const char* name,
- uint64_t id,
+ SnapshotObjectId id,
int size,
int children_count,
int retainers_count) {
@@ -1315,7 +1256,7 @@ HeapEntry* HeapSnapshot::GetNextEntryToInit() {
}
-HeapEntry* HeapSnapshot::GetEntryById(uint64_t id) {
+HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
// Perform a binary search by id.
@@ -1324,7 +1265,7 @@ HeapEntry* HeapSnapshot::GetEntryById(uint64_t id) {
while (low <= high) {
int mid =
(static_cast<unsigned int>(low) + static_cast<unsigned int>(high)) >> 1;
- uint64_t mid_id = entries_by_id->at(mid)->id();
+ SnapshotObjectId mid_id = entries_by_id->at(mid)->id();
if (mid_id > id)
high = mid - 1;
else if (mid_id < id)
@@ -1343,27 +1284,31 @@ static int SortByIds(const T* entry1_ptr,
return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
}
+
List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
- if (!entries_sorted_) {
- entries_.Sort(SortByIds);
- entries_sorted_ = true;
+ if (sorted_entries_.is_empty()) {
+ sorted_entries_.AddAll(entries_);
+ sorted_entries_.Sort(SortByIds);
}
- return &entries_;
+ return &sorted_entries_;
}
void HeapSnapshot::Print(int max_depth) {
- root()->Print(max_depth, 0);
+ root()->Print("", "", max_depth, 0);
}
// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects.
-const uint64_t HeapObjectsMap::kInternalRootObjectId = 1;
-const uint64_t HeapObjectsMap::kGcRootsObjectId = 3;
-const uint64_t HeapObjectsMap::kNativesRootObjectId = 5;
-// Increase kFirstAvailableObjectId if new 'special' objects appear.
-const uint64_t HeapObjectsMap::kFirstAvailableObjectId = 7;
+const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
+const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
+ HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
+const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
+ HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
+const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
+ HeapObjectsMap::kGcRootsFirstSubrootId +
+ VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
HeapObjectsMap::HeapObjectsMap()
: initial_fill_mode_(true),
@@ -1383,13 +1328,13 @@ void HeapObjectsMap::SnapshotGenerationFinished() {
}
-uint64_t HeapObjectsMap::FindObject(Address addr) {
+SnapshotObjectId HeapObjectsMap::FindObject(Address addr) {
if (!initial_fill_mode_) {
- uint64_t existing = FindEntry(addr);
+ SnapshotObjectId existing = FindEntry(addr);
if (existing != 0) return existing;
}
- uint64_t id = next_id_;
- next_id_ += 2;
+ SnapshotObjectId id = next_id_;
+ next_id_ += kObjectIdStep;
AddEntry(addr, id);
return id;
}
@@ -1411,7 +1356,7 @@ void HeapObjectsMap::MoveObject(Address from, Address to) {
}
-void HeapObjectsMap::AddEntry(Address addr, uint64_t id) {
+void HeapObjectsMap::AddEntry(Address addr, SnapshotObjectId id) {
HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
ASSERT(entry->value == NULL);
entry->value = reinterpret_cast<void*>(entries_->length());
@@ -1419,7 +1364,7 @@ void HeapObjectsMap::AddEntry(Address addr, uint64_t id) {
}
-uint64_t HeapObjectsMap::FindEntry(Address addr) {
+SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
if (entry != NULL) {
int entry_index =
@@ -1459,13 +1404,16 @@ void HeapObjectsMap::RemoveDeadEntries() {
}
-uint64_t HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
- uint64_t id = static_cast<uint64_t>(info->GetHash());
+SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
+ SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
const char* label = info->GetLabel();
- id ^= HashSequentialString(label, static_cast<int>(strlen(label)));
+ id ^= HashSequentialString(label,
+ static_cast<int>(strlen(label)),
+ HEAP->HashSeed());
intptr_t element_count = info->GetElementCount();
if (element_count != -1)
- id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count));
+ id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
+ v8::internal::kZeroHashSeed);
return id << 1;
}
@@ -1527,9 +1475,11 @@ void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) {
}
-Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(uint64_t id) {
+Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
+ SnapshotObjectId id) {
// First perform a full GC in order to avoid dead objects.
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+ "HeapSnapshotsCollection::FindHeapObjectById");
AssertNoAllocation no_allocation;
HeapObject* object = NULL;
HeapIterator iterator(HeapIterator::kFilterUnreachable);
@@ -1547,7 +1497,7 @@ Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(uint64_t id) {
}
-HeapEntry *const HeapEntriesMap::kHeapEntryPlaceholder =
+HeapEntry* const HeapEntriesMap::kHeapEntryPlaceholder =
reinterpret_cast<HeapEntry*>(1);
HeapEntriesMap::HeapEntriesMap()
@@ -1565,20 +1515,39 @@ HeapEntriesMap::~HeapEntriesMap() {
}
-void HeapEntriesMap::AllocateEntries() {
- for (HashMap::Entry* p = entries_.Start();
- p != NULL;
- p = entries_.Next(p)) {
- EntryInfo* entry_info = reinterpret_cast<EntryInfo*>(p->value);
+void HeapEntriesMap::AllocateHeapEntryForMapEntry(HashMap::Entry* map_entry) {
+ EntryInfo* entry_info = reinterpret_cast<EntryInfo*>(map_entry->value);
entry_info->entry = entry_info->allocator->AllocateEntry(
- p->key,
+ map_entry->key,
entry_info->children_count,
entry_info->retainers_count);
ASSERT(entry_info->entry != NULL);
ASSERT(entry_info->entry != kHeapEntryPlaceholder);
entry_info->children_count = 0;
entry_info->retainers_count = 0;
+}
+
+
+void HeapEntriesMap::AllocateEntries(HeapThing root_object) {
+ HashMap::Entry* root_entry =
+ entries_.Lookup(root_object, Hash(root_object), false);
+ ASSERT(root_entry != NULL);
+ // Make sure root entry is allocated first.
+ AllocateHeapEntryForMapEntry(root_entry);
+ void* root_entry_value = root_entry->value;
+ // Remove the root object from map while iterating through other entries.
+ entries_.Remove(root_object, Hash(root_object));
+ root_entry = NULL;
+
+ for (HashMap::Entry* p = entries_.Start();
+ p != NULL;
+ p = entries_.Next(p)) {
+ AllocateHeapEntryForMapEntry(p);
}
+
+ // Insert root entry back.
+ root_entry = entries_.Lookup(root_object, Hash(root_object), true);
+ root_entry->value = root_entry_value;
}
@@ -1676,12 +1645,18 @@ void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
}
-HeapObject *const V8HeapExplorer::kInternalRootObject =
+HeapObject* const V8HeapExplorer::kInternalRootObject =
reinterpret_cast<HeapObject*>(
static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
-HeapObject *const V8HeapExplorer::kGcRootsObject =
+HeapObject* const V8HeapExplorer::kGcRootsObject =
reinterpret_cast<HeapObject*>(
static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
+HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
+ reinterpret_cast<HeapObject*>(
+ static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
+HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
+ reinterpret_cast<HeapObject*>(
+ static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));
V8HeapExplorer::V8HeapExplorer(
@@ -1714,24 +1689,19 @@ HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
return snapshot_->AddRootEntry(children_count);
} else if (object == kGcRootsObject) {
return snapshot_->AddGcRootsEntry(children_count, retainers_count);
- } else if (object->IsJSGlobalObject()) {
- const char* tag = objects_tags_.GetTag(object);
- const char* name = collection_->names()->GetName(
- GetConstructorName(JSObject::cast(object)));
- if (tag != NULL) {
- name = collection_->names()->GetFormatted("%s / %s", name, tag);
- }
- return AddEntry(object,
- HeapEntry::kObject,
- name,
- children_count,
- retainers_count);
+ } else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) {
+ return snapshot_->AddGcSubrootEntry(
+ GetGcSubrootOrder(object),
+ children_count,
+ retainers_count);
} else if (object->IsJSFunction()) {
JSFunction* func = JSFunction::cast(object);
SharedFunctionInfo* shared = func->shared();
+ const char* name = shared->bound() ? "native_bind" :
+ collection_->names()->GetName(String::cast(shared->name()));
return AddEntry(object,
HeapEntry::kClosure,
- collection_->names()->GetName(String::cast(shared->name())),
+ name,
children_count,
retainers_count);
} else if (object->IsJSRegExp()) {
@@ -1744,8 +1714,7 @@ HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
} else if (object->IsJSObject()) {
return AddEntry(object,
HeapEntry::kObject,
- collection_->names()->GetName(
- GetConstructorName(JSObject::cast(object))),
+ "",
children_count,
retainers_count);
} else if (object->IsString()) {
@@ -1777,6 +1746,18 @@ HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
: "",
children_count,
retainers_count);
+ } else if (object->IsGlobalContext()) {
+ return AddEntry(object,
+ HeapEntry::kHidden,
+ "system / GlobalContext",
+ children_count,
+ retainers_count);
+ } else if (object->IsContext()) {
+ return AddEntry(object,
+ HeapEntry::kHidden,
+ "system / Context",
+ children_count,
+ retainers_count);
} else if (object->IsFixedArray() ||
object->IsFixedDoubleArray() ||
object->IsByteArray() ||
@@ -1816,9 +1797,38 @@ HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
}
+class GcSubrootsEnumerator : public ObjectVisitor {
+ public:
+ GcSubrootsEnumerator(
+ SnapshotFillerInterface* filler, V8HeapExplorer* explorer)
+ : filler_(filler),
+ explorer_(explorer),
+ previous_object_count_(0),
+ object_count_(0) {
+ }
+ void VisitPointers(Object** start, Object** end) {
+ object_count_ += end - start;
+ }
+ void Synchronize(VisitorSynchronization::SyncTag tag) {
+ // Skip empty subroots.
+ if (previous_object_count_ != object_count_) {
+ previous_object_count_ = object_count_;
+ filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
+ }
+ }
+ private:
+ SnapshotFillerInterface* filler_;
+ V8HeapExplorer* explorer_;
+ intptr_t previous_object_count_;
+ intptr_t object_count_;
+};
+
+
void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
filler->AddEntry(kInternalRootObject, this);
filler->AddEntry(kGcRootsObject, this);
+ GcSubrootsEnumerator enumerator(filler, this);
+ heap_->IterateRoots(&enumerator, VISIT_ALL);
}
@@ -1916,6 +1926,7 @@ void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
SetPropertyReference(
obj, entry,
heap_->prototype_symbol(), proto_or_map,
+ NULL,
JSFunction::kPrototypeOrInitialMapOffset);
} else {
SetPropertyReference(
@@ -1923,19 +1934,27 @@ void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
heap_->prototype_symbol(), js_fun->prototype());
}
}
+ SharedFunctionInfo* shared_info = js_fun->shared();
+ // JSFunction has either bindings or literals and never both.
+ bool bound = shared_info->bound();
+ TagObject(js_fun->literals_or_bindings(),
+ bound ? "(function bindings)" : "(function literals)");
+ SetInternalReference(js_fun, entry,
+ bound ? "bindings" : "literals",
+ js_fun->literals_or_bindings(),
+ JSFunction::kLiteralsOffset);
SetInternalReference(js_fun, entry,
- "shared", js_fun->shared(),
+ "shared", shared_info,
JSFunction::kSharedFunctionInfoOffset);
TagObject(js_fun->unchecked_context(), "(context)");
SetInternalReference(js_fun, entry,
"context", js_fun->unchecked_context(),
JSFunction::kContextOffset);
- TagObject(js_fun->literals_or_bindings(),
- "(function literals_or_bindings)");
- SetInternalReference(js_fun, entry,
- "literals_or_bindings",
- js_fun->literals_or_bindings(),
- JSFunction::kLiteralsOffset);
+ for (int i = JSFunction::kNonWeakFieldsEndOffset;
+ i < JSFunction::kSize;
+ i += kPointerSize) {
+ SetWeakReference(js_fun, entry, i, *HeapObject::RawField(js_fun, i), i);
+ }
}
TagObject(js_obj->properties(), "(object properties)");
SetInternalReference(obj, entry,
@@ -1962,8 +1981,14 @@ void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
"(context func. result caches)");
TagObject(context->normalized_map_cache(), "(context norm. map cache)");
TagObject(context->runtime_context(), "(runtime context)");
- TagObject(context->map_cache(), "(context map cache)");
TagObject(context->data(), "(context data)");
+ for (int i = Context::FIRST_WEAK_SLOT;
+ i < Context::GLOBAL_CONTEXT_SLOTS;
+ ++i) {
+ SetWeakReference(obj, entry,
+ i, context->get(i),
+ FixedArray::OffsetOfElementAt(i));
+ }
} else if (obj->IsMap()) {
Map* map = Map::cast(obj);
SetInternalReference(obj, entry,
@@ -2006,6 +2031,9 @@ void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
SetInternalReference(obj, entry,
"script", shared->script(),
SharedFunctionInfo::kScriptOffset);
+ SetWeakReference(obj, entry,
+ 1, shared->initial_map(),
+ SharedFunctionInfo::kInitialMapOffset);
} else if (obj->IsScript()) {
Script* script = Script::cast(obj);
SetInternalReference(obj, entry,
@@ -2024,17 +2052,6 @@ void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
SetInternalReference(obj, entry,
"line_ends", script->line_ends(),
Script::kLineEndsOffset);
- } else if (obj->IsDescriptorArray()) {
- DescriptorArray* desc_array = DescriptorArray::cast(obj);
- if (desc_array->length() > DescriptorArray::kContentArrayIndex) {
- Object* content_array =
- desc_array->get(DescriptorArray::kContentArrayIndex);
- TagObject(content_array, "(map descriptor content)");
- SetInternalReference(obj, entry,
- "content", content_array,
- FixedArray::OffsetOfElementAt(
- DescriptorArray::kContentArrayIndex));
- }
} else if (obj->IsCodeCache()) {
CodeCache* code_cache = CodeCache::cast(obj);
TagObject(code_cache->default_cache(), "(default code cache)");
@@ -2060,21 +2077,44 @@ void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj,
HeapEntry* entry) {
- if (js_obj->IsJSFunction()) {
- HandleScope hs;
- JSFunction* func = JSFunction::cast(js_obj);
- Context* context = func->context();
- ZoneScope zscope(Isolate::Current(), DELETE_ON_EXIT);
- SerializedScopeInfo* serialized_scope_info =
- context->closure()->shared()->scope_info();
- ScopeInfo<ZoneListAllocationPolicy> zone_scope_info(serialized_scope_info);
- int locals_number = zone_scope_info.NumberOfLocals();
- for (int i = 0; i < locals_number; ++i) {
- String* local_name = *zone_scope_info.LocalName(i);
- int idx = serialized_scope_info->ContextSlotIndex(local_name, NULL);
- if (idx >= 0 && idx < context->length()) {
- SetClosureReference(js_obj, entry, local_name, context->get(idx));
- }
+ if (!js_obj->IsJSFunction()) return;
+
+ JSFunction* func = JSFunction::cast(js_obj);
+ Context* context = func->context();
+ ScopeInfo* scope_info = context->closure()->shared()->scope_info();
+
+ if (func->shared()->bound()) {
+ FixedArray* bindings = func->function_bindings();
+ SetNativeBindReference(js_obj, entry, "bound_this",
+ bindings->get(JSFunction::kBoundThisIndex));
+ SetNativeBindReference(js_obj, entry, "bound_function",
+ bindings->get(JSFunction::kBoundFunctionIndex));
+ for (int i = JSFunction::kBoundArgumentsStartIndex;
+ i < bindings->length(); i++) {
+ const char* reference_name = collection_->names()->GetFormatted(
+ "bound_argument_%d",
+ i - JSFunction::kBoundArgumentsStartIndex);
+ SetNativeBindReference(js_obj, entry, reference_name,
+ bindings->get(i));
+ }
+ } else {
+ // Add context allocated locals.
+ int context_locals = scope_info->ContextLocalCount();
+ for (int i = 0; i < context_locals; ++i) {
+ String* local_name = scope_info->ContextLocalName(i);
+ int idx = Context::MIN_CONTEXT_SLOTS + i;
+ SetClosureReference(js_obj, entry, local_name, context->get(idx));
+ }
+
+ // Add function variable.
+ if (scope_info->HasFunctionName()) {
+ String* name = scope_info->FunctionName();
+ int idx = Context::MIN_CONTEXT_SLOTS + context_locals;
+#ifdef DEBUG
+ VariableMode mode;
+ ASSERT(idx == scope_info->FunctionContextSlotIndex(name, &mode));
+#endif
+ SetClosureReference(js_obj, entry, name, context->get(idx));
}
}
}
@@ -2092,6 +2132,7 @@ void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj,
SetPropertyReference(
js_obj, entry,
descs->GetKey(i), js_obj->InObjectPropertyAt(index),
+ NULL,
js_obj->GetInObjectPropertyOffset(index));
} else {
SetPropertyReference(
@@ -2105,7 +2146,29 @@ void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj,
js_obj, entry,
descs->GetKey(i), descs->GetConstantFunction(i));
break;
- default: ;
+ case CALLBACKS: {
+ Object* callback_obj = descs->GetValue(i);
+ if (callback_obj->IsAccessorPair()) {
+ AccessorPair* accessors = AccessorPair::cast(callback_obj);
+ if (Object* getter = accessors->getter()) {
+ SetPropertyReference(js_obj, entry, descs->GetKey(i),
+ getter, "get-%s");
+ }
+ if (Object* setter = accessors->setter()) {
+ SetPropertyReference(js_obj, entry, descs->GetKey(i),
+ setter, "set-%s");
+ }
+ }
+ break;
+ }
+ case NORMAL: // only in slow mode
+ case HANDLER: // only in lookup results, not in descriptors
+ case INTERCEPTOR: // only in lookup results, not in descriptors
+ case MAP_TRANSITION: // we do not care about transitions here...
+ case ELEMENTS_TRANSITION:
+ case CONSTANT_TRANSITION:
+ case NULL_DESCRIPTOR: // ... and not about "holes"
+ break;
}
}
} else {
@@ -2144,7 +2207,7 @@ void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj,
}
}
} else if (js_obj->HasDictionaryElements()) {
- NumberDictionary* dictionary = js_obj->element_dictionary();
+ SeededNumberDictionary* dictionary = js_obj->element_dictionary();
int length = dictionary->Capacity();
for (int i = 0; i < length; ++i) {
Object* k = dictionary->KeyAt(i);
@@ -2202,15 +2265,66 @@ HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
class RootsReferencesExtractor : public ObjectVisitor {
+ private:
+ struct IndexTag {
+ IndexTag(int index, VisitorSynchronization::SyncTag tag)
+ : index(index), tag(tag) { }
+ int index;
+ VisitorSynchronization::SyncTag tag;
+ };
+
public:
- explicit RootsReferencesExtractor(V8HeapExplorer* explorer)
- : explorer_(explorer) {
+ RootsReferencesExtractor()
+ : collecting_all_references_(false),
+ previous_reference_count_(0) {
}
+
void VisitPointers(Object** start, Object** end) {
- for (Object** p = start; p < end; p++) explorer_->SetGcRootsReference(*p);
+ if (collecting_all_references_) {
+ for (Object** p = start; p < end; p++) all_references_.Add(*p);
+ } else {
+ for (Object** p = start; p < end; p++) strong_references_.Add(*p);
+ }
}
+
+ void SetCollectingAllReferences() { collecting_all_references_ = true; }
+
+ void FillReferences(V8HeapExplorer* explorer) {
+ ASSERT(strong_references_.length() <= all_references_.length());
+ for (int i = 0; i < reference_tags_.length(); ++i) {
+ explorer->SetGcRootsReference(reference_tags_[i].tag);
+ }
+ int strong_index = 0, all_index = 0, tags_index = 0;
+ while (all_index < all_references_.length()) {
+ if (strong_index < strong_references_.length() &&
+ strong_references_[strong_index] == all_references_[all_index]) {
+ explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
+ false,
+ all_references_[all_index++]);
+ ++strong_index;
+ } else {
+ explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
+ true,
+ all_references_[all_index++]);
+ }
+ if (reference_tags_[tags_index].index == all_index) ++tags_index;
+ }
+ }
+
+ void Synchronize(VisitorSynchronization::SyncTag tag) {
+ if (collecting_all_references_ &&
+ previous_reference_count_ != all_references_.length()) {
+ previous_reference_count_ = all_references_.length();
+ reference_tags_.Add(IndexTag(previous_reference_count_, tag));
+ }
+ }
+
private:
- V8HeapExplorer* explorer_;
+ bool collecting_all_references_;
+ List<Object*> strong_references_;
+ List<Object*> all_references_;
+ int previous_reference_count_;
+ List<IndexTag> reference_tags_;
};
@@ -2235,13 +2349,42 @@ bool V8HeapExplorer::IterateAndExtractReferences(
return false;
}
SetRootGcRootsReference();
- RootsReferencesExtractor extractor(this);
+ RootsReferencesExtractor extractor;
+ heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
+ extractor.SetCollectingAllReferences();
heap_->IterateRoots(&extractor, VISIT_ALL);
+ extractor.FillReferences(this);
filler_ = NULL;
return progress_->ProgressReport(false);
}
+bool V8HeapExplorer::IterateAndSetObjectNames(SnapshotFillerInterface* filler) {
+ HeapIterator iterator(HeapIterator::kFilterUnreachable);
+ filler_ = filler;
+ for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
+ SetObjectName(obj);
+ }
+ return true;
+}
+
+
+void V8HeapExplorer::SetObjectName(HeapObject* object) {
+ if (!object->IsJSObject() || object->IsJSRegExp() || object->IsJSFunction()) {
+ return;
+ }
+ const char* name = collection_->names()->GetName(
+ GetConstructorName(JSObject::cast(object)));
+ if (object->IsJSGlobalObject()) {
+ const char* tag = objects_tags_.GetTag(object);
+ if (tag != NULL) {
+ name = collection_->names()->GetFormatted("%s / %s", name, tag);
+ }
+ }
+ GetEntry(object)->set_name(name);
+}
+
+
void V8HeapExplorer::SetClosureReference(HeapObject* parent_obj,
HeapEntry* parent_entry,
String* reference_name,
@@ -2258,6 +2401,22 @@ void V8HeapExplorer::SetClosureReference(HeapObject* parent_obj,
}
+void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
+ HeapEntry* parent_entry,
+ const char* reference_name,
+ Object* child_obj) {
+ HeapEntry* child_entry = GetEntry(child_obj);
+ if (child_entry != NULL) {
+ filler_->SetNamedReference(HeapGraphEdge::kShortcut,
+ parent_obj,
+ parent_entry,
+ reference_name,
+ child_obj,
+ child_entry);
+ }
+}
+
+
void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
HeapEntry* parent_entry,
int index,
@@ -2326,19 +2485,45 @@ void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
}
+void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
+ HeapEntry* parent_entry,
+ int index,
+ Object* child_obj,
+ int field_offset) {
+ HeapEntry* child_entry = GetEntry(child_obj);
+ if (child_entry != NULL) {
+ filler_->SetIndexedReference(HeapGraphEdge::kWeak,
+ parent_obj,
+ parent_entry,
+ index,
+ child_obj,
+ child_entry);
+ IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
+ }
+}
+
+
void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
HeapEntry* parent_entry,
String* reference_name,
Object* child_obj,
+ const char* name_format_string,
int field_offset) {
HeapEntry* child_entry = GetEntry(child_obj);
if (child_entry != NULL) {
HeapGraphEdge::Type type = reference_name->length() > 0 ?
HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
+ const char* name = name_format_string != NULL ?
+ collection_->names()->GetFormatted(
+ name_format_string,
+ *reference_name->ToCString(DISALLOW_NULLS,
+ ROBUST_STRING_TRAVERSAL)) :
+ collection_->names()->GetName(reference_name);
+
filler_->SetNamedReference(type,
parent_obj,
parent_entry,
- collection_->names()->GetName(reference_name),
+ name,
child_obj,
child_entry);
IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
@@ -2380,12 +2565,21 @@ void V8HeapExplorer::SetRootShortcutReference(Object* child_obj) {
}
-void V8HeapExplorer::SetGcRootsReference(Object* child_obj) {
+void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
+ filler_->SetIndexedAutoIndexReference(
+ HeapGraphEdge::kElement,
+ kGcRootsObject, snapshot_->gc_roots(),
+ GetNthGcSubrootObject(tag), snapshot_->gc_subroot(tag));
+}
+
+
+void V8HeapExplorer::SetGcSubrootReference(
+ VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
HeapEntry* child_entry = GetEntry(child_obj);
if (child_entry != NULL) {
filler_->SetIndexedAutoIndexReference(
- HeapGraphEdge::kElement,
- kGcRootsObject, snapshot_->gc_roots(),
+ is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kElement,
+ GetNthGcSubrootObject(tag), snapshot_->gc_subroot(tag),
child_obj, child_entry);
}
}
@@ -2396,7 +2590,6 @@ void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
!obj->IsOddball() &&
obj != heap_->raw_unchecked_empty_byte_array() &&
obj != heap_->raw_unchecked_empty_fixed_array() &&
- obj != heap_->raw_unchecked_empty_fixed_double_array() &&
obj != heap_->raw_unchecked_empty_descriptor_array()) {
objects_tags_.SetTag(obj, tag);
}
@@ -2429,6 +2622,7 @@ class GlobalObjectsEnumerator : public ObjectVisitor {
// Modifies heap. Must not be run during heap traversal.
void V8HeapExplorer::TagGlobalObjects() {
+ HandleScope scope;
Isolate* isolate = Isolate::Current();
GlobalObjectsEnumerator enumerator;
isolate->global_handles()->IterateAllRoots(&enumerator);
@@ -2439,6 +2633,7 @@ void V8HeapExplorer::TagGlobalObjects() {
const char** urls = NewArray<const char*>(enumerator.count());
for (int i = 0, l = enumerator.count(); i < l; ++i) {
urls[i] = NULL;
+ HandleScope scope;
Handle<JSGlobalObject> global_obj = enumerator.at(i);
Object* obj_document;
if (global_obj->GetProperty(*document_string)->ToObject(&obj_document) &&
@@ -2476,9 +2671,43 @@ class GlobalHandlesExtractor : public ObjectVisitor {
NativeObjectsExplorer* explorer_;
};
-HeapThing const NativeObjectsExplorer::kNativesRootObject =
- reinterpret_cast<HeapThing>(
- static_cast<intptr_t>(HeapObjectsMap::kNativesRootObjectId));
+
+class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
+ public:
+ BasicHeapEntriesAllocator(
+ HeapSnapshot* snapshot,
+ HeapEntry::Type entries_type)
+ : snapshot_(snapshot),
+ collection_(snapshot_->collection()),
+ entries_type_(entries_type) {
+ }
+ virtual HeapEntry* AllocateEntry(
+ HeapThing ptr, int children_count, int retainers_count);
+ private:
+ HeapSnapshot* snapshot_;
+ HeapSnapshotsCollection* collection_;
+ HeapEntry::Type entries_type_;
+};
+
+
+HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(
+ HeapThing ptr, int children_count, int retainers_count) {
+ v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
+ intptr_t elements = info->GetElementCount();
+ intptr_t size = info->GetSizeInBytes();
+ return snapshot_->AddEntry(
+ entries_type_,
+ elements != -1 ?
+ collection_->names()->GetFormatted(
+ "%s / %" V8_PTR_PREFIX "d entries",
+ info->GetLabel(),
+ info->GetElementCount()) :
+ collection_->names()->GetCopy(info->GetLabel()),
+ HeapObjectsMap::GenerateId(info),
+ size != -1 ? static_cast<int>(size) : 0,
+ children_count,
+ retainers_count);
+}
NativeObjectsExplorer::NativeObjectsExplorer(
@@ -2488,7 +2717,12 @@ NativeObjectsExplorer::NativeObjectsExplorer(
progress_(progress),
embedder_queried_(false),
objects_by_info_(RetainedInfosMatch),
+ native_groups_(StringsMatch),
filler_(NULL) {
+ synthetic_entries_allocator_ =
+ new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
+ native_entries_allocator_ =
+ new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
}
@@ -2503,37 +2737,15 @@ NativeObjectsExplorer::~NativeObjectsExplorer() {
reinterpret_cast<List<HeapObject*>* >(p->value);
delete objects;
}
-}
-
-
-HeapEntry* NativeObjectsExplorer::AllocateEntry(
- HeapThing ptr, int children_count, int retainers_count) {
- if (ptr == kNativesRootObject) {
- return snapshot_->AddNativesRootEntry(children_count, retainers_count);
- } else {
+ for (HashMap::Entry* p = native_groups_.Start();
+ p != NULL;
+ p = native_groups_.Next(p)) {
v8::RetainedObjectInfo* info =
- reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
- intptr_t elements = info->GetElementCount();
- intptr_t size = info->GetSizeInBytes();
- return snapshot_->AddEntry(
- HeapEntry::kNative,
- elements != -1 ?
- collection_->names()->GetFormatted(
- "%s / %" V8_PTR_PREFIX "d entries",
- info->GetLabel(),
- info->GetElementCount()) :
- collection_->names()->GetCopy(info->GetLabel()),
- HeapObjectsMap::GenerateId(info),
- size != -1 ? static_cast<int>(size) : 0,
- children_count,
- retainers_count);
+ reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
+ info->Dispose();
}
-}
-
-
-void NativeObjectsExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
- if (EstimateObjectsCount() <= 0) return;
- filler->AddEntry(kNativesRootObject, this);
+ delete synthetic_entries_allocator_;
+ delete native_entries_allocator_;
}
@@ -2568,6 +2780,29 @@ void NativeObjectsExplorer::FillRetainedObjects() {
embedder_queried_ = true;
}
+void NativeObjectsExplorer::FillImplicitReferences() {
+ Isolate* isolate = Isolate::Current();
+ List<ImplicitRefGroup*>* groups =
+ isolate->global_handles()->implicit_ref_groups();
+ for (int i = 0; i < groups->length(); ++i) {
+ ImplicitRefGroup* group = groups->at(i);
+ HeapObject* parent = *group->parent_;
+ HeapEntry* parent_entry =
+ filler_->FindOrAddEntry(parent, native_entries_allocator_);
+ ASSERT(parent_entry != NULL);
+ Object*** children = group->children_;
+ for (size_t j = 0; j < group->length_; ++j) {
+ Object* child = *children[j];
+ HeapEntry* child_entry =
+ filler_->FindOrAddEntry(child, native_entries_allocator_);
+ filler_->SetNamedReference(
+ HeapGraphEdge::kInternal,
+ parent, parent_entry,
+ "native",
+ child, child_entry);
+ }
+ }
+}
List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
v8::RetainedObjectInfo* info) {
@@ -2584,34 +2819,82 @@ List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
bool NativeObjectsExplorer::IterateAndExtractReferences(
SnapshotFillerInterface* filler) {
- if (EstimateObjectsCount() <= 0) return true;
filler_ = filler;
FillRetainedObjects();
- for (HashMap::Entry* p = objects_by_info_.Start();
- p != NULL;
- p = objects_by_info_.Next(p)) {
- v8::RetainedObjectInfo* info =
- reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
- SetNativeRootReference(info);
- List<HeapObject*>* objects =
- reinterpret_cast<List<HeapObject*>* >(p->value);
- for (int i = 0; i < objects->length(); ++i) {
- SetWrapperNativeReferences(objects->at(i), info);
+ FillImplicitReferences();
+ if (EstimateObjectsCount() > 0) {
+ for (HashMap::Entry* p = objects_by_info_.Start();
+ p != NULL;
+ p = objects_by_info_.Next(p)) {
+ v8::RetainedObjectInfo* info =
+ reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
+ SetNativeRootReference(info);
+ List<HeapObject*>* objects =
+ reinterpret_cast<List<HeapObject*>* >(p->value);
+ for (int i = 0; i < objects->length(); ++i) {
+ SetWrapperNativeReferences(objects->at(i), info);
+ }
}
+ SetRootNativeRootsReference();
}
- SetRootNativesRootReference();
filler_ = NULL;
return true;
}
+class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
+ public:
+ explicit NativeGroupRetainedObjectInfo(const char* label)
+ : disposed_(false),
+ hash_(reinterpret_cast<intptr_t>(label)),
+ label_(label) {
+ }
+
+ virtual ~NativeGroupRetainedObjectInfo() {}
+ virtual void Dispose() {
+ CHECK(!disposed_);
+ disposed_ = true;
+ delete this;
+ }
+ virtual bool IsEquivalent(RetainedObjectInfo* other) {
+ return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
+ }
+ virtual intptr_t GetHash() { return hash_; }
+ virtual const char* GetLabel() { return label_; }
+
+ private:
+ bool disposed_;
+ intptr_t hash_;
+ const char* label_;
+};
+
+
+NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
+ const char* label) {
+ const char* label_copy = collection_->names()->GetCopy(label);
+ uint32_t hash = HashSequentialString(label_copy,
+ static_cast<int>(strlen(label_copy)),
+ HEAP->HashSeed());
+ HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
+ hash, true);
+ if (entry->value == NULL)
+ entry->value = new NativeGroupRetainedObjectInfo(label);
+ return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
+}
+
+
void NativeObjectsExplorer::SetNativeRootReference(
v8::RetainedObjectInfo* info) {
- HeapEntry* child_entry = filler_->FindOrAddEntry(info, this);
+ HeapEntry* child_entry =
+ filler_->FindOrAddEntry(info, native_entries_allocator_);
ASSERT(child_entry != NULL);
- filler_->SetIndexedAutoIndexReference(
- HeapGraphEdge::kElement,
- kNativesRootObject, snapshot_->natives_root(),
+ NativeGroupRetainedObjectInfo* group_info =
+ FindOrAddGroupInfo(info->GetGroupLabel());
+ HeapEntry* group_entry =
+ filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
+ filler_->SetNamedAutoIndexReference(
+ HeapGraphEdge::kInternal,
+ group_info, group_entry,
info, child_entry);
}
@@ -2620,7 +2903,8 @@ void NativeObjectsExplorer::SetWrapperNativeReferences(
HeapObject* wrapper, v8::RetainedObjectInfo* info) {
HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
ASSERT(wrapper_entry != NULL);
- HeapEntry* info_entry = filler_->FindOrAddEntry(info, this);
+ HeapEntry* info_entry =
+ filler_->FindOrAddEntry(info, native_entries_allocator_);
ASSERT(info_entry != NULL);
filler_->SetNamedReference(HeapGraphEdge::kInternal,
wrapper, wrapper_entry,
@@ -2632,11 +2916,20 @@ void NativeObjectsExplorer::SetWrapperNativeReferences(
}
-void NativeObjectsExplorer::SetRootNativesRootReference() {
- filler_->SetIndexedAutoIndexReference(
- HeapGraphEdge::kElement,
- V8HeapExplorer::kInternalRootObject, snapshot_->root(),
- kNativesRootObject, snapshot_->natives_root());
+void NativeObjectsExplorer::SetRootNativeRootsReference() {
+ for (HashMap::Entry* entry = native_groups_.Start();
+ entry;
+ entry = native_groups_.Next(entry)) {
+ NativeGroupRetainedObjectInfo* group_info =
+ static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
+ HeapEntry* group_entry =
+ filler_->FindOrAddEntry(group_info, native_entries_allocator_);
+ ASSERT(group_entry != NULL);
+ filler_->SetIndexedAutoIndexReference(
+ HeapGraphEdge::kElement,
+ V8HeapExplorer::kInternalRootObject, snapshot_->root(),
+ group_info, group_entry);
+ }
}
@@ -2650,15 +2943,6 @@ void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
}
-HeapSnapshotGenerator::HeapSnapshotGenerator(HeapSnapshot* snapshot,
- v8::ActivityControl* control)
- : snapshot_(snapshot),
- control_(control),
- v8_heap_explorer_(snapshot_, this),
- dom_explorer_(snapshot_, this) {
-}
-
-
class SnapshotCounter : public SnapshotFillerInterface {
public:
explicit SnapshotCounter(HeapEntriesMap* entries) : entries_(entries) { }
@@ -2783,6 +3067,15 @@ class SnapshotFiller : public SnapshotFillerInterface {
};
+HeapSnapshotGenerator::HeapSnapshotGenerator(HeapSnapshot* snapshot,
+ v8::ActivityControl* control)
+ : snapshot_(snapshot),
+ control_(control),
+ v8_heap_explorer_(snapshot_, this),
+ dom_explorer_(snapshot_, this) {
+}
+
+
bool HeapSnapshotGenerator::GenerateSnapshot() {
v8_heap_explorer_.TagGlobalObjects();
@@ -2790,8 +3083,12 @@ bool HeapSnapshotGenerator::GenerateSnapshot() {
// full GC is reachable from the root when computing dominators.
// This is not true for weakly reachable objects.
// As a temporary solution we call GC twice.
- Isolate::Current()->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask);
- Isolate::Current()->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ Isolate::Current()->heap()->CollectAllGarbage(
+ Heap::kMakeHeapIterableMask,
+ "HeapSnapshotGenerator::GenerateSnapshot");
+ Isolate::Current()->heap()->CollectAllGarbage(
+ Heap::kMakeHeapIterableMask,
+ "HeapSnapshotGenerator::GenerateSnapshot");
#ifdef DEBUG
Heap* debug_heap = Isolate::Current()->heap();
@@ -2810,7 +3107,7 @@ bool HeapSnapshotGenerator::GenerateSnapshot() {
debug_heap->Verify();
#endif
- SetProgressTotal(4); // 2 passes + dominators + sizes.
+ SetProgressTotal(2); // 2 passes.
#ifdef DEBUG
debug_heap->Verify();
@@ -2823,17 +3120,21 @@ bool HeapSnapshotGenerator::GenerateSnapshot() {
debug_heap->Verify();
#endif
- // Allocate and fill entries in the snapshot, allocate references.
+ // Allocate memory for entries and references.
snapshot_->AllocateEntries(entries_.entries_count(),
entries_.total_children_count(),
entries_.total_retainers_count());
- entries_.AllocateEntries();
+
+ // Allocate heap objects to entries hash map.
+ entries_.AllocateEntries(V8HeapExplorer::kInternalRootObject);
// Pass 2. Fill references.
if (!FillReferences()) return false;
+ snapshot_->RememberLastJSObjectId();
+
if (!SetEntriesDominators()) return false;
- if (!ApproximateRetainedSizes()) return false;
+ if (!CalculateRetainedSizes()) return false;
progress_counter_ = progress_total_;
if (!ProgressReport(true)) return false;
@@ -2871,18 +3172,22 @@ void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
bool HeapSnapshotGenerator::CountEntriesAndReferences() {
SnapshotCounter counter(&entries_);
v8_heap_explorer_.AddRootEntries(&counter);
- dom_explorer_.AddRootEntries(&counter);
- return
- v8_heap_explorer_.IterateAndExtractReferences(&counter) &&
- dom_explorer_.IterateAndExtractReferences(&counter);
+ return v8_heap_explorer_.IterateAndExtractReferences(&counter)
+ && dom_explorer_.IterateAndExtractReferences(&counter);
}
bool HeapSnapshotGenerator::FillReferences() {
SnapshotFiller filler(snapshot_, &entries_);
- return
- v8_heap_explorer_.IterateAndExtractReferences(&filler) &&
- dom_explorer_.IterateAndExtractReferences(&filler);
+ // IterateAndExtractReferences cannot set object names because
+ // it makes call to JSObject::LocalLookupRealNamedProperty which
+ // in turn may relocate objects in property maps thus changing the heap
+ // layout and affecting retainer counts. This is not acceptable because
+ // number of retainers must not change between count and fill passes.
+ // To avoid this there's a separate postpass that sets object names.
+ return v8_heap_explorer_.IterateAndExtractReferences(&filler)
+ && dom_explorer_.IterateAndExtractReferences(&filler)
+ && v8_heap_explorer_.IterateAndSetObjectNames(&filler);
}
@@ -2892,7 +3197,7 @@ void HeapSnapshotGenerator::FillReversePostorderIndexes(
int current_entry = 0;
List<HeapEntry*> nodes_to_visit;
nodes_to_visit.Add(snapshot_->root());
- snapshot_->root()->paint_reachable();
+ snapshot_->root()->paint();
while (!nodes_to_visit.is_empty()) {
HeapEntry* entry = nodes_to_visit.last();
Vector<HeapGraphEdge> children = entry->children();
@@ -2900,9 +3205,9 @@ void HeapSnapshotGenerator::FillReversePostorderIndexes(
for (int i = 0; i < children.length(); ++i) {
if (children[i].type() == HeapGraphEdge::kShortcut) continue;
HeapEntry* child = children[i].to();
- if (!child->painted_reachable()) {
+ if (!child->painted()) {
nodes_to_visit.Add(child);
- child->paint_reachable();
+ child->paint();
has_new_edges = true;
}
}
@@ -2916,59 +3221,72 @@ void HeapSnapshotGenerator::FillReversePostorderIndexes(
}
-static int Intersect(int i1, int i2, const Vector<HeapEntry*>& dominators) {
+static int Intersect(int i1, int i2, const Vector<int>& dominators) {
int finger1 = i1, finger2 = i2;
while (finger1 != finger2) {
- while (finger1 < finger2) finger1 = dominators[finger1]->ordered_index();
- while (finger2 < finger1) finger2 = dominators[finger2]->ordered_index();
+ while (finger1 < finger2) finger1 = dominators[finger1];
+ while (finger2 < finger1) finger2 = dominators[finger2];
}
return finger1;
}
+
// The algorithm is based on the article:
// K. Cooper, T. Harvey and K. Kennedy "A Simple, Fast Dominance Algorithm"
// Softw. Pract. Exper. 4 (2001), pp. 1-10.
bool HeapSnapshotGenerator::BuildDominatorTree(
const Vector<HeapEntry*>& entries,
- Vector<HeapEntry*>* dominators) {
+ Vector<int>* dominators) {
if (entries.length() == 0) return true;
const int entries_length = entries.length(), root_index = entries_length - 1;
- for (int i = 0; i < root_index; ++i) (*dominators)[i] = NULL;
- (*dominators)[root_index] = entries[root_index];
- int changed = 1;
- const int base_progress_counter = progress_counter_;
- while (changed != 0) {
- changed = 0;
+ static const int kNoDominator = -1;
+ for (int i = 0; i < root_index; ++i) (*dominators)[i] = kNoDominator;
+ (*dominators)[root_index] = root_index;
+
+ // The affected array is used to mark entries whose dominators
+ // have to be recalculated because of changes in their retainers.
+ ScopedVector<bool> affected(entries_length);
+ for (int i = 0; i < affected.length(); ++i) affected[i] = false;
+ // Mark the root direct children as affected.
+ Vector<HeapGraphEdge> children = entries[root_index]->children();
+ for (int i = 0; i < children.length(); ++i) {
+ affected[children[i].to()->ordered_index()] = true;
+ }
+
+ bool changed = true;
+ while (changed) {
+ changed = false;
+ if (!ProgressReport(true)) return false;
for (int i = root_index - 1; i >= 0; --i) {
- HeapEntry* new_idom = NULL;
+ if (!affected[i]) continue;
+ affected[i] = false;
+ // If dominator of the entry has already been set to root,
+ // then it can't propagate any further.
+ if ((*dominators)[i] == root_index) continue;
+ int new_idom_index = kNoDominator;
Vector<HeapGraphEdge*> rets = entries[i]->retainers();
- int j = 0;
- for (; j < rets.length(); ++j) {
+ for (int j = 0; j < rets.length(); ++j) {
if (rets[j]->type() == HeapGraphEdge::kShortcut) continue;
- HeapEntry* ret = rets[j]->From();
- if (dominators->at(ret->ordered_index()) != NULL) {
- new_idom = ret;
- break;
+ int ret_index = rets[j]->From()->ordered_index();
+ if (dominators->at(ret_index) != kNoDominator) {
+ new_idom_index = new_idom_index == kNoDominator
+ ? ret_index
+ : Intersect(ret_index, new_idom_index, *dominators);
+ // If idom has already reached the root, it doesn't make sense
+ // to check other retainers.
+ if (new_idom_index == root_index) break;
}
}
- for (++j; j < rets.length(); ++j) {
- if (rets[j]->type() == HeapGraphEdge::kShortcut) continue;
- HeapEntry* ret = rets[j]->From();
- if (dominators->at(ret->ordered_index()) != NULL) {
- new_idom = entries[Intersect(ret->ordered_index(),
- new_idom->ordered_index(),
- *dominators)];
+ if (new_idom_index != kNoDominator
+ && dominators->at(i) != new_idom_index) {
+ (*dominators)[i] = new_idom_index;
+ changed = true;
+ Vector<HeapGraphEdge> children = entries[i]->children();
+ for (int j = 0; j < children.length(); ++j) {
+ affected[children[j].to()->ordered_index()] = true;
}
}
- if (new_idom != NULL && dominators->at(i) != new_idom) {
- (*dominators)[i] = new_idom;
- ++changed;
- }
}
- int remaining = entries_length - changed;
- if (remaining < 0) remaining = 0;
- progress_counter_ = base_progress_counter + remaining;
- if (!ProgressReport(true)) return false;
}
return true;
}
@@ -2978,40 +3296,49 @@ bool HeapSnapshotGenerator::SetEntriesDominators() {
// This array is used for maintaining reverse postorder of nodes.
ScopedVector<HeapEntry*> ordered_entries(snapshot_->entries()->length());
FillReversePostorderIndexes(&ordered_entries);
- ScopedVector<HeapEntry*> dominators(ordered_entries.length());
+ ScopedVector<int> dominators(ordered_entries.length());
if (!BuildDominatorTree(ordered_entries, &dominators)) return false;
for (int i = 0; i < ordered_entries.length(); ++i) {
- ASSERT(dominators[i] != NULL);
- ordered_entries[i]->set_dominator(dominators[i]);
+ ASSERT(dominators[i] >= 0);
+ ordered_entries[i]->set_dominator(ordered_entries[dominators[i]]);
}
return true;
}
-bool HeapSnapshotGenerator::ApproximateRetainedSizes() {
+bool HeapSnapshotGenerator::CalculateRetainedSizes() {
// As for the dominators tree we only know parent nodes, not
// children, to sum up total sizes we "bubble" node's self size
// adding it to all of its parents.
- for (int i = 0; i < snapshot_->entries()->length(); ++i) {
- HeapEntry* entry = snapshot_->entries()->at(i);
+ List<HeapEntry*>& entries = *snapshot_->entries();
+ for (int i = 0; i < entries.length(); ++i) {
+ HeapEntry* entry = entries[i];
entry->set_retained_size(entry->self_size());
}
- for (int i = 0;
- i < snapshot_->entries()->length();
- ++i, ProgressStep()) {
- HeapEntry* entry = snapshot_->entries()->at(i);
+ for (int i = 0; i < entries.length(); ++i) {
+ HeapEntry* entry = entries[i];
int entry_size = entry->self_size();
for (HeapEntry* dominator = entry->dominator();
dominator != entry;
entry = dominator, dominator = entry->dominator()) {
dominator->add_retained_size(entry_size);
}
- if (!ProgressReport()) return false;
}
return true;
}
+template<int bytes> struct MaxDecimalDigitsIn;
+template<> struct MaxDecimalDigitsIn<4> {
+ static const int kSigned = 11;
+ static const int kUnsigned = 10;
+};
+template<> struct MaxDecimalDigitsIn<8> {
+ static const int kSigned = 20;
+ static const int kUnsigned = 20;
+};
+
+
class OutputStreamWriter {
public:
explicit OutputStreamWriter(v8::OutputStream* stream)
@@ -3046,9 +3373,7 @@ class OutputStreamWriter {
MaybeWriteChunk();
}
}
- void AddNumber(int n) { AddNumberImpl<int>(n, "%d"); }
void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
- void AddNumber(uint64_t n) { AddNumberImpl<uint64_t>(n, "%llu"); }
void Finalize() {
if (aborted_) return;
ASSERT(chunk_pos_ < chunk_size_);
@@ -3061,23 +3386,34 @@ class OutputStreamWriter {
private:
template<typename T>
void AddNumberImpl(T n, const char* format) {
- ScopedVector<char> buffer(32);
- int result = OS::SNPrintF(buffer, format, n);
- USE(result);
- ASSERT(result != -1);
- AddString(buffer.start());
+ // Buffer for the longest value plus trailing \0
+ static const int kMaxNumberSize =
+ MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
+ if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
+ int result = OS::SNPrintF(
+ chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
+ ASSERT(result != -1);
+ chunk_pos_ += result;
+ MaybeWriteChunk();
+ } else {
+ EmbeddedVector<char, kMaxNumberSize> buffer;
+ int result = OS::SNPrintF(buffer, format, n);
+ USE(result);
+ ASSERT(result != -1);
+ AddString(buffer.start());
+ }
}
void MaybeWriteChunk() {
ASSERT(chunk_pos_ <= chunk_size_);
if (chunk_pos_ == chunk_size_) {
WriteChunk();
- chunk_pos_ = 0;
}
}
void WriteChunk() {
if (aborted_) return;
if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
v8::OutputStream::kAbort) aborted_ = true;
+ chunk_pos_ = 0;
}
v8::OutputStream* stream_;
@@ -3087,22 +3423,20 @@ class OutputStreamWriter {
bool aborted_;
};
-const int HeapSnapshotJSONSerializer::kMaxSerializableSnapshotRawSize =
- 256 * MB;
void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
ASSERT(writer_ == NULL);
writer_ = new OutputStreamWriter(stream);
HeapSnapshot* original_snapshot = NULL;
- if (snapshot_->raw_entries_size() >= kMaxSerializableSnapshotRawSize) {
+ if (snapshot_->raw_entries_size() >=
+ SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize) {
// The snapshot is too big. Serialize a fake snapshot.
original_snapshot = snapshot_;
snapshot_ = CreateFakeSnapshot();
}
// Since nodes graph is cyclic, we need the first pass to enumerate
// them. Strings can be serialized in one pass.
- EnumerateNodes();
SerializeImpl();
delete writer_;
@@ -3122,8 +3456,14 @@ HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() {
snapshot_->uid());
result->AllocateEntries(2, 1, 0);
HeapEntry* root = result->AddRootEntry(1);
+ const char* text = snapshot_->collection()->names()->GetFormatted(
+ "The snapshot is too big. "
+ "Maximum snapshot size is %" V8_PTR_PREFIX "u MB. "
+ "Actual snapshot size is %" V8_PTR_PREFIX "u MB.",
+ SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize / MB,
+ (snapshot_->raw_entries_size() + MB - 1) / MB);
HeapEntry* message = result->AddEntry(
- HeapEntry::kString, "The snapshot is too big", 0, 4, 0, 0);
+ HeapEntry::kString, text, 0, 4, 0, 0);
root->SetUnidirElementReference(0, 1, message);
result->SetDominatorsToSelf();
return result;
@@ -3149,34 +3489,6 @@ void HeapSnapshotJSONSerializer::SerializeImpl() {
}
-class HeapSnapshotJSONSerializerEnumerator {
- public:
- explicit HeapSnapshotJSONSerializerEnumerator(HeapSnapshotJSONSerializer* s)
- : s_(s) {
- }
- void Apply(HeapEntry** entry) {
- s_->GetNodeId(*entry);
- }
- private:
- HeapSnapshotJSONSerializer* s_;
-};
-
-void HeapSnapshotJSONSerializer::EnumerateNodes() {
- GetNodeId(snapshot_->root()); // Make sure root gets the first id.
- HeapSnapshotJSONSerializerEnumerator iter(this);
- snapshot_->IterateEntries(&iter);
-}
-
-
-int HeapSnapshotJSONSerializer::GetNodeId(HeapEntry* entry) {
- HashMap::Entry* cache_entry = nodes_.Lookup(entry, ObjectHash(entry), true);
- if (cache_entry->value == NULL) {
- cache_entry->value = reinterpret_cast<void*>(next_node_id_++);
- }
- return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
-}
-
-
int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
HashMap::Entry* cache_entry = strings_.Lookup(
const_cast<char*>(s), ObjectHash(s), true);
@@ -3187,38 +3499,78 @@ int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
}
-void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge) {
- writer_->AddCharacter(',');
- writer_->AddNumber(edge->type());
- writer_->AddCharacter(',');
- if (edge->type() == HeapGraphEdge::kElement
- || edge->type() == HeapGraphEdge::kHidden) {
- writer_->AddNumber(edge->index());
- } else {
- writer_->AddNumber(GetStringId(edge->name()));
+// This function won't work correctly for MIN_INT but this is not
+// a problem for heap snapshot serialization.
+static int itoa(int value, const Vector<char>& buffer, int buffer_pos) {
+ if (value < 0) {
+ buffer[buffer_pos++] = '-';
+ value = -value;
}
- writer_->AddCharacter(',');
- writer_->AddNumber(GetNodeId(edge->to()));
+
+ int number_of_digits = 0;
+ int t = value;
+ do {
+ ++number_of_digits;
+ } while (t /= 10);
+
+ buffer_pos += number_of_digits;
+ int result = buffer_pos;
+ do {
+ int last_digit = value % 10;
+ buffer[--buffer_pos] = '0' + last_digit;
+ value /= 10;
+ } while (value);
+ return result;
+}
+
+
+void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge) {
+ // The buffer needs space for 3 ints, 3 commas and \0
+ static const int kBufferSize =
+ MaxDecimalDigitsIn<sizeof(int)>::kSigned * 3 + 3 + 1; // NOLINT
+ EmbeddedVector<char, kBufferSize> buffer;
+ int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
+ || edge->type() == HeapGraphEdge::kHidden
+ || edge->type() == HeapGraphEdge::kWeak
+ ? edge->index() : GetStringId(edge->name());
+ int buffer_pos = 0;
+ buffer[buffer_pos++] = ',';
+ buffer_pos = itoa(edge->type(), buffer, buffer_pos);
+ buffer[buffer_pos++] = ',';
+ buffer_pos = itoa(edge_name_or_index, buffer, buffer_pos);
+ buffer[buffer_pos++] = ',';
+ buffer_pos = itoa(edge->to()->entry_index(), buffer, buffer_pos);
+ buffer[buffer_pos++] = '\0';
+ writer_->AddString(buffer.start());
}
void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
- writer_->AddCharacter('\n');
- writer_->AddCharacter(',');
- writer_->AddNumber(entry->type());
- writer_->AddCharacter(',');
- writer_->AddNumber(GetStringId(entry->name()));
- writer_->AddCharacter(',');
- writer_->AddNumber(entry->id());
- writer_->AddCharacter(',');
- writer_->AddNumber(entry->self_size());
- writer_->AddCharacter(',');
- writer_->AddNumber(entry->RetainedSize(false));
- writer_->AddCharacter(',');
- writer_->AddNumber(GetNodeId(entry->dominator()));
+ // The buffer needs space for 6 ints, 1 uint32_t, 7 commas, \n and \0
+ static const int kBufferSize =
+ 6 * MaxDecimalDigitsIn<sizeof(int)>::kSigned // NOLINT
+ + MaxDecimalDigitsIn<sizeof(uint32_t)>::kUnsigned // NOLINT
+ + 7 + 1 + 1;
+ EmbeddedVector<char, kBufferSize> buffer;
Vector<HeapGraphEdge> children = entry->children();
- writer_->AddCharacter(',');
- writer_->AddNumber(children.length());
+ int buffer_pos = 0;
+ buffer[buffer_pos++] = '\n';
+ buffer[buffer_pos++] = ',';
+ buffer_pos = itoa(entry->type(), buffer, buffer_pos);
+ buffer[buffer_pos++] = ',';
+ buffer_pos = itoa(GetStringId(entry->name()), buffer, buffer_pos);
+ buffer[buffer_pos++] = ',';
+ buffer_pos = itoa(entry->id(), buffer, buffer_pos);
+ buffer[buffer_pos++] = ',';
+ buffer_pos = itoa(entry->self_size(), buffer, buffer_pos);
+ buffer[buffer_pos++] = ',';
+ buffer_pos = itoa(entry->retained_size(), buffer, buffer_pos);
+ buffer[buffer_pos++] = ',';
+ buffer_pos = itoa(entry->dominator()->entry_index(), buffer, buffer_pos);
+ buffer[buffer_pos++] = ',';
+ buffer_pos = itoa(children.length(), buffer, buffer_pos);
+ buffer[buffer_pos++] = '\0';
+ writer_->AddString(buffer.start());
for (int i = 0; i < children.length(); ++i) {
SerializeEdge(&children[i]);
if (writer_->aborted()) return;
@@ -3253,7 +3605,8 @@ void HeapSnapshotJSONSerializer::SerializeNodes() {
"," JSON_S("closure")
"," JSON_S("regexp")
"," JSON_S("number")
- "," JSON_S("native"))
+ "," JSON_S("native")
+ "," JSON_S("synthetic"))
"," JSON_S("string")
"," JSON_S("number")
"," JSON_S("number")
@@ -3272,7 +3625,8 @@ void HeapSnapshotJSONSerializer::SerializeNodes() {
"," JSON_S("property")
"," JSON_S("internal")
"," JSON_S("hidden")
- "," JSON_S("shortcut"))
+ "," JSON_S("shortcut")
+ "," JSON_S("weak"))
"," JSON_S("string_or_number")
"," JSON_S("node"))))));
#undef JSON_S
@@ -3282,23 +3636,25 @@ void HeapSnapshotJSONSerializer::SerializeNodes() {
const int node_fields_count = 7;
// type,name,id,self_size,retained_size,dominator,children_count.
const int edge_fields_count = 3; // type,name|index,to_node.
- List<HashMap::Entry*> sorted_nodes;
- SortHashMap(&nodes_, &sorted_nodes);
- // Rewrite node ids, so they refer to actual array positions.
- if (sorted_nodes.length() > 1) {
+
+ List<HeapEntry*>& nodes = *(snapshot_->entries());
+ // Root must be the first.
+ ASSERT(nodes.first() == snapshot_->root());
+ // Rewrite node indexes, so they refer to actual array positions. Do this
+ // only once.
+ if (nodes[0]->entry_index() == -1) {
// Nodes start from array index 1.
- int prev_value = 1;
- sorted_nodes[0]->value = reinterpret_cast<void*>(prev_value);
- for (int i = 1; i < sorted_nodes.length(); ++i) {
- HeapEntry* prev_heap_entry =
- reinterpret_cast<HeapEntry*>(sorted_nodes[i-1]->key);
- prev_value += node_fields_count +
- prev_heap_entry->children().length() * edge_fields_count;
- sorted_nodes[i]->value = reinterpret_cast<void*>(prev_value);
+ int index = 1;
+ for (int i = 0; i < nodes.length(); ++i) {
+ HeapEntry* node = nodes[i];
+ node->set_entry_index(index);
+ index += node_fields_count +
+ node->children().length() * edge_fields_count;
}
}
- for (int i = 0; i < sorted_nodes.length(); ++i) {
- SerializeNode(reinterpret_cast<HeapEntry*>(sorted_nodes[i]->key));
+
+ for (int i = 0; i < nodes.length(); ++i) {
+ SerializeNode(nodes[i]);
if (writer_->aborted()) return;
}
}
diff --git a/src/3rdparty/v8/src/profile-generator.h b/src/3rdparty/v8/src/profile-generator.h
index 0eb73be..1fa647e 100644
--- a/src/3rdparty/v8/src/profile-generator.h
+++ b/src/3rdparty/v8/src/profile-generator.h
@@ -74,6 +74,8 @@ class StringsStorage {
inline const char* GetFunctionName(const char* name);
private:
+ static const int kMaxNameSize = 1024;
+
INLINE(static bool StringsMatch(void* key1, void* key2)) {
return strcmp(reinterpret_cast<char*>(key1),
reinterpret_cast<char*>(key2)) == 0;
@@ -453,7 +455,8 @@ class HeapGraphEdge BASE_EMBEDDED {
kProperty = v8::HeapGraphEdge::kProperty,
kInternal = v8::HeapGraphEdge::kInternal,
kHidden = v8::HeapGraphEdge::kHidden,
- kShortcut = v8::HeapGraphEdge::kShortcut
+ kShortcut = v8::HeapGraphEdge::kShortcut,
+ kWeak = v8::HeapGraphEdge::kWeak
};
HeapGraphEdge() { }
@@ -463,7 +466,7 @@ class HeapGraphEdge BASE_EMBEDDED {
Type type() { return static_cast<Type>(type_); }
int index() {
- ASSERT(type_ == kElement || type_ == kHidden);
+ ASSERT(type_ == kElement || type_ == kHidden || type_ == kWeak);
return index_;
}
const char* name() {
@@ -522,14 +525,15 @@ class HeapEntry BASE_EMBEDDED {
kClosure = v8::HeapGraphNode::kClosure,
kRegExp = v8::HeapGraphNode::kRegExp,
kHeapNumber = v8::HeapGraphNode::kHeapNumber,
- kNative = v8::HeapGraphNode::kNative
+ kNative = v8::HeapGraphNode::kNative,
+ kSynthetic = v8::HeapGraphNode::kSynthetic
};
HeapEntry() { }
void Init(HeapSnapshot* snapshot,
Type type,
const char* name,
- uint64_t id,
+ SnapshotObjectId id,
int self_size,
int children_count,
int retainers_count);
@@ -537,13 +541,16 @@ class HeapEntry BASE_EMBEDDED {
HeapSnapshot* snapshot() { return snapshot_; }
Type type() { return static_cast<Type>(type_); }
const char* name() { return name_; }
- inline uint64_t id();
+ void set_name(const char* name) { name_ = name; }
+ inline SnapshotObjectId id() { return id_; }
int self_size() { return self_size_; }
int retained_size() { return retained_size_; }
void add_retained_size(int size) { retained_size_ += size; }
void set_retained_size(int value) { retained_size_ = value; }
int ordered_index() { return ordered_index_; }
void set_ordered_index(int value) { ordered_index_ = value; }
+ int entry_index() { return entry_index_; }
+ void set_entry_index(int value) { entry_index_ = value; }
Vector<HeapGraphEdge> children() {
return Vector<HeapGraphEdge>(children_arr(), children_count_); }
@@ -554,22 +561,9 @@ class HeapEntry BASE_EMBEDDED {
ASSERT(entry != NULL);
dominator_ = entry;
}
-
- void clear_paint() { painted_ = kUnpainted; }
- bool painted_reachable() { return painted_ == kPainted; }
- void paint_reachable() {
- ASSERT(painted_ == kUnpainted);
- painted_ = kPainted;
- }
- bool not_painted_reachable_from_others() {
- return painted_ != kPaintedReachableFromOthers;
- }
- void paint_reachable_from_others() {
- painted_ = kPaintedReachableFromOthers;
- }
- template<class Visitor>
- void ApplyAndPaintAllReachable(Visitor* visitor);
- void PaintAllReachable();
+ void clear_paint() { painted_ = false; }
+ bool painted() { return painted_; }
+ void paint() { painted_ = true; }
void SetIndexedReference(HeapGraphEdge::Type type,
int child_index,
@@ -583,16 +577,18 @@ class HeapEntry BASE_EMBEDDED {
int retainer_index);
void SetUnidirElementReference(int child_index, int index, HeapEntry* entry);
- int EntrySize() { return EntriesSize(1, children_count_, retainers_count_); }
- int RetainedSize(bool exact);
+ size_t EntrySize() {
+ return EntriesSize(1, children_count_, retainers_count_);
+ }
- void Print(int max_depth, int indent);
+ void Print(
+ const char* prefix, const char* edge_name, int max_depth, int indent);
Handle<HeapObject> GetHeapObject();
- static int EntriesSize(int entries_count,
- int children_count,
- int retainers_count);
+ static size_t EntriesSize(int entries_count,
+ int children_count,
+ int retainers_count);
private:
HeapGraphEdge* children_arr() {
@@ -601,33 +597,23 @@ class HeapEntry BASE_EMBEDDED {
HeapGraphEdge** retainers_arr() {
return reinterpret_cast<HeapGraphEdge**>(children_arr() + children_count_);
}
- void CalculateExactRetainedSize();
const char* TypeAsString();
- unsigned painted_: 2;
+ unsigned painted_: 1;
unsigned type_: 4;
- int children_count_: 26;
+ int children_count_: 27;
int retainers_count_;
int self_size_;
union {
int ordered_index_; // Used during dominator tree building.
int retained_size_; // At that moment, there is no retained size yet.
};
+ int entry_index_;
+ SnapshotObjectId id_;
HeapEntry* dominator_;
HeapSnapshot* snapshot_;
- struct Id {
- uint32_t id1_;
- uint32_t id2_;
- } id_; // This is to avoid extra padding of 64-bit value.
const char* name_;
- // Paints used for exact retained sizes calculation.
- static const unsigned kUnpainted = 0;
- static const unsigned kPainted = 1;
- static const unsigned kPaintedReachableFromOthers = 2;
-
- static const int kExactRetainedSizeTag = 1;
-
DISALLOW_COPY_AND_ASSIGN(HeapEntry);
};
@@ -659,25 +645,31 @@ class HeapSnapshot {
HeapEntry* root() { return root_entry_; }
HeapEntry* gc_roots() { return gc_roots_entry_; }
HeapEntry* natives_root() { return natives_root_entry_; }
+ HeapEntry* gc_subroot(int index) { return gc_subroot_entries_[index]; }
List<HeapEntry*>* entries() { return &entries_; }
- int raw_entries_size() { return raw_entries_size_; }
+ size_t raw_entries_size() { return raw_entries_size_; }
+ void RememberLastJSObjectId();
+ SnapshotObjectId max_snapshot_js_object_id() const {
+ return max_snapshot_js_object_id_;
+ }
void AllocateEntries(
int entries_count, int children_count, int retainers_count);
HeapEntry* AddEntry(HeapEntry::Type type,
const char* name,
- uint64_t id,
+ SnapshotObjectId id,
int size,
int children_count,
int retainers_count);
HeapEntry* AddRootEntry(int children_count);
HeapEntry* AddGcRootsEntry(int children_count, int retainers_count);
+ HeapEntry* AddGcSubrootEntry(int tag,
+ int children_count,
+ int retainers_count);
HeapEntry* AddNativesRootEntry(int children_count, int retainers_count);
void ClearPaint();
- HeapEntry* GetEntryById(uint64_t id);
+ HeapEntry* GetEntryById(SnapshotObjectId id);
List<HeapEntry*>* GetSortedEntriesList();
- template<class Visitor>
- void IterateEntries(Visitor* visitor) { entries_.Iterate(visitor); }
void SetDominatorsToSelf();
void Print(int max_depth);
@@ -693,10 +685,12 @@ class HeapSnapshot {
HeapEntry* root_entry_;
HeapEntry* gc_roots_entry_;
HeapEntry* natives_root_entry_;
+ HeapEntry* gc_subroot_entries_[VisitorSynchronization::kNumberOfSyncTags];
char* raw_entries_;
List<HeapEntry*> entries_;
- bool entries_sorted_;
- int raw_entries_size_;
+ List<HeapEntry*> sorted_entries_;
+ size_t raw_entries_size_;
+ SnapshotObjectId max_snapshot_js_object_id_;
friend class HeapSnapshotTester;
@@ -710,26 +704,34 @@ class HeapObjectsMap {
~HeapObjectsMap();
void SnapshotGenerationFinished();
- uint64_t FindObject(Address addr);
+ SnapshotObjectId FindObject(Address addr);
void MoveObject(Address from, Address to);
+ SnapshotObjectId last_assigned_id() const {
+ return next_id_ - kObjectIdStep;
+ }
- static uint64_t GenerateId(v8::RetainedObjectInfo* info);
+ static SnapshotObjectId GenerateId(v8::RetainedObjectInfo* info);
+ static inline SnapshotObjectId GetNthGcSubrootId(int delta);
- static const uint64_t kInternalRootObjectId;
- static const uint64_t kGcRootsObjectId;
- static const uint64_t kNativesRootObjectId;
- static const uint64_t kFirstAvailableObjectId;
+ static const int kObjectIdStep = 2;
+ static const SnapshotObjectId kInternalRootObjectId;
+ static const SnapshotObjectId kGcRootsObjectId;
+ static const SnapshotObjectId kNativesRootObjectId;
+ static const SnapshotObjectId kGcRootsFirstSubrootId;
+ static const SnapshotObjectId kFirstAvailableObjectId;
private:
struct EntryInfo {
- explicit EntryInfo(uint64_t id) : id(id), accessed(true) { }
- EntryInfo(uint64_t id, bool accessed) : id(id), accessed(accessed) { }
- uint64_t id;
+ explicit EntryInfo(SnapshotObjectId id) : id(id), accessed(true) { }
+ EntryInfo(SnapshotObjectId id, bool accessed)
+ : id(id),
+ accessed(accessed) { }
+ SnapshotObjectId id;
bool accessed;
};
- void AddEntry(Address addr, uint64_t id);
- uint64_t FindEntry(Address addr);
+ void AddEntry(Address addr, SnapshotObjectId id);
+ SnapshotObjectId FindEntry(Address addr);
void RemoveDeadEntries();
static bool AddressesMatch(void* key1, void* key2) {
@@ -738,11 +740,12 @@ class HeapObjectsMap {
static uint32_t AddressHash(Address addr) {
return ComputeIntegerHash(
- static_cast<uint32_t>(reinterpret_cast<uintptr_t>(addr)));
+ static_cast<uint32_t>(reinterpret_cast<uintptr_t>(addr)),
+ v8::internal::kZeroHashSeed);
}
bool initial_fill_mode_;
- uint64_t next_id_;
+ SnapshotObjectId next_id_;
HashMap entries_map_;
List<EntryInfo>* entries_;
@@ -767,9 +770,12 @@ class HeapSnapshotsCollection {
StringsStorage* names() { return &names_; }
TokenEnumerator* token_enumerator() { return token_enumerator_; }
- uint64_t GetObjectId(Address addr) { return ids_.FindObject(addr); }
- Handle<HeapObject> FindHeapObjectById(uint64_t id);
+ SnapshotObjectId GetObjectId(Address addr) { return ids_.FindObject(addr); }
+ Handle<HeapObject> FindHeapObjectById(SnapshotObjectId id);
void ObjectMoveEvent(Address from, Address to) { ids_.MoveObject(from, to); }
+ SnapshotObjectId last_assigned_id() const {
+ return ids_.last_assigned_id();
+ }
private:
INLINE(static bool HeapSnapshotsMatch(void* key1, void* key2)) {
@@ -810,7 +816,7 @@ class HeapEntriesMap {
HeapEntriesMap();
~HeapEntriesMap();
- void AllocateEntries();
+ void AllocateEntries(HeapThing root_object);
HeapEntry* Map(HeapThing thing);
void Pair(HeapThing thing, HeapEntriesAllocator* allocator, HeapEntry* entry);
void CountReference(HeapThing from, HeapThing to,
@@ -821,7 +827,7 @@ class HeapEntriesMap {
int total_children_count() { return total_children_count_; }
int total_retainers_count() { return total_retainers_count_; }
- static HeapEntry *const kHeapEntryPlaceholder;
+ static HeapEntry* const kHeapEntryPlaceholder;
private:
struct EntryInfo {
@@ -837,9 +843,12 @@ class HeapEntriesMap {
int retainers_count;
};
+ static inline void AllocateHeapEntryForMapEntry(HashMap::Entry* map_entry);
+
static uint32_t Hash(HeapThing thing) {
return ComputeIntegerHash(
- static_cast<uint32_t>(reinterpret_cast<uintptr_t>(thing)));
+ static_cast<uint32_t>(reinterpret_cast<uintptr_t>(thing)),
+ v8::internal::kZeroHashSeed);
}
static bool HeapThingsMatch(HeapThing key1, HeapThing key2) {
return key1 == key2;
@@ -925,6 +934,7 @@ class V8HeapExplorer : public HeapEntriesAllocator {
void AddRootEntries(SnapshotFillerInterface* filler);
int EstimateObjectsCount(HeapIterator* iterator);
bool IterateAndExtractReferences(SnapshotFillerInterface* filler);
+ bool IterateAndSetObjectNames(SnapshotFillerInterface* filler);
void TagGlobalObjects();
static String* GetConstructorName(JSObject* object);
@@ -949,6 +959,10 @@ class V8HeapExplorer : public HeapEntriesAllocator {
HeapEntry* parent,
String* reference_name,
Object* child);
+ void SetNativeBindReference(HeapObject* parent_obj,
+ HeapEntry* parent,
+ const char* reference_name,
+ Object* child);
void SetElementReference(HeapObject* parent_obj,
HeapEntry* parent,
int index,
@@ -967,10 +981,16 @@ class V8HeapExplorer : public HeapEntriesAllocator {
HeapEntry* parent,
int index,
Object* child);
+ void SetWeakReference(HeapObject* parent_obj,
+ HeapEntry* parent_entry,
+ int index,
+ Object* child_obj,
+ int field_offset);
void SetPropertyReference(HeapObject* parent_obj,
HeapEntry* parent,
String* reference_name,
Object* child,
+ const char* name_format_string = NULL,
int field_offset = -1);
void SetPropertyShortcutReference(HeapObject* parent_obj,
HeapEntry* parent,
@@ -978,11 +998,17 @@ class V8HeapExplorer : public HeapEntriesAllocator {
Object* child);
void SetRootShortcutReference(Object* child);
void SetRootGcRootsReference();
- void SetGcRootsReference(Object* child);
+ void SetGcRootsReference(VisitorSynchronization::SyncTag tag);
+ void SetGcSubrootReference(
+ VisitorSynchronization::SyncTag tag, bool is_weak, Object* child);
+ void SetObjectName(HeapObject* object);
void TagObject(Object* obj, const char* tag);
HeapEntry* GetEntry(Object* obj);
+ static inline HeapObject* GetNthGcSubrootObject(int delta);
+ static inline int GetGcSubrootOrder(HeapObject* subroot);
+
Heap* heap_;
HeapSnapshot* snapshot_;
HeapSnapshotsCollection* collection_;
@@ -991,43 +1017,55 @@ class V8HeapExplorer : public HeapEntriesAllocator {
HeapObjectsSet objects_tags_;
static HeapObject* const kGcRootsObject;
+ static HeapObject* const kFirstGcSubrootObject;
+ static HeapObject* const kLastGcSubrootObject;
friend class IndexedReferencesExtractor;
+ friend class GcSubrootsEnumerator;
friend class RootsReferencesExtractor;
DISALLOW_COPY_AND_ASSIGN(V8HeapExplorer);
};
+class NativeGroupRetainedObjectInfo;
+
+
// An implementation of retained native objects extractor.
-class NativeObjectsExplorer : public HeapEntriesAllocator {
+class NativeObjectsExplorer {
public:
NativeObjectsExplorer(HeapSnapshot* snapshot,
SnapshottingProgressReportingInterface* progress);
virtual ~NativeObjectsExplorer();
- virtual HeapEntry* AllocateEntry(
- HeapThing ptr, int children_count, int retainers_count);
void AddRootEntries(SnapshotFillerInterface* filler);
int EstimateObjectsCount();
bool IterateAndExtractReferences(SnapshotFillerInterface* filler);
private:
void FillRetainedObjects();
+ void FillImplicitReferences();
List<HeapObject*>* GetListMaybeDisposeInfo(v8::RetainedObjectInfo* info);
void SetNativeRootReference(v8::RetainedObjectInfo* info);
- void SetRootNativesRootReference();
+ void SetRootNativeRootsReference();
void SetWrapperNativeReferences(HeapObject* wrapper,
v8::RetainedObjectInfo* info);
void VisitSubtreeWrapper(Object** p, uint16_t class_id);
static uint32_t InfoHash(v8::RetainedObjectInfo* info) {
- return ComputeIntegerHash(static_cast<uint32_t>(info->GetHash()));
+ return ComputeIntegerHash(static_cast<uint32_t>(info->GetHash()),
+ v8::internal::kZeroHashSeed);
}
static bool RetainedInfosMatch(void* key1, void* key2) {
return key1 == key2 ||
(reinterpret_cast<v8::RetainedObjectInfo*>(key1))->IsEquivalent(
reinterpret_cast<v8::RetainedObjectInfo*>(key2));
}
+ INLINE(static bool StringsMatch(void* key1, void* key2)) {
+ return strcmp(reinterpret_cast<char*>(key1),
+ reinterpret_cast<char*>(key2)) == 0;
+ }
+
+ NativeGroupRetainedObjectInfo* FindOrAddGroupInfo(const char* label);
HeapSnapshot* snapshot_;
HeapSnapshotsCollection* collection_;
@@ -1036,6 +1074,9 @@ class NativeObjectsExplorer : public HeapEntriesAllocator {
HeapObjectsSet in_groups_;
// RetainedObjectInfo* -> List<HeapObject*>*
HashMap objects_by_info_;
+ HashMap native_groups_;
+ HeapEntriesAllocator* synthetic_entries_allocator_;
+ HeapEntriesAllocator* native_entries_allocator_;
// Used during references extraction.
SnapshotFillerInterface* filler_;
@@ -1054,9 +1095,9 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface {
bool GenerateSnapshot();
private:
- bool ApproximateRetainedSizes();
bool BuildDominatorTree(const Vector<HeapEntry*>& entries,
- Vector<HeapEntry*>* dominators);
+ Vector<int>* dominators);
+ bool CalculateRetainedSizes();
bool CountEntriesAndReferences();
bool FillReferences();
void FillReversePostorderIndexes(Vector<HeapEntry*>* entries);
@@ -1084,7 +1125,6 @@ class HeapSnapshotJSONSerializer {
public:
explicit HeapSnapshotJSONSerializer(HeapSnapshot* snapshot)
: snapshot_(snapshot),
- nodes_(ObjectsMatch),
strings_(ObjectsMatch),
next_node_id_(1),
next_string_id_(1),
@@ -1099,12 +1139,11 @@ class HeapSnapshotJSONSerializer {
INLINE(static uint32_t ObjectHash(const void* key)) {
return ComputeIntegerHash(
- static_cast<uint32_t>(reinterpret_cast<uintptr_t>(key)));
+ static_cast<uint32_t>(reinterpret_cast<uintptr_t>(key)),
+ v8::internal::kZeroHashSeed);
}
- void EnumerateNodes();
HeapSnapshot* CreateFakeSnapshot();
- int GetNodeId(HeapEntry* entry);
int GetStringId(const char* s);
void SerializeEdge(HeapGraphEdge* edge);
void SerializeImpl();
@@ -1118,7 +1157,6 @@ class HeapSnapshotJSONSerializer {
static const int kMaxSerializableSnapshotRawSize;
HeapSnapshot* snapshot_;
- HashMap nodes_;
HashMap strings_;
int next_node_id_;
int next_string_id_;
diff --git a/src/3rdparty/v8/src/property-details.h b/src/3rdparty/v8/src/property-details.h
new file mode 100644
index 0000000..c79aa96
--- /dev/null
+++ b/src/3rdparty/v8/src/property-details.h
@@ -0,0 +1,132 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_PROPERTY_DETAILS_H_
+#define V8_PROPERTY_DETAILS_H_
+
+#include "../include/v8.h"
+#include "allocation.h"
+#include "utils.h"
+
+// Ecma-262 3rd 8.6.1
+enum PropertyAttributes {
+ NONE = v8::None,
+ READ_ONLY = v8::ReadOnly,
+ DONT_ENUM = v8::DontEnum,
+ DONT_DELETE = v8::DontDelete,
+ ABSENT = 16 // Used in runtime to indicate a property is absent.
+ // ABSENT can never be stored in or returned from a descriptor's attributes
+ // bitfield. It is only used as a return value meaning the attributes of
+ // a non-existent property.
+};
+
+
+namespace v8 {
+namespace internal {
+
+class Smi;
+
+// Type of properties.
+// Order of properties is significant.
+// Must fit in the BitField PropertyDetails::TypeField.
+// A copy of this is in mirror-debugger.js.
+enum PropertyType {
+ NORMAL = 0, // only in slow mode
+ FIELD = 1, // only in fast mode
+ CONSTANT_FUNCTION = 2, // only in fast mode
+ CALLBACKS = 3,
+ HANDLER = 4, // only in lookup results, not in descriptors
+ INTERCEPTOR = 5, // only in lookup results, not in descriptors
+ // All properties before MAP_TRANSITION are real.
+ MAP_TRANSITION = 6, // only in fast mode
+ ELEMENTS_TRANSITION = 7,
+ CONSTANT_TRANSITION = 8, // only in fast mode
+ NULL_DESCRIPTOR = 9, // only in fast mode
+ // There are no IC stubs for NULL_DESCRIPTORS. Therefore,
+ // NULL_DESCRIPTOR can be used as the type flag for IC stubs for
+ // nonexistent properties.
+ NONEXISTENT = NULL_DESCRIPTOR
+};
+
+
+// PropertyDetails captures type and attributes for a property.
+// They are used both in property dictionaries and instance descriptors.
+class PropertyDetails BASE_EMBEDDED {
+ public:
+ PropertyDetails(PropertyAttributes attributes,
+ PropertyType type,
+ int index = 0) {
+ ASSERT(TypeField::is_valid(type));
+ ASSERT(AttributesField::is_valid(attributes));
+ ASSERT(StorageField::is_valid(index));
+
+ value_ = TypeField::encode(type)
+ | AttributesField::encode(attributes)
+ | StorageField::encode(index);
+
+ ASSERT(type == this->type());
+ ASSERT(attributes == this->attributes());
+ ASSERT(index == this->index());
+ }
+
+ // Conversion for storing details as Object*.
+ explicit inline PropertyDetails(Smi* smi);
+ inline Smi* AsSmi();
+
+ PropertyType type() { return TypeField::decode(value_); }
+
+ PropertyAttributes attributes() { return AttributesField::decode(value_); }
+
+ int index() { return StorageField::decode(value_); }
+
+ inline PropertyDetails AsDeleted();
+
+ static bool IsValidIndex(int index) {
+ return StorageField::is_valid(index);
+ }
+
+ bool IsReadOnly() { return (attributes() & READ_ONLY) != 0; }
+ bool IsDontDelete() { return (attributes() & DONT_DELETE) != 0; }
+ bool IsDontEnum() { return (attributes() & DONT_ENUM) != 0; }
+ bool IsDeleted() { return DeletedField::decode(value_) != 0;}
+
+ // Bit fields in value_ (type, shift, size). Must be public so the
+ // constants can be embedded in generated code.
+ class TypeField: public BitField<PropertyType, 0, 4> {};
+ class AttributesField: public BitField<PropertyAttributes, 4, 3> {};
+ class DeletedField: public BitField<uint32_t, 7, 1> {};
+ class StorageField: public BitField<uint32_t, 8, 32-8> {};
+
+ static const int kInitialIndex = 1;
+
+ private:
+ uint32_t value_;
+};
+
+} } // namespace v8::internal
+
+#endif // V8_PROPERTY_DETAILS_H_
diff --git a/src/3rdparty/v8/src/property.cc b/src/3rdparty/v8/src/property.cc
index 6e043e2..78f237d 100644
--- a/src/3rdparty/v8/src/property.cc
+++ b/src/3rdparty/v8/src/property.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -91,6 +91,9 @@ void LookupResult::Print(FILE* out) {
break;
case CONSTANT_TRANSITION:
PrintF(out, " -type = constant property transition\n");
+ PrintF(out, " -map:\n");
+ GetTransitionMap()->Print(out);
+ PrintF(out, "\n");
break;
case NULL_DESCRIPTOR:
PrintF(out, " =type = null descriptor\n");
@@ -111,4 +114,28 @@ void Descriptor::Print(FILE* out) {
#endif
+bool Descriptor::ContainsTransition() {
+ switch (details_.type()) {
+ case MAP_TRANSITION:
+ case CONSTANT_TRANSITION:
+ case ELEMENTS_TRANSITION:
+ return true;
+ case CALLBACKS: {
+ if (!value_->IsAccessorPair()) return false;
+ AccessorPair* accessors = AccessorPair::cast(value_);
+ return accessors->getter()->IsMap() || accessors->setter()->IsMap();
+ }
+ case NORMAL:
+ case FIELD:
+ case CONSTANT_FUNCTION:
+ case HANDLER:
+ case INTERCEPTOR:
+ case NULL_DESCRIPTOR:
+ return false;
+ }
+ UNREACHABLE(); // Keep the compiler happy.
+ return false;
+}
+
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/property.h b/src/3rdparty/v8/src/property.h
index ffea41e..04f78b2 100644
--- a/src/3rdparty/v8/src/property.h
+++ b/src/3rdparty/v8/src/property.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -49,11 +49,8 @@ class Descriptor BASE_EMBEDDED {
MUST_USE_RESULT MaybeObject* KeyToSymbol() {
if (!StringShape(key_).IsSymbol()) {
- Object* result;
- { MaybeObject* maybe_result = HEAP->LookupSymbol(key_);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- key_ = String::cast(result);
+ MaybeObject* maybe_result = HEAP->LookupSymbol(key_);
+ if (!maybe_result->To(&key_)) return maybe_result;
}
return key_;
}
@@ -71,6 +68,8 @@ class Descriptor BASE_EMBEDDED {
details_ = PropertyDetails(details_.attributes(), details_.type(), index);
}
+ bool ContainsTransition();
+
private:
String* key_;
Object* value_;
@@ -162,6 +161,35 @@ class CallbacksDescriptor: public Descriptor {
};
+template <class T>
+bool IsPropertyDescriptor(T* desc) {
+ switch (desc->type()) {
+ case NORMAL:
+ case FIELD:
+ case CONSTANT_FUNCTION:
+ case HANDLER:
+ case INTERCEPTOR:
+ return true;
+ case CALLBACKS: {
+ Object* callback_object = desc->GetCallbackObject();
+ // Non-JavaScript (i.e. native) accessors are always a property, otherwise
+ // either the getter or the setter must be an accessor. Put another way:
+ // If we only see map transitions and holes in a pair, this is not a
+ // property.
+ return (!callback_object->IsAccessorPair() ||
+ AccessorPair::cast(callback_object)->ContainsAccessor());
+ }
+ case MAP_TRANSITION:
+ case ELEMENTS_TRANSITION:
+ case CONSTANT_TRANSITION:
+ case NULL_DESCRIPTOR:
+ return false;
+ }
+ UNREACHABLE(); // keep the compiler happy
+ return false;
+}
+
+
class LookupResult BASE_EMBEDDED {
public:
explicit LookupResult(Isolate* isolate)
@@ -259,15 +287,9 @@ class LookupResult BASE_EMBEDDED {
bool IsFound() { return lookup_type_ != NOT_FOUND; }
bool IsHandler() { return lookup_type_ == HANDLER_TYPE; }
- // Is the result is a property excluding transitions and the null
- // descriptor?
+ // Is the result is a property excluding transitions and the null descriptor?
bool IsProperty() {
- return IsFound() && (type() < FIRST_PHANTOM_PROPERTY_TYPE);
- }
-
- // Is the result a property or a transition?
- bool IsPropertyOrTransition() {
- return IsFound() && (type() != NULL_DESCRIPTOR);
+ return IsFound() && IsPropertyDescriptor(this);
}
bool IsCacheable() { return cacheable_; }
@@ -292,10 +314,12 @@ class LookupResult BASE_EMBEDDED {
}
}
+
Map* GetTransitionMap() {
ASSERT(lookup_type_ == DESCRIPTOR_TYPE);
- ASSERT(type() == MAP_TRANSITION || type() == CONSTANT_TRANSITION ||
- type() == ELEMENTS_TRANSITION);
+ ASSERT(type() == MAP_TRANSITION ||
+ type() == ELEMENTS_TRANSITION ||
+ type() == CONSTANT_TRANSITION);
return Map::cast(GetValue());
}
diff --git a/src/3rdparty/v8/src/proxy.js b/src/3rdparty/v8/src/proxy.js
index 3cd467f..4e86c88 100644
--- a/src/3rdparty/v8/src/proxy.js
+++ b/src/3rdparty/v8/src/proxy.js
@@ -25,6 +25,8 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"use strict";
+
global.Proxy = new $Object();
var $Proxy = global.Proxy
diff --git a/src/3rdparty/v8/src/regexp-macro-assembler-irregexp-inl.h b/src/3rdparty/v8/src/regexp-macro-assembler-irregexp-inl.h
index f2a4e85..a767ec0 100644
--- a/src/3rdparty/v8/src/regexp-macro-assembler-irregexp-inl.h
+++ b/src/3rdparty/v8/src/regexp-macro-assembler-irregexp-inl.h
@@ -62,6 +62,16 @@ void RegExpMacroAssemblerIrregexp::Emit16(uint32_t word) {
}
+void RegExpMacroAssemblerIrregexp::Emit8(uint32_t word) {
+ ASSERT(pc_ <= buffer_.length());
+ if (pc_ == buffer_.length()) {
+ Expand();
+ }
+ *reinterpret_cast<unsigned char*>(buffer_.start() + pc_) = word;
+ pc_ += 1;
+}
+
+
void RegExpMacroAssemblerIrregexp::Emit32(uint32_t word) {
ASSERT(pc_ <= buffer_.length());
if (pc_ + 3 >= buffer_.length()) {
diff --git a/src/3rdparty/v8/src/regexp-macro-assembler-irregexp.cc b/src/3rdparty/v8/src/regexp-macro-assembler-irregexp.cc
index 322efa1..aa67919 100644
--- a/src/3rdparty/v8/src/regexp-macro-assembler-irregexp.cc
+++ b/src/3rdparty/v8/src/regexp-macro-assembler-irregexp.cc
@@ -352,6 +352,42 @@ void RegExpMacroAssemblerIrregexp::CheckNotCharacterAfterMinusAnd(
}
+void RegExpMacroAssemblerIrregexp::CheckCharacterInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_in_range) {
+ Emit(BC_CHECK_CHAR_IN_RANGE, 0);
+ Emit16(from);
+ Emit16(to);
+ EmitOrLink(on_in_range);
+}
+
+
+void RegExpMacroAssemblerIrregexp::CheckCharacterNotInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_not_in_range) {
+ Emit(BC_CHECK_CHAR_NOT_IN_RANGE, 0);
+ Emit16(from);
+ Emit16(to);
+ EmitOrLink(on_not_in_range);
+}
+
+
+void RegExpMacroAssemblerIrregexp::CheckBitInTable(
+ Handle<ByteArray> table, Label* on_bit_set) {
+ Emit(BC_CHECK_BIT_IN_TABLE, 0);
+ EmitOrLink(on_bit_set);
+ for (int i = 0; i < kTableSize; i += kBitsPerByte) {
+ int byte = 0;
+ for (int j = 0; j < kBitsPerByte; j++) {
+ if (table->get(i + j) != 0) byte |= 1 << j;
+ }
+ Emit8(byte);
+ }
+}
+
+
void RegExpMacroAssemblerIrregexp::CheckNotBackReference(int start_reg,
Label* on_not_equal) {
ASSERT(start_reg >= 0);
diff --git a/src/3rdparty/v8/src/regexp-macro-assembler-irregexp.h b/src/3rdparty/v8/src/regexp-macro-assembler-irregexp.h
index 262ead2..25cb68d 100644
--- a/src/3rdparty/v8/src/regexp-macro-assembler-irregexp.h
+++ b/src/3rdparty/v8/src/regexp-macro-assembler-irregexp.h
@@ -93,6 +93,13 @@ class RegExpMacroAssemblerIrregexp: public RegExpMacroAssembler {
uc16 minus,
uc16 mask,
Label* on_not_equal);
+ virtual void CheckCharacterInRange(uc16 from,
+ uc16 to,
+ Label* on_in_range);
+ virtual void CheckCharacterNotInRange(uc16 from,
+ uc16 to,
+ Label* on_not_in_range);
+ virtual void CheckBitInTable(Handle<ByteArray> table, Label* on_bit_set);
virtual void CheckNotBackReference(int start_reg, Label* on_no_match);
virtual void CheckNotBackReferenceIgnoreCase(int start_reg,
Label* on_no_match);
@@ -114,6 +121,7 @@ class RegExpMacroAssemblerIrregexp: public RegExpMacroAssembler {
inline void EmitOrLink(Label* label);
inline void Emit32(uint32_t x);
inline void Emit16(uint32_t x);
+ inline void Emit8(uint32_t x);
inline void Emit(uint32_t bc, uint32_t arg);
// Bytecode buffer.
int length();
diff --git a/src/3rdparty/v8/src/regexp-macro-assembler-tracer.cc b/src/3rdparty/v8/src/regexp-macro-assembler-tracer.cc
index f843278..b7aeac4 100644
--- a/src/3rdparty/v8/src/regexp-macro-assembler-tracer.cc
+++ b/src/3rdparty/v8/src/regexp-macro-assembler-tracer.cc
@@ -198,24 +198,55 @@ void RegExpMacroAssemblerTracer::LoadCurrentCharacter(int cp_offset,
}
+class PrintablePrinter {
+ public:
+ explicit PrintablePrinter(uc16 character) : character_(character) { }
+
+ const char* operator*() {
+ if (character_ >= ' ' && character_ <= '~') {
+ buffer_[0] = '(';
+ buffer_[1] = static_cast<char>(character_);
+ buffer_[2] = ')';
+ buffer_[3] = '\0';
+ } else {
+ buffer_[0] = '\0';
+ }
+ return &buffer_[0];
+ };
+
+ private:
+ uc16 character_;
+ char buffer_[4];
+};
+
+
void RegExpMacroAssemblerTracer::CheckCharacterLT(uc16 limit, Label* on_less) {
- PrintF(" CheckCharacterLT(c='u%04x', label[%08x]);\n",
- limit, LabelToInt(on_less));
+ PrintablePrinter printable(limit);
+ PrintF(" CheckCharacterLT(c=0x%04x%s, label[%08x]);\n",
+ limit,
+ *printable,
+ LabelToInt(on_less));
assembler_->CheckCharacterLT(limit, on_less);
}
void RegExpMacroAssemblerTracer::CheckCharacterGT(uc16 limit,
Label* on_greater) {
- PrintF(" CheckCharacterGT(c='u%04x', label[%08x]);\n",
- limit, LabelToInt(on_greater));
+ PrintablePrinter printable(limit);
+ PrintF(" CheckCharacterGT(c=0x%04x%s, label[%08x]);\n",
+ limit,
+ *printable,
+ LabelToInt(on_greater));
assembler_->CheckCharacterGT(limit, on_greater);
}
void RegExpMacroAssemblerTracer::CheckCharacter(unsigned c, Label* on_equal) {
- PrintF(" CheckCharacter(c='u%04x', label[%08x]);\n",
- c, LabelToInt(on_equal));
+ PrintablePrinter printable(c);
+ PrintF(" CheckCharacter(c=0x%04x%s, label[%08x]);\n",
+ c,
+ *printable,
+ LabelToInt(on_equal));
assembler_->CheckCharacter(c, on_equal);
}
@@ -234,8 +265,11 @@ void RegExpMacroAssemblerTracer::CheckNotAtStart(Label* on_not_at_start) {
void RegExpMacroAssemblerTracer::CheckNotCharacter(unsigned c,
Label* on_not_equal) {
- PrintF(" CheckNotCharacter(c='u%04x', label[%08x]);\n",
- c, LabelToInt(on_not_equal));
+ PrintablePrinter printable(c);
+ PrintF(" CheckNotCharacter(c=0x%04x%s, label[%08x]);\n",
+ c,
+ *printable,
+ LabelToInt(on_not_equal));
assembler_->CheckNotCharacter(c, on_not_equal);
}
@@ -244,8 +278,10 @@ void RegExpMacroAssemblerTracer::CheckCharacterAfterAnd(
unsigned c,
unsigned mask,
Label* on_equal) {
- PrintF(" CheckCharacterAfterAnd(c='u%04x', mask=0x%04x, label[%08x]);\n",
+ PrintablePrinter printable(c);
+ PrintF(" CheckCharacterAfterAnd(c=0x%04x%s, mask=0x%04x, label[%08x]);\n",
c,
+ *printable,
mask,
LabelToInt(on_equal));
assembler_->CheckCharacterAfterAnd(c, mask, on_equal);
@@ -256,8 +292,10 @@ void RegExpMacroAssemblerTracer::CheckNotCharacterAfterAnd(
unsigned c,
unsigned mask,
Label* on_not_equal) {
- PrintF(" CheckNotCharacterAfterAnd(c='u%04x', mask=0x%04x, label[%08x]);\n",
+ PrintablePrinter printable(c);
+ PrintF(" CheckNotCharacterAfterAnd(c=0x%04x%s, mask=0x%04x, label[%08x]);\n",
c,
+ *printable,
mask,
LabelToInt(on_not_equal));
assembler_->CheckNotCharacterAfterAnd(c, mask, on_not_equal);
@@ -269,7 +307,7 @@ void RegExpMacroAssemblerTracer::CheckNotCharacterAfterMinusAnd(
uc16 minus,
uc16 mask,
Label* on_not_equal) {
- PrintF(" CheckNotCharacterAfterMinusAnd(c='u%04x', minus=%04x, mask=0x%04x, "
+ PrintF(" CheckNotCharacterAfterMinusAnd(c=0x%04x, minus=%04x, mask=0x%04x, "
"label[%08x]);\n",
c,
minus,
@@ -279,6 +317,53 @@ void RegExpMacroAssemblerTracer::CheckNotCharacterAfterMinusAnd(
}
+void RegExpMacroAssemblerTracer::CheckCharacterInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_not_in_range) {
+ PrintablePrinter printable_from(from);
+ PrintablePrinter printable_to(to);
+ PrintF(" CheckCharacterInRange(from=0x%04x%s, to=0x%04x%s, label[%08x]);\n",
+ from,
+ *printable_from,
+ to,
+ *printable_to,
+ LabelToInt(on_not_in_range));
+ assembler_->CheckCharacterInRange(from, to, on_not_in_range);
+}
+
+
+void RegExpMacroAssemblerTracer::CheckCharacterNotInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_in_range) {
+ PrintablePrinter printable_from(from);
+ PrintablePrinter printable_to(to);
+ PrintF(
+ " CheckCharacterNotInRange(from=0x%04x%s," " to=%04x%s, label[%08x]);\n",
+ from,
+ *printable_from,
+ to,
+ *printable_to,
+ LabelToInt(on_in_range));
+ assembler_->CheckCharacterNotInRange(from, to, on_in_range);
+}
+
+
+void RegExpMacroAssemblerTracer::CheckBitInTable(
+ Handle<ByteArray> table, Label* on_bit_set) {
+ PrintF(" CheckBitInTable(label[%08x] ", LabelToInt(on_bit_set));
+ for (int i = 0; i < kTableSize; i++) {
+ PrintF("%c", table->get(i) != 0 ? 'X' : '.');
+ if (i % 32 == 31 && i != kTableMask) {
+ PrintF("\n ");
+ }
+ }
+ PrintF(");\n");
+ assembler_->CheckBitInTable(table, on_bit_set);
+}
+
+
void RegExpMacroAssemblerTracer::CheckNotBackReference(int start_reg,
Label* on_no_match) {
PrintF(" CheckNotBackReference(register=%d, label[%08x]);\n", start_reg,
@@ -314,7 +399,7 @@ void RegExpMacroAssemblerTracer::CheckCharacters(Vector<const uc16> str,
PrintF(" %s(str=\"",
check_end_of_string ? "CheckCharacters" : "CheckCharactersUnchecked");
for (int i = 0; i < str.length(); i++) {
- PrintF("u%04x", str[i]);
+ PrintF("0x%04x", str[i]);
}
PrintF("\", cp_offset=%d, label[%08x])\n",
cp_offset, LabelToInt(on_failure));
diff --git a/src/3rdparty/v8/src/regexp-macro-assembler-tracer.h b/src/3rdparty/v8/src/regexp-macro-assembler-tracer.h
index 1cf0349..3fd4d8b 100644
--- a/src/3rdparty/v8/src/regexp-macro-assembler-tracer.h
+++ b/src/3rdparty/v8/src/regexp-macro-assembler-tracer.h
@@ -68,6 +68,13 @@ class RegExpMacroAssemblerTracer: public RegExpMacroAssembler {
uc16 minus,
uc16 and_with,
Label* on_not_equal);
+ virtual void CheckCharacterInRange(uc16 from,
+ uc16 to,
+ Label* on_in_range);
+ virtual void CheckCharacterNotInRange(uc16 from,
+ uc16 to,
+ Label* on_not_in_range);
+ virtual void CheckBitInTable(Handle<ByteArray> table, Label* on_bit_set);
virtual bool CheckSpecialCharacterClass(uc16 type,
Label* on_no_match);
virtual void Fail();
diff --git a/src/3rdparty/v8/src/regexp-macro-assembler.cc b/src/3rdparty/v8/src/regexp-macro-assembler.cc
index f91ea93..b6fb3c5 100644
--- a/src/3rdparty/v8/src/regexp-macro-assembler.cc
+++ b/src/3rdparty/v8/src/regexp-macro-assembler.cc
@@ -81,7 +81,7 @@ const byte* NativeRegExpMacroAssembler::StringCharacterPosition(
if (subject->IsAsciiRepresentation()) {
const byte* address;
if (StringShape(subject).IsExternal()) {
- const char* data = ExternalAsciiString::cast(subject)->resource()->data();
+ const char* data = ExternalAsciiString::cast(subject)->GetChars();
address = reinterpret_cast<const byte*>(data);
} else {
ASSERT(subject->IsSeqAsciiString());
@@ -92,7 +92,7 @@ const byte* NativeRegExpMacroAssembler::StringCharacterPosition(
}
const uc16* data;
if (StringShape(subject).IsExternal()) {
- data = ExternalTwoByteString::cast(subject)->resource()->data();
+ data = ExternalTwoByteString::cast(subject)->GetChars();
} else {
ASSERT(subject->IsSeqTwoByteString());
data = SeqTwoByteString::cast(subject)->GetChars();
@@ -133,7 +133,7 @@ NativeRegExpMacroAssembler::Result NativeRegExpMacroAssembler::Match(
subject_ptr = slice->parent();
slice_offset = slice->offset();
}
- // Ensure that an underlying string has the same ascii-ness.
+ // Ensure that an underlying string has the same ASCII-ness.
bool is_ascii = subject_ptr->IsAsciiRepresentation();
ASSERT(subject_ptr->IsExternalString() || subject_ptr->IsSeqString());
// String is now either Sequential or External
diff --git a/src/3rdparty/v8/src/regexp-macro-assembler.h b/src/3rdparty/v8/src/regexp-macro-assembler.h
index 0314c70..8587435 100644
--- a/src/3rdparty/v8/src/regexp-macro-assembler.h
+++ b/src/3rdparty/v8/src/regexp-macro-assembler.h
@@ -45,6 +45,11 @@ class RegExpMacroAssembler {
static const int kMaxRegister = (1 << 16) - 1;
static const int kMaxCPOffset = (1 << 15) - 1;
static const int kMinCPOffset = -(1 << 15);
+
+ static const int kTableSizeBits = 7;
+ static const int kTableSize = 1 << kTableSizeBits;
+ static const int kTableMask = kTableSize - 1;
+
enum IrregexpImplementation {
kIA32Implementation,
kARMImplementation,
@@ -106,12 +111,23 @@ class RegExpMacroAssembler {
virtual void CheckNotCharacterAfterAnd(unsigned c,
unsigned and_with,
Label* on_not_equal) = 0;
- // Subtract a constant from the current character, then or with the given
+ // Subtract a constant from the current character, then and with the given
// constant and then check for a match with c.
virtual void CheckNotCharacterAfterMinusAnd(uc16 c,
uc16 minus,
uc16 and_with,
Label* on_not_equal) = 0;
+ virtual void CheckCharacterInRange(uc16 from,
+ uc16 to, // Both inclusive.
+ Label* on_in_range) = 0;
+ virtual void CheckCharacterNotInRange(uc16 from,
+ uc16 to, // Both inclusive.
+ Label* on_not_in_range) = 0;
+
+ // The current character (modulus the kTableSize) is looked up in the byte
+ // array, and if the found byte is non-zero, we jump to the on_bit_set label.
+ virtual void CheckBitInTable(Handle<ByteArray> table, Label* on_bit_set) = 0;
+
virtual void CheckNotRegistersEqual(int reg1,
int reg2,
Label* on_not_equal) = 0;
diff --git a/src/3rdparty/v8/src/regexp.js b/src/3rdparty/v8/src/regexp.js
index f373ceb..bc9508d 100644
--- a/src/3rdparty/v8/src/regexp.js
+++ b/src/3rdparty/v8/src/regexp.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2009 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -28,7 +28,7 @@
// Expect $Object = global.Object;
// Expect $Array = global.Array;
-const $RegExp = global.RegExp;
+var $RegExp = global.RegExp;
// A recursive descent parser for Patterns according to the grammar of
// ECMA-262 15.10.1, with deviations noted below.
@@ -250,29 +250,32 @@ function RegExpTest(string) {
// Remove irrelevant preceeding '.*' in a non-global test regexp.
// The expression checks whether this.source starts with '.*' and
// that the third char is not a '?'.
- if (%_StringCharCodeAt(this.source, 0) == 46 && // '.'
- %_StringCharCodeAt(this.source, 1) == 42 && // '*'
- %_StringCharCodeAt(this.source, 2) != 63) { // '?'
- if (!%_ObjectEquals(regexp_key, this)) {
- regexp_key = this;
- regexp_val = new $RegExp(SubString(this.source, 2, this.source.length),
- (!this.ignoreCase
- ? !this.multiline ? "" : "m"
- : !this.multiline ? "i" : "im"));
- }
- if (%_RegExpExec(regexp_val, string, 0, lastMatchInfo) === null) {
- return false;
- }
+ var regexp = this;
+ if (%_StringCharCodeAt(regexp.source, 0) == 46 && // '.'
+ %_StringCharCodeAt(regexp.source, 1) == 42 && // '*'
+ %_StringCharCodeAt(regexp.source, 2) != 63) { // '?'
+ regexp = TrimRegExp(regexp);
}
- %_Log('regexp', 'regexp-exec,%0r,%1S,%2i', [this, string, lastIndex]);
+ %_Log('regexp', 'regexp-exec,%0r,%1S,%2i', [regexp, string, lastIndex]);
// matchIndices is either null or the lastMatchInfo array.
- var matchIndices = %_RegExpExec(this, string, 0, lastMatchInfo);
+ var matchIndices = %_RegExpExec(regexp, string, 0, lastMatchInfo);
if (matchIndices === null) return false;
lastMatchInfoOverride = null;
return true;
}
}
+function TrimRegExp(regexp) {
+ if (!%_ObjectEquals(regexp_key, regexp)) {
+ regexp_key = regexp;
+ regexp_val =
+ new $RegExp(SubString(regexp.source, 2, regexp.source.length),
+ (regexp.ignoreCase ? regexp.multiline ? "im" : "i"
+ : regexp.multiline ? "m" : ""));
+ }
+ return regexp_val;
+}
+
function RegExpToString() {
// If this.source is an empty string, output /(?:)/.
@@ -413,63 +416,67 @@ function SetUpRegExp() {
// The properties input, $input, and $_ are aliases for each other. When this
// value is set the value it is set to is coerced to a string.
// Getter and setter for the input.
- function RegExpGetInput() {
+ var RegExpGetInput = function() {
var regExpInput = LAST_INPUT(lastMatchInfo);
return IS_UNDEFINED(regExpInput) ? "" : regExpInput;
- }
- function RegExpSetInput(string) {
+ };
+ var RegExpSetInput = function(string) {
LAST_INPUT(lastMatchInfo) = ToString(string);
};
- %DefineAccessor($RegExp, 'input', GETTER, RegExpGetInput, DONT_DELETE);
- %DefineAccessor($RegExp, 'input', SETTER, RegExpSetInput, DONT_DELETE);
- %DefineAccessor($RegExp, '$_', GETTER, RegExpGetInput, DONT_ENUM | DONT_DELETE);
- %DefineAccessor($RegExp, '$_', SETTER, RegExpSetInput, DONT_ENUM | DONT_DELETE);
- %DefineAccessor($RegExp, '$input', GETTER, RegExpGetInput, DONT_ENUM | DONT_DELETE);
- %DefineAccessor($RegExp, '$input', SETTER, RegExpSetInput, DONT_ENUM | DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, 'input', RegExpGetInput,
+ RegExpSetInput, DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, '$_', RegExpGetInput,
+ RegExpSetInput, DONT_ENUM | DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, '$input', RegExpGetInput,
+ RegExpSetInput, DONT_ENUM | DONT_DELETE);
// The properties multiline and $* are aliases for each other. When this
// value is set in SpiderMonkey, the value it is set to is coerced to a
// boolean. We mimic that behavior with a slight difference: in SpiderMonkey
// the value of the expression 'RegExp.multiline = null' (for instance) is the
- // boolean false (ie, the value after coercion), while in V8 it is the value
- // null (ie, the value before coercion).
+ // boolean false (i.e., the value after coercion), while in V8 it is the value
+ // null (i.e., the value before coercion).
// Getter and setter for multiline.
var multiline = false;
- function RegExpGetMultiline() { return multiline; };
- function RegExpSetMultiline(flag) { multiline = flag ? true : false; };
+ var RegExpGetMultiline = function() { return multiline; };
+ var RegExpSetMultiline = function(flag) { multiline = flag ? true : false; };
- %DefineAccessor($RegExp, 'multiline', GETTER, RegExpGetMultiline, DONT_DELETE);
- %DefineAccessor($RegExp, 'multiline', SETTER, RegExpSetMultiline, DONT_DELETE);
- %DefineAccessor($RegExp, '$*', GETTER, RegExpGetMultiline, DONT_ENUM | DONT_DELETE);
- %DefineAccessor($RegExp, '$*', SETTER, RegExpSetMultiline, DONT_ENUM | DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, 'multiline', RegExpGetMultiline,
+ RegExpSetMultiline, DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, '$*', RegExpGetMultiline,
+ RegExpSetMultiline,
+ DONT_ENUM | DONT_DELETE);
- function NoOpSetter(ignored) {}
+ var NoOpSetter = function(ignored) {};
// Static properties set by a successful match.
- %DefineAccessor($RegExp, 'lastMatch', GETTER, RegExpGetLastMatch, DONT_DELETE);
- %DefineAccessor($RegExp, 'lastMatch', SETTER, NoOpSetter, DONT_DELETE);
- %DefineAccessor($RegExp, '$&', GETTER, RegExpGetLastMatch, DONT_ENUM | DONT_DELETE);
- %DefineAccessor($RegExp, '$&', SETTER, NoOpSetter, DONT_ENUM | DONT_DELETE);
- %DefineAccessor($RegExp, 'lastParen', GETTER, RegExpGetLastParen, DONT_DELETE);
- %DefineAccessor($RegExp, 'lastParen', SETTER, NoOpSetter, DONT_DELETE);
- %DefineAccessor($RegExp, '$+', GETTER, RegExpGetLastParen, DONT_ENUM | DONT_DELETE);
- %DefineAccessor($RegExp, '$+', SETTER, NoOpSetter, DONT_ENUM | DONT_DELETE);
- %DefineAccessor($RegExp, 'leftContext', GETTER, RegExpGetLeftContext, DONT_DELETE);
- %DefineAccessor($RegExp, 'leftContext', SETTER, NoOpSetter, DONT_DELETE);
- %DefineAccessor($RegExp, '$`', GETTER, RegExpGetLeftContext, DONT_ENUM | DONT_DELETE);
- %DefineAccessor($RegExp, '$`', SETTER, NoOpSetter, DONT_ENUM | DONT_DELETE);
- %DefineAccessor($RegExp, 'rightContext', GETTER, RegExpGetRightContext, DONT_DELETE);
- %DefineAccessor($RegExp, 'rightContext', SETTER, NoOpSetter, DONT_DELETE);
- %DefineAccessor($RegExp, "$'", GETTER, RegExpGetRightContext, DONT_ENUM | DONT_DELETE);
- %DefineAccessor($RegExp, "$'", SETTER, NoOpSetter, DONT_ENUM | DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, 'lastMatch', RegExpGetLastMatch,
+ NoOpSetter, DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, '$&', RegExpGetLastMatch,
+ NoOpSetter, DONT_ENUM | DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, 'lastParen', RegExpGetLastParen,
+ NoOpSetter, DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, '$+', RegExpGetLastParen,
+ NoOpSetter, DONT_ENUM | DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, 'leftContext',
+ RegExpGetLeftContext, NoOpSetter,
+ DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, '$`', RegExpGetLeftContext,
+ NoOpSetter, DONT_ENUM | DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, 'rightContext',
+ RegExpGetRightContext, NoOpSetter,
+ DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, "$'", RegExpGetRightContext,
+ NoOpSetter, DONT_ENUM | DONT_DELETE);
for (var i = 1; i < 10; ++i) {
- %DefineAccessor($RegExp, '$' + i, GETTER, RegExpMakeCaptureGetter(i), DONT_DELETE);
- %DefineAccessor($RegExp, '$' + i, SETTER, NoOpSetter, DONT_DELETE);
+ %DefineOrRedefineAccessorProperty($RegExp, '$' + i,
+ RegExpMakeCaptureGetter(i), NoOpSetter,
+ DONT_DELETE);
}
}
diff --git a/src/3rdparty/v8/src/rewriter.cc b/src/3rdparty/v8/src/rewriter.cc
index 3d4c2dc..e58ddb4 100644
--- a/src/3rdparty/v8/src/rewriter.cc
+++ b/src/3rdparty/v8/src/rewriter.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -42,12 +42,18 @@ class Processor: public AstVisitor {
: result_(result),
result_assigned_(false),
is_set_(false),
- in_try_(false) {
- }
+ in_try_(false),
+ factory_(isolate()) { }
+
+ virtual ~Processor() { }
void Process(ZoneList<Statement*>* statements);
bool result_assigned() const { return result_assigned_; }
+ AstNodeFactory<AstNullVisitor>* factory() {
+ return &factory_;
+ }
+
private:
Variable* result_;
@@ -64,15 +70,13 @@ class Processor: public AstVisitor {
bool is_set_;
bool in_try_;
+ AstNodeFactory<AstNullVisitor> factory_;
+
Expression* SetResult(Expression* value) {
result_assigned_ = true;
- Zone* zone = isolate()->zone();
- VariableProxy* result_proxy = new(zone) VariableProxy(isolate(), result_);
- return new(zone) Assignment(isolate(),
- Token::ASSIGN,
- result_proxy,
- value,
- RelocInfo::kNoPosition);
+ VariableProxy* result_proxy = factory()->NewVariableProxy(result_);
+ return factory()->NewAssignment(
+ Token::ASSIGN, result_proxy, value, RelocInfo::kNoPosition);
}
// Node visitors.
@@ -107,7 +111,7 @@ void Processor::VisitBlock(Block* node) {
void Processor::VisitExpressionStatement(ExpressionStatement* node) {
// Rewrite : <x>; -> .result = <x>;
- if (!is_set_) {
+ if (!is_set_ && !node->expression()->IsThrow()) {
node->set_expression(SetResult(node->expression()));
if (!in_try_) is_set_ = true;
}
@@ -205,7 +209,15 @@ void Processor::VisitWithStatement(WithStatement* node) {
// Do nothing:
-void Processor::VisitDeclaration(Declaration* node) {}
+void Processor::VisitVariableDeclaration(VariableDeclaration* node) {}
+void Processor::VisitFunctionDeclaration(FunctionDeclaration* node) {}
+void Processor::VisitModuleDeclaration(ModuleDeclaration* node) {}
+void Processor::VisitImportDeclaration(ImportDeclaration* node) {}
+void Processor::VisitExportDeclaration(ExportDeclaration* node) {}
+void Processor::VisitModuleLiteral(ModuleLiteral* node) {}
+void Processor::VisitModuleVariable(ModuleVariable* node) {}
+void Processor::VisitModulePath(ModulePath* node) {}
+void Processor::VisitModuleUrl(ModuleUrl* node) {}
void Processor::VisitEmptyStatement(EmptyStatement* node) {}
void Processor::VisitReturnStatement(ReturnStatement* node) {}
void Processor::VisitDebuggerStatement(DebuggerStatement* node) {}
@@ -236,10 +248,21 @@ bool Rewriter::Rewrite(CompilationInfo* info) {
if (processor.HasStackOverflow()) return false;
if (processor.result_assigned()) {
- Isolate* isolate = info->isolate();
- Zone* zone = isolate->zone();
- VariableProxy* result_proxy = new(zone) VariableProxy(isolate, result);
- body->Add(new(zone) ReturnStatement(result_proxy));
+ ASSERT(function->end_position() != RelocInfo::kNoPosition);
+ // Set the position of the assignment statement one character past the
+ // source code, such that it definitely is not in the source code range
+ // of an immediate inner scope. For example in
+ // eval('with ({x:1}) x = 1');
+ // the end position of the function generated for executing the eval code
+ // coincides with the end of the with scope which is the position of '1'.
+ int position = function->end_position();
+ VariableProxy* result_proxy = processor.factory()->NewVariableProxy(
+ result->name(), false, position);
+ result_proxy->BindTo(result);
+ Statement* result_statement =
+ processor.factory()->NewReturnStatement(result_proxy);
+ result_statement->set_statement_pos(position);
+ body->Add(result_statement);
}
}
diff --git a/src/3rdparty/v8/src/runtime-profiler.cc b/src/3rdparty/v8/src/runtime-profiler.cc
index 520dd39..568e48e 100644
--- a/src/3rdparty/v8/src/runtime-profiler.cc
+++ b/src/3rdparty/v8/src/runtime-profiler.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -46,6 +46,8 @@ namespace internal {
// Optimization sampler constants.
static const int kSamplerFrameCount = 2;
+
+// Constants for statistical profiler.
static const int kSamplerFrameWeight[kSamplerFrameCount] = { 2, 1 };
static const int kSamplerTicksBetweenThresholdAdjustment = 32;
@@ -58,14 +60,30 @@ static const int kSamplerThresholdSizeFactorInit = 3;
static const int kSizeLimit = 1500;
+// Constants for counter based profiler.
+
+// Number of times a function has to be seen on the stack before it is
+// optimized.
+static const int kProfilerTicksBeforeOptimization = 2;
+// If a function does not have enough type info (according to
+// FLAG_type_info_threshold), but has seen a huge number of ticks,
+// optimize it as it is.
+static const int kTicksWhenNotEnoughTypeInfo = 100;
+// We only have one byte to store the number of ticks.
+STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256);
+
+// Maximum size in bytes of generated code for a function to be optimized
+// the very first time it is seen on the stack.
+static const int kMaxSizeEarlyOpt = 500;
+
Atomic32 RuntimeProfiler::state_ = 0;
-// TODO(isolates): Create the semaphore lazily and clean it up when no
-// longer required.
-Semaphore* RuntimeProfiler::semaphore_ = OS::CreateSemaphore(0);
+
+// TODO(isolates): Clean up the semaphore when it is no longer required.
+static LazySemaphore<0>::type semaphore = LAZY_SEMAPHORE_INITIALIZER;
#ifdef DEBUG
-bool RuntimeProfiler::has_been_globally_setup_ = false;
+bool RuntimeProfiler::has_been_globally_set_up_ = false;
#endif
bool RuntimeProfiler::enabled_ = false;
@@ -76,27 +94,53 @@ RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
sampler_threshold_size_factor_(kSamplerThresholdSizeFactorInit),
sampler_ticks_until_threshold_adjustment_(
kSamplerTicksBetweenThresholdAdjustment),
- sampler_window_position_(0) {
+ sampler_window_position_(0),
+ any_ic_changed_(false),
+ code_generated_(false) {
ClearSampleBuffer();
}
-void RuntimeProfiler::GlobalSetup() {
- ASSERT(!has_been_globally_setup_);
+void RuntimeProfiler::GlobalSetUp() {
+ ASSERT(!has_been_globally_set_up_);
enabled_ = V8::UseCrankshaft() && FLAG_opt;
#ifdef DEBUG
- has_been_globally_setup_ = true;
+ has_been_globally_set_up_ = true;
#endif
}
-void RuntimeProfiler::Optimize(JSFunction* function) {
+static void GetICCounts(JSFunction* function,
+ int* ic_with_type_info_count,
+ int* ic_total_count,
+ int* percentage) {
+ *ic_total_count = 0;
+ *ic_with_type_info_count = 0;
+ Object* raw_info =
+ function->shared()->code()->type_feedback_info();
+ if (raw_info->IsTypeFeedbackInfo()) {
+ TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info);
+ *ic_with_type_info_count = info->ic_with_type_info_count();
+ *ic_total_count = info->ic_total_count();
+ }
+ *percentage = *ic_total_count > 0
+ ? 100 * *ic_with_type_info_count / *ic_total_count
+ : 100;
+}
+
+
+void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
ASSERT(function->IsOptimizable());
if (FLAG_trace_opt) {
PrintF("[marking ");
function->PrintName();
PrintF(" 0x%" V8PRIxPTR, reinterpret_cast<intptr_t>(function->address()));
- PrintF(" for recompilation");
+ PrintF(" for recompilation, reason: %s", reason);
+ if (FLAG_type_info_threshold > 0) {
+ int typeinfo, total, percentage;
+ GetICCounts(function, &typeinfo, &total, &percentage);
+ PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total, percentage);
+ }
PrintF("]\n");
}
@@ -135,15 +179,22 @@ void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) {
// Get the stack check stub code object to match against. We aren't
// prepared to generate it, but we don't expect to have to.
- StackCheckStub check_stub;
- Object* check_code;
- MaybeObject* maybe_check_code = check_stub.TryGetCode();
- if (maybe_check_code->ToObject(&check_code)) {
+ bool found_code = false;
+ Code* stack_check_code = NULL;
+ if (FLAG_count_based_interrupts) {
+ InterruptStub interrupt_stub;
+ found_code = interrupt_stub.FindCodeInCache(&stack_check_code);
+ } else // NOLINT
+ { // NOLINT
+ StackCheckStub check_stub;
+ found_code = check_stub.FindCodeInCache(&stack_check_code);
+ }
+ if (found_code) {
Code* replacement_code =
isolate_->builtins()->builtin(Builtins::kOnStackReplacement);
Code* unoptimized_code = shared->code();
Deoptimizer::PatchStackCheckCode(unoptimized_code,
- Code::cast(check_code),
+ stack_check_code,
replacement_code);
}
}
@@ -187,68 +238,123 @@ void RuntimeProfiler::OptimizeNow() {
JSFunction* samples[kSamplerFrameCount];
int sample_count = 0;
int frame_count = 0;
+ int frame_count_limit = FLAG_watch_ic_patching ? FLAG_frame_count
+ : kSamplerFrameCount;
for (JavaScriptFrameIterator it(isolate_);
- frame_count++ < kSamplerFrameCount && !it.done();
+ frame_count++ < frame_count_limit && !it.done();
it.Advance()) {
JavaScriptFrame* frame = it.frame();
JSFunction* function = JSFunction::cast(frame->function());
- // Adjust threshold each time we have processed
- // a certain number of ticks.
- if (sampler_ticks_until_threshold_adjustment_ > 0) {
- sampler_ticks_until_threshold_adjustment_--;
- if (sampler_ticks_until_threshold_adjustment_ <= 0) {
- // If the threshold is not already at the minimum
- // modify and reset the ticks until next adjustment.
- if (sampler_threshold_ > kSamplerThresholdMin) {
- sampler_threshold_ -= kSamplerThresholdDelta;
- sampler_ticks_until_threshold_adjustment_ =
- kSamplerTicksBetweenThresholdAdjustment;
+ if (!FLAG_watch_ic_patching) {
+ // Adjust threshold each time we have processed
+ // a certain number of ticks.
+ if (sampler_ticks_until_threshold_adjustment_ > 0) {
+ sampler_ticks_until_threshold_adjustment_--;
+ if (sampler_ticks_until_threshold_adjustment_ <= 0) {
+ // If the threshold is not already at the minimum
+ // modify and reset the ticks until next adjustment.
+ if (sampler_threshold_ > kSamplerThresholdMin) {
+ sampler_threshold_ -= kSamplerThresholdDelta;
+ sampler_ticks_until_threshold_adjustment_ =
+ kSamplerTicksBetweenThresholdAdjustment;
+ }
}
}
}
+ Code* shared_code = function->shared()->code();
+ if (shared_code->kind() != Code::FUNCTION) continue;
+
if (function->IsMarkedForLazyRecompilation()) {
- Code* unoptimized = function->shared()->code();
- int nesting = unoptimized->allow_osr_at_loop_nesting_level();
+ int nesting = shared_code->allow_osr_at_loop_nesting_level();
if (nesting == 0) AttemptOnStackReplacement(function);
int new_nesting = Min(nesting + 1, Code::kMaxLoopNestingMarker);
- unoptimized->set_allow_osr_at_loop_nesting_level(new_nesting);
+ shared_code->set_allow_osr_at_loop_nesting_level(new_nesting);
}
// Do not record non-optimizable functions.
if (!function->IsOptimizable()) continue;
- samples[sample_count++] = function;
+ if (function->shared()->optimization_disabled()) continue;
+
+ // Only record top-level code on top of the execution stack and
+ // avoid optimizing excessively large scripts since top-level code
+ // will be executed only once.
+ const int kMaxToplevelSourceSize = 10 * 1024;
+ if (function->shared()->is_toplevel()
+ && (frame_count > 1
+ || function->shared()->SourceSize() > kMaxToplevelSourceSize)) {
+ continue;
+ }
+
+ if (FLAG_watch_ic_patching) {
+ int ticks = shared_code->profiler_ticks();
+
+ if (ticks >= kProfilerTicksBeforeOptimization) {
+ int typeinfo, total, percentage;
+ GetICCounts(function, &typeinfo, &total, &percentage);
+ if (percentage >= FLAG_type_info_threshold) {
+ // If this particular function hasn't had any ICs patched for enough
+ // ticks, optimize it now.
+ Optimize(function, "hot and stable");
+ } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
+ Optimize(function, "not much type info but very hot");
+ } else {
+ shared_code->set_profiler_ticks(ticks + 1);
+ if (FLAG_trace_opt_verbose) {
+ PrintF("[not yet optimizing ");
+ function->PrintName();
+ PrintF(", not enough type info: %d/%d (%d%%)]\n",
+ typeinfo, total, percentage);
+ }
+ }
+ } else if (!any_ic_changed_ &&
+ shared_code->instruction_size() < kMaxSizeEarlyOpt) {
+ // If no IC was patched since the last tick and this function is very
+ // small, optimistically optimize it now.
+ Optimize(function, "small function");
+ } else {
+ shared_code->set_profiler_ticks(ticks + 1);
+ }
+ } else { // !FLAG_watch_ic_patching
+ samples[sample_count++] = function;
- int function_size = function->shared()->SourceSize();
- int threshold_size_factor = (function_size > kSizeLimit)
- ? sampler_threshold_size_factor_
- : 1;
+ int function_size = function->shared()->SourceSize();
+ int threshold_size_factor = (function_size > kSizeLimit)
+ ? sampler_threshold_size_factor_
+ : 1;
- int threshold = sampler_threshold_ * threshold_size_factor;
+ int threshold = sampler_threshold_ * threshold_size_factor;
- if (LookupSample(function) >= threshold) {
- Optimize(function);
+ if (LookupSample(function) >= threshold) {
+ Optimize(function, "sampler window lookup");
+ }
}
}
-
- // Add the collected functions as samples. It's important not to do
- // this as part of collecting them because this will interfere with
- // the sample lookup in case of recursive functions.
- for (int i = 0; i < sample_count; i++) {
- AddSample(samples[i], kSamplerFrameWeight[i]);
+ if (FLAG_watch_ic_patching) {
+ any_ic_changed_ = false;
+ } else { // !FLAG_watch_ic_patching
+ // Add the collected functions as samples. It's important not to do
+ // this as part of collecting them because this will interfere with
+ // the sample lookup in case of recursive functions.
+ for (int i = 0; i < sample_count; i++) {
+ AddSample(samples[i], kSamplerFrameWeight[i]);
+ }
}
}
void RuntimeProfiler::NotifyTick() {
+ if (FLAG_count_based_interrupts) return;
isolate_->stack_guard()->RequestRuntimeProfilerTick();
}
-void RuntimeProfiler::Setup() {
- ASSERT(has_been_globally_setup_);
- ClearSampleBuffer();
+void RuntimeProfiler::SetUp() {
+ ASSERT(has_been_globally_set_up_);
+ if (!FLAG_watch_ic_patching) {
+ ClearSampleBuffer();
+ }
// If the ticker hasn't already started, make sure to do so to get
// the ticks for the runtime profiler.
if (IsEnabled()) isolate_->logger()->EnsureTickerStarted();
@@ -256,10 +362,12 @@ void RuntimeProfiler::Setup() {
void RuntimeProfiler::Reset() {
- sampler_threshold_ = kSamplerThresholdInit;
- sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
- sampler_ticks_until_threshold_adjustment_ =
- kSamplerTicksBetweenThresholdAdjustment;
+ if (!FLAG_watch_ic_patching) {
+ sampler_threshold_ = kSamplerThresholdInit;
+ sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
+ sampler_ticks_until_threshold_adjustment_ =
+ kSamplerTicksBetweenThresholdAdjustment;
+ }
}
@@ -296,7 +404,7 @@ void RuntimeProfiler::HandleWakeUp(Isolate* isolate) {
// undid the decrement done by the profiler thread. Increment again
// to get the right count of active isolates.
NoBarrier_AtomicIncrement(&state_, 1);
- semaphore_->Signal();
+ semaphore.Pointer()->Signal();
}
@@ -309,7 +417,7 @@ bool RuntimeProfiler::WaitForSomeIsolateToEnterJS() {
Atomic32 old_state = NoBarrier_CompareAndSwap(&state_, 0, -1);
ASSERT(old_state >= -1);
if (old_state != 0) return false;
- semaphore_->Wait();
+ semaphore.Pointer()->Wait();
return true;
}
@@ -325,7 +433,7 @@ void RuntimeProfiler::StopRuntimeProfilerThreadBeforeShutdown(Thread* thread) {
if (new_state == 0) {
// The profiler thread is waiting. Wake it up. It must check for
// stop conditions before attempting to wait again.
- semaphore_->Signal();
+ semaphore.Pointer()->Signal();
}
thread->Join();
// The profiler thread is now stopped. Undo the increment in case it
diff --git a/src/3rdparty/v8/src/runtime-profiler.h b/src/3rdparty/v8/src/runtime-profiler.h
index 15c2097..ab6cb37 100644
--- a/src/3rdparty/v8/src/runtime-profiler.h
+++ b/src/3rdparty/v8/src/runtime-profiler.h
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -43,10 +43,10 @@ class RuntimeProfiler {
public:
explicit RuntimeProfiler(Isolate* isolate);
- static void GlobalSetup();
+ static void GlobalSetUp();
static inline bool IsEnabled() {
- ASSERT(has_been_globally_setup_);
+ ASSERT(has_been_globally_set_up_);
return enabled_;
}
@@ -54,13 +54,15 @@ class RuntimeProfiler {
void NotifyTick();
- void Setup();
+ void SetUp();
void Reset();
void TearDown();
Object** SamplerWindowAddress();
int SamplerWindowSize();
+ void NotifyICChanged() { any_ic_changed_ = true; }
+
// Rate limiting support.
// VM thread interface.
@@ -92,14 +94,14 @@ class RuntimeProfiler {
void RemoveDeadSamples();
void UpdateSamplesAfterCompact(ObjectVisitor* visitor);
+ void AttemptOnStackReplacement(JSFunction* function);
+
private:
static const int kSamplerWindowSize = 16;
static void HandleWakeUp(Isolate* isolate);
- void Optimize(JSFunction* function);
-
- void AttemptOnStackReplacement(JSFunction* function);
+ void Optimize(JSFunction* function, const char* reason);
void ClearSampleBuffer();
@@ -119,14 +121,16 @@ class RuntimeProfiler {
int sampler_window_position_;
int sampler_window_weight_[kSamplerWindowSize];
+ bool any_ic_changed_;
+ bool code_generated_;
+
// Possible state values:
// -1 => the profiler thread is waiting on the semaphore
// 0 or positive => the number of isolates running JavaScript code.
static Atomic32 state_;
- static Semaphore* semaphore_;
#ifdef DEBUG
- static bool has_been_globally_setup_;
+ static bool has_been_globally_set_up_;
#endif
static bool enabled_;
};
diff --git a/src/3rdparty/v8/src/runtime.cc b/src/3rdparty/v8/src/runtime.cc
index bce5d8e..f9b5fde 100644
--- a/src/3rdparty/v8/src/runtime.cc
+++ b/src/3rdparty/v8/src/runtime.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -40,6 +40,7 @@
#include "dateparser-inl.h"
#include "debug.h"
#include "deoptimizer.h"
+#include "date.h"
#include "execution.h"
#include "global-handles.h"
#include "isolate-inl.h"
@@ -69,20 +70,20 @@ namespace internal {
// Cast the given object to a value of the specified type and store
// it in a variable with the given name. If the object is not of the
// expected type call IllegalOperation and return.
-#define CONVERT_CHECKED(Type, name, obj) \
- RUNTIME_ASSERT(obj->Is##Type()); \
- Type* name = Type::cast(obj);
-
#define CONVERT_ARG_CHECKED(Type, name, index) \
RUNTIME_ASSERT(args[index]->Is##Type()); \
+ Type* name = Type::cast(args[index]);
+
+#define CONVERT_ARG_HANDLE_CHECKED(Type, name, index) \
+ RUNTIME_ASSERT(args[index]->Is##Type()); \
Handle<Type> name = args.at<Type>(index);
// Cast the given object to a boolean and store it in a variable with
// the given name. If the object is not a boolean call IllegalOperation
// and return.
-#define CONVERT_BOOLEAN_CHECKED(name, obj) \
- RUNTIME_ASSERT(obj->IsBoolean()); \
- bool name = (obj)->IsTrue();
+#define CONVERT_BOOLEAN_ARG_CHECKED(name, index) \
+ RUNTIME_ASSERT(args[index]->IsBoolean()); \
+ bool name = args[index]->IsTrue();
// Cast the given argument to a Smi and store its value in an int variable
// with the given name. If the argument is not a Smi call IllegalOperation
@@ -106,16 +107,35 @@ namespace internal {
type name = NumberTo##Type(obj);
+// Cast the given argument to PropertyDetails and store its value in a
+// variable with the given name. If the argument is not a Smi call
+// IllegalOperation and return.
+#define CONVERT_PROPERTY_DETAILS_CHECKED(name, index) \
+ RUNTIME_ASSERT(args[index]->IsSmi()); \
+ PropertyDetails name = PropertyDetails(Smi::cast(args[index]));
+
+
// Assert that the given argument has a valid value for a StrictModeFlag
// and store it in a StrictModeFlag variable with the given name.
-#define CONVERT_STRICT_MODE_ARG(name, index) \
- ASSERT(args[index]->IsSmi()); \
- ASSERT(args.smi_at(index) == kStrictMode || \
- args.smi_at(index) == kNonStrictMode); \
+#define CONVERT_STRICT_MODE_ARG_CHECKED(name, index) \
+ RUNTIME_ASSERT(args[index]->IsSmi()); \
+ RUNTIME_ASSERT(args.smi_at(index) == kStrictMode || \
+ args.smi_at(index) == kNonStrictMode); \
StrictModeFlag name = \
static_cast<StrictModeFlag>(args.smi_at(index));
+// Assert that the given argument has a valid value for a LanguageMode
+// and store it in a LanguageMode variable with the given name.
+#define CONVERT_LANGUAGE_MODE_ARG(name, index) \
+ ASSERT(args[index]->IsSmi()); \
+ ASSERT(args.smi_at(index) == CLASSIC_MODE || \
+ args.smi_at(index) == STRICT_MODE || \
+ args.smi_at(index) == EXTENDED_MODE); \
+ LanguageMode name = \
+ static_cast<LanguageMode>(args.smi_at(index));
+
+
MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(Isolate* isolate,
JSObject* boilerplate) {
StackLimitCheck check(isolate);
@@ -154,7 +174,7 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(Isolate* isolate,
}
} else {
{ MaybeObject* maybe_result =
- heap->AllocateFixedArray(copy->NumberOfLocalProperties(NONE));
+ heap->AllocateFixedArray(copy->NumberOfLocalProperties());
if (!maybe_result->ToObject(&result)) return maybe_result;
}
FixedArray* names = FixedArray::cast(result);
@@ -217,7 +237,7 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(Isolate* isolate,
break;
}
case DICTIONARY_ELEMENTS: {
- NumberDictionary* element_dictionary = copy->element_dictionary();
+ SeededNumberDictionary* element_dictionary = copy->element_dictionary();
int capacity = element_dictionary->Capacity();
for (int i = 0; i < capacity; i++) {
Object* k = element_dictionary->KeyAt(i);
@@ -255,18 +275,6 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(Isolate* isolate,
}
-RUNTIME_FUNCTION(MaybeObject*, Runtime_CloneLiteralBoilerplate) {
- CONVERT_CHECKED(JSObject, boilerplate, args[0]);
- return DeepCopyBoilerplate(isolate, boilerplate);
-}
-
-
-RUNTIME_FUNCTION(MaybeObject*, Runtime_CloneShallowLiteralBoilerplate) {
- CONVERT_CHECKED(JSObject, boilerplate, args[0]);
- return isolate->heap()->CopyJSObject(boilerplate);
-}
-
-
static Handle<Map> ComputeObjectLiteralMap(
Handle<Context> context,
Handle<FixedArray> constant_properties,
@@ -274,45 +282,43 @@ static Handle<Map> ComputeObjectLiteralMap(
Isolate* isolate = context->GetIsolate();
int properties_length = constant_properties->length();
int number_of_properties = properties_length / 2;
- if (FLAG_canonicalize_object_literal_maps) {
- // Check that there are only symbols and array indices among keys.
- int number_of_symbol_keys = 0;
- for (int p = 0; p != properties_length; p += 2) {
- Object* key = constant_properties->get(p);
- uint32_t element_index = 0;
- if (key->IsSymbol()) {
- number_of_symbol_keys++;
- } else if (key->ToArrayIndex(&element_index)) {
- // An index key does not require space in the property backing store.
- number_of_properties--;
- } else {
- // Bail out as a non-symbol non-index key makes caching impossible.
- // ASSERT to make sure that the if condition after the loop is false.
- ASSERT(number_of_symbol_keys != number_of_properties);
- break;
- }
+ // Check that there are only symbols and array indices among keys.
+ int number_of_symbol_keys = 0;
+ for (int p = 0; p != properties_length; p += 2) {
+ Object* key = constant_properties->get(p);
+ uint32_t element_index = 0;
+ if (key->IsSymbol()) {
+ number_of_symbol_keys++;
+ } else if (key->ToArrayIndex(&element_index)) {
+ // An index key does not require space in the property backing store.
+ number_of_properties--;
+ } else {
+ // Bail out as a non-symbol non-index key makes caching impossible.
+ // ASSERT to make sure that the if condition after the loop is false.
+ ASSERT(number_of_symbol_keys != number_of_properties);
+ break;
}
- // If we only have symbols and array indices among keys then we can
- // use the map cache in the global context.
- const int kMaxKeys = 10;
- if ((number_of_symbol_keys == number_of_properties) &&
- (number_of_symbol_keys < kMaxKeys)) {
- // Create the fixed array with the key.
- Handle<FixedArray> keys =
- isolate->factory()->NewFixedArray(number_of_symbol_keys);
- if (number_of_symbol_keys > 0) {
- int index = 0;
- for (int p = 0; p < properties_length; p += 2) {
- Object* key = constant_properties->get(p);
- if (key->IsSymbol()) {
- keys->set(index++, key);
- }
+ }
+ // If we only have symbols and array indices among keys then we can
+ // use the map cache in the global context.
+ const int kMaxKeys = 10;
+ if ((number_of_symbol_keys == number_of_properties) &&
+ (number_of_symbol_keys < kMaxKeys)) {
+ // Create the fixed array with the key.
+ Handle<FixedArray> keys =
+ isolate->factory()->NewFixedArray(number_of_symbol_keys);
+ if (number_of_symbol_keys > 0) {
+ int index = 0;
+ for (int p = 0; p < properties_length; p += 2) {
+ Object* key = constant_properties->get(p);
+ if (key->IsSymbol()) {
+ keys->set(index++, key);
}
- ASSERT(index == number_of_symbol_keys);
}
- *is_result_from_cache = true;
- return isolate->factory()->ObjectLiteralMapFromCache(context, keys);
+ ASSERT(index == number_of_symbol_keys);
}
+ *is_result_from_cache = true;
+ return isolate->factory()->ObjectLiteralMapFromCache(context, keys);
}
*is_result_from_cache = false;
return isolate->factory()->CopyMap(
@@ -356,7 +362,7 @@ static Handle<Object> CreateObjectLiteralBoilerplate(
Handle<JSObject> boilerplate = isolate->factory()->NewJSObjectFromMap(map);
// Normalize the elements of the boilerplate to save space if needed.
- if (!should_have_fast_elements) NormalizeElements(boilerplate);
+ if (!should_have_fast_elements) JSObject::NormalizeElements(boilerplate);
// Add the constant properties to the boilerplate.
int length = constant_properties->length();
@@ -366,7 +372,8 @@ static Handle<Object> CreateObjectLiteralBoilerplate(
// Normalize the properties of object to avoid n^2 behavior
// when extending the object multiple properties. Indicate the number of
// properties to be added.
- NormalizeProperties(boilerplate, KEEP_INOBJECT_PROPERTIES, length / 2);
+ JSObject::NormalizeProperties(
+ boilerplate, KEEP_INOBJECT_PROPERTIES, length / 2);
}
for (int index = 0; index < length; index +=2) {
@@ -384,22 +391,18 @@ static Handle<Object> CreateObjectLiteralBoilerplate(
if (key->IsSymbol()) {
if (Handle<String>::cast(key)->AsArrayIndex(&element_index)) {
// Array index as string (uint32).
- result = SetOwnElement(boilerplate,
- element_index,
- value,
- kNonStrictMode);
+ result = JSObject::SetOwnElement(
+ boilerplate, element_index, value, kNonStrictMode);
} else {
Handle<String> name(String::cast(*key));
ASSERT(!name->AsArrayIndex(&element_index));
- result = SetLocalPropertyIgnoreAttributes(boilerplate, name,
- value, NONE);
+ result = JSObject::SetLocalPropertyIgnoreAttributes(
+ boilerplate, name, value, NONE);
}
} else if (key->ToArrayIndex(&element_index)) {
// Array index (uint32).
- result = SetOwnElement(boilerplate,
- element_index,
- value,
- kNonStrictMode);
+ result = JSObject::SetOwnElement(
+ boilerplate, element_index, value, kNonStrictMode);
} else {
// Non-uint32 number.
ASSERT(key->IsNumber());
@@ -409,8 +412,8 @@ static Handle<Object> CreateObjectLiteralBoilerplate(
const char* str = DoubleToCString(num, buffer);
Handle<String> name =
isolate->factory()->NewStringFromAscii(CStrVector(str));
- result = SetLocalPropertyIgnoreAttributes(boilerplate, name,
- value, NONE);
+ result = JSObject::SetLocalPropertyIgnoreAttributes(
+ boilerplate, name, value, NONE);
}
// If setting the property on the boilerplate throws an
// exception, the exception is converted to an empty handle in
@@ -424,18 +427,35 @@ static Handle<Object> CreateObjectLiteralBoilerplate(
// computed properties have been assigned so that we can generate
// constant function properties.
if (should_transform && !has_function_literal) {
- TransformToFastProperties(boilerplate,
- boilerplate->map()->unused_property_fields());
+ JSObject::TransformToFastProperties(
+ boilerplate, boilerplate->map()->unused_property_fields());
}
return boilerplate;
}
+MaybeObject* TransitionElements(Handle<Object> object,
+ ElementsKind to_kind,
+ Isolate* isolate) {
+ HandleScope scope(isolate);
+ if (!object->IsJSObject()) return isolate->ThrowIllegalOperation();
+ ElementsKind from_kind =
+ Handle<JSObject>::cast(object)->map()->elements_kind();
+ if (Map::IsValidElementsTransition(from_kind, to_kind)) {
+ Handle<Object> result = JSObject::TransitionElementsKind(
+ Handle<JSObject>::cast(object), to_kind);
+ if (result.is_null()) return isolate->ThrowIllegalOperation();
+ return *result;
+ }
+ return isolate->ThrowIllegalOperation();
+}
+
+
static const int kSmiOnlyLiteralMinimumLength = 1024;
-static Handle<Object> CreateArrayLiteralBoilerplate(
+Handle<Object> Runtime::CreateArrayLiteralBoilerplate(
Isolate* isolate,
Handle<FixedArray> literals,
Handle<FixedArray> elements) {
@@ -450,25 +470,13 @@ static Handle<Object> CreateArrayLiteralBoilerplate(
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(elements->get(1)));
- ASSERT(FLAG_smi_only_arrays || constant_elements_kind == FAST_ELEMENTS ||
- constant_elements_kind == FAST_SMI_ONLY_ELEMENTS);
- bool allow_literal_kind_transition = FLAG_smi_only_arrays &&
- constant_elements_kind > object->GetElementsKind();
-
- if (!FLAG_smi_only_arrays &&
- constant_elements_values->length() > kSmiOnlyLiteralMinimumLength &&
- constant_elements_kind != object->GetElementsKind()) {
- allow_literal_kind_transition = true;
- }
-
- // If the ElementsKind of the constant values of the array literal are less
- // specific than the ElementsKind of the boilerplate array object, change the
- // boilerplate array object's map to reflect that kind.
- if (allow_literal_kind_transition) {
- Handle<Map> transitioned_array_map =
- isolate->factory()->GetElementsTransitionMap(object,
- constant_elements_kind);
- object->set_map(*transitioned_array_map);
+ Context* global_context = isolate->context()->global_context();
+ if (constant_elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ object->set_map(Map::cast(global_context->smi_js_array_map()));
+ } else if (constant_elements_kind == FAST_DOUBLE_ELEMENTS) {
+ object->set_map(Map::cast(global_context->double_js_array_map()));
+ } else {
+ object->set_map(Map::cast(global_context->object_js_array_map()));
}
Handle<FixedArrayBase> copied_elements_values;
@@ -513,6 +521,16 @@ static Handle<Object> CreateArrayLiteralBoilerplate(
}
object->set_elements(*copied_elements_values);
object->set_length(Smi::FromInt(copied_elements_values->length()));
+
+ // Ensure that the boilerplate object has FAST_ELEMENTS, unless the flag is
+ // on or the object is larger than the threshold.
+ if (!FLAG_smi_only_arrays &&
+ constant_elements_values->length() < kSmiOnlyLiteralMinimumLength) {
+ if (object->GetElementsKind() != FAST_ELEMENTS) {
+ CHECK(!TransitionElements(object, FAST_ELEMENTS, isolate)->IsFailure());
+ }
+ }
+
return object;
}
@@ -537,7 +555,8 @@ static Handle<Object> CreateLiteralBoilerplate(
false,
kHasNoFunctionLiteral);
case CompileTimeValue::ARRAY_LITERAL:
- return CreateArrayLiteralBoilerplate(isolate, literals, elements);
+ return Runtime::CreateArrayLiteralBoilerplate(
+ isolate, literals, elements);
default:
UNREACHABLE();
return Handle<Object>::null();
@@ -545,34 +564,12 @@ static Handle<Object> CreateLiteralBoilerplate(
}
-RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateArrayLiteralBoilerplate) {
- // Takes a FixedArray of elements containing the literal elements of
- // the array literal and produces JSArray with those elements.
- // Additionally takes the literals array of the surrounding function
- // which contains the context from which to get the Array function
- // to use for creating the array literal.
- HandleScope scope(isolate);
- ASSERT(args.length() == 3);
- CONVERT_ARG_CHECKED(FixedArray, literals, 0);
- CONVERT_SMI_ARG_CHECKED(literals_index, 1);
- CONVERT_ARG_CHECKED(FixedArray, elements, 2);
-
- Handle<Object> object =
- CreateArrayLiteralBoilerplate(isolate, literals, elements);
- if (object.is_null()) return Failure::Exception();
-
- // Update the functions literal and return the boilerplate.
- literals->set(literals_index, *object);
- return *object;
-}
-
-
RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateObjectLiteral) {
HandleScope scope(isolate);
ASSERT(args.length() == 4);
- CONVERT_ARG_CHECKED(FixedArray, literals, 0);
+ CONVERT_ARG_HANDLE_CHECKED(FixedArray, literals, 0);
CONVERT_SMI_ARG_CHECKED(literals_index, 1);
- CONVERT_ARG_CHECKED(FixedArray, constant_properties, 2);
+ CONVERT_ARG_HANDLE_CHECKED(FixedArray, constant_properties, 2);
CONVERT_SMI_ARG_CHECKED(flags, 3);
bool should_have_fast_elements = (flags & ObjectLiteral::kFastElements) != 0;
bool has_function_literal = (flags & ObjectLiteral::kHasFunction) != 0;
@@ -596,9 +593,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateObjectLiteral) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateObjectLiteralShallow) {
HandleScope scope(isolate);
ASSERT(args.length() == 4);
- CONVERT_ARG_CHECKED(FixedArray, literals, 0);
+ CONVERT_ARG_HANDLE_CHECKED(FixedArray, literals, 0);
CONVERT_SMI_ARG_CHECKED(literals_index, 1);
- CONVERT_ARG_CHECKED(FixedArray, constant_properties, 2);
+ CONVERT_ARG_HANDLE_CHECKED(FixedArray, constant_properties, 2);
CONVERT_SMI_ARG_CHECKED(flags, 3);
bool should_have_fast_elements = (flags & ObjectLiteral::kFastElements) != 0;
bool has_function_literal = (flags & ObjectLiteral::kHasFunction) != 0;
@@ -622,14 +619,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateObjectLiteralShallow) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateArrayLiteral) {
HandleScope scope(isolate);
ASSERT(args.length() == 3);
- CONVERT_ARG_CHECKED(FixedArray, literals, 0);
+ CONVERT_ARG_HANDLE_CHECKED(FixedArray, literals, 0);
CONVERT_SMI_ARG_CHECKED(literals_index, 1);
- CONVERT_ARG_CHECKED(FixedArray, elements, 2);
+ CONVERT_ARG_HANDLE_CHECKED(FixedArray, elements, 2);
// Check if boilerplate exists. If not, create it first.
Handle<Object> boilerplate(literals->get(literals_index), isolate);
if (*boilerplate == isolate->heap()->undefined_value()) {
- boilerplate = CreateArrayLiteralBoilerplate(isolate, literals, elements);
+ boilerplate =
+ Runtime::CreateArrayLiteralBoilerplate(isolate, literals, elements);
if (boilerplate.is_null()) return Failure::Exception();
// Update the functions literal and return the boilerplate.
literals->set(literals_index, *boilerplate);
@@ -641,14 +639,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateArrayLiteral) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateArrayLiteralShallow) {
HandleScope scope(isolate);
ASSERT(args.length() == 3);
- CONVERT_ARG_CHECKED(FixedArray, literals, 0);
+ CONVERT_ARG_HANDLE_CHECKED(FixedArray, literals, 0);
CONVERT_SMI_ARG_CHECKED(literals_index, 1);
- CONVERT_ARG_CHECKED(FixedArray, elements, 2);
+ CONVERT_ARG_HANDLE_CHECKED(FixedArray, elements, 2);
// Check if boilerplate exists. If not, create it first.
Handle<Object> boilerplate(literals->get(literals_index), isolate);
if (*boilerplate == isolate->heap()->undefined_value()) {
- boilerplate = CreateArrayLiteralBoilerplate(isolate, literals, elements);
+ ASSERT(*elements != isolate->heap()->empty_fixed_array());
+ boilerplate =
+ Runtime::CreateArrayLiteralBoilerplate(isolate, literals, elements);
if (boilerplate.is_null()) return Failure::Exception();
// Update the functions literal and return the boilerplate.
literals->set(literals_index, *boilerplate);
@@ -700,28 +700,28 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsJSFunctionProxy) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetHandler) {
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSProxy, proxy, args[0]);
+ CONVERT_ARG_CHECKED(JSProxy, proxy, 0);
return proxy->handler();
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetCallTrap) {
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSFunctionProxy, proxy, args[0]);
+ CONVERT_ARG_CHECKED(JSFunctionProxy, proxy, 0);
return proxy->call_trap();
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetConstructTrap) {
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSFunctionProxy, proxy, args[0]);
+ CONVERT_ARG_CHECKED(JSFunctionProxy, proxy, 0);
return proxy->construct_trap();
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_Fix) {
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSProxy, proxy, args[0]);
+ CONVERT_ARG_CHECKED(JSProxy, proxy, 0);
proxy->Fix();
return isolate->heap()->undefined_value();
}
@@ -730,7 +730,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Fix) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_SetInitialize) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_ARG_CHECKED(JSSet, holder, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSSet, holder, 0);
Handle<ObjectHashSet> table = isolate->factory()->NewObjectHashSet(0);
holder->set_table(*table);
return *holder;
@@ -740,7 +740,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetInitialize) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_SetAdd) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
- CONVERT_ARG_CHECKED(JSSet, holder, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSSet, holder, 0);
Handle<Object> key(args[1]);
Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table()));
table = ObjectHashSetAdd(table, key);
@@ -752,7 +752,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetAdd) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_SetHas) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
- CONVERT_ARG_CHECKED(JSSet, holder, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSSet, holder, 0);
Handle<Object> key(args[1]);
Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table()));
return isolate->heap()->ToBoolean(table->Contains(*key));
@@ -762,7 +762,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetHas) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_SetDelete) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
- CONVERT_ARG_CHECKED(JSSet, holder, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSSet, holder, 0);
Handle<Object> key(args[1]);
Handle<ObjectHashSet> table(ObjectHashSet::cast(holder->table()));
table = ObjectHashSetRemove(table, key);
@@ -774,7 +774,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetDelete) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_MapInitialize) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_ARG_CHECKED(JSMap, holder, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSMap, holder, 0);
Handle<ObjectHashTable> table = isolate->factory()->NewObjectHashTable(0);
holder->set_table(*table);
return *holder;
@@ -784,7 +784,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_MapInitialize) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_MapGet) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
- CONVERT_ARG_CHECKED(JSMap, holder, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSMap, holder, 0);
Handle<Object> key(args[1]);
return ObjectHashTable::cast(holder->table())->Lookup(*key);
}
@@ -793,7 +793,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_MapGet) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_MapSet) {
HandleScope scope(isolate);
ASSERT(args.length() == 3);
- CONVERT_ARG_CHECKED(JSMap, holder, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSMap, holder, 0);
Handle<Object> key(args[1]);
Handle<Object> value(args[2]);
Handle<ObjectHashTable> table(ObjectHashTable::cast(holder->table()));
@@ -806,7 +806,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_MapSet) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapInitialize) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_ARG_CHECKED(JSWeakMap, weakmap, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSWeakMap, weakmap, 0);
ASSERT(weakmap->map()->inobject_properties() == 0);
Handle<ObjectHashTable> table = isolate->factory()->NewObjectHashTable(0);
weakmap->set_table(*table);
@@ -818,8 +818,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapInitialize) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapGet) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
- CONVERT_ARG_CHECKED(JSWeakMap, weakmap, 0);
- CONVERT_ARG_CHECKED(JSReceiver, key, 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSWeakMap, weakmap, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSReceiver, key, 1);
return ObjectHashTable::cast(weakmap->table())->Lookup(*key);
}
@@ -827,8 +827,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapGet) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapSet) {
HandleScope scope(isolate);
ASSERT(args.length() == 3);
- CONVERT_ARG_CHECKED(JSWeakMap, weakmap, 0);
- CONVERT_ARG_CHECKED(JSReceiver, key, 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSWeakMap, weakmap, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSReceiver, key, 1);
Handle<Object> value(args[2]);
Handle<ObjectHashTable> table(ObjectHashTable::cast(weakmap->table()));
Handle<ObjectHashTable> new_table = PutIntoObjectHashTable(table, key, value);
@@ -849,7 +849,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ClassOf) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetPrototype) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSReceiver, input_obj, args[0]);
+ CONVERT_ARG_CHECKED(JSReceiver, input_obj, 0);
Object* obj = input_obj;
// We don't expect access checks to be needed on JSProxy objects.
ASSERT(!obj->IsAccessCheckNeeded() || obj->IsJSObject());
@@ -883,57 +883,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsInPrototypeChain) {
}
-// Inserts an object as the hidden prototype of another object.
-RUNTIME_FUNCTION(MaybeObject*, Runtime_SetHiddenPrototype) {
- NoHandleAllocation ha;
- ASSERT(args.length() == 2);
- CONVERT_CHECKED(JSObject, jsobject, args[0]);
- CONVERT_CHECKED(JSObject, proto, args[1]);
-
- // Sanity checks. The old prototype (that we are replacing) could
- // theoretically be null, but if it is not null then check that we
- // didn't already install a hidden prototype here.
- RUNTIME_ASSERT(!jsobject->GetPrototype()->IsHeapObject() ||
- !HeapObject::cast(jsobject->GetPrototype())->map()->is_hidden_prototype());
- RUNTIME_ASSERT(!proto->map()->is_hidden_prototype());
-
- // Allocate up front before we start altering state in case we get a GC.
- Object* map_or_failure;
- { MaybeObject* maybe_map_or_failure = proto->map()->CopyDropTransitions();
- if (!maybe_map_or_failure->ToObject(&map_or_failure)) {
- return maybe_map_or_failure;
- }
- }
- Map* new_proto_map = Map::cast(map_or_failure);
-
- { MaybeObject* maybe_map_or_failure = jsobject->map()->CopyDropTransitions();
- if (!maybe_map_or_failure->ToObject(&map_or_failure)) {
- return maybe_map_or_failure;
- }
- }
- Map* new_map = Map::cast(map_or_failure);
-
- // Set proto's prototype to be the old prototype of the object.
- new_proto_map->set_prototype(jsobject->GetPrototype());
- proto->set_map(new_proto_map);
- new_proto_map->set_is_hidden_prototype();
-
- // Set the object's prototype to proto.
- new_map->set_prototype(proto);
- jsobject->set_map(new_map);
-
- return isolate->heap()->undefined_value();
-}
-
-
-RUNTIME_FUNCTION(MaybeObject*, Runtime_IsConstructCall) {
- NoHandleAllocation ha;
- ASSERT(args.length() == 0);
- JavaScriptFrameIterator it(isolate);
- return isolate->heap()->ToBoolean(it.frame()->IsConstructor());
-}
-
-
// Recursively traverses hidden prototypes if property is not found
static void GetOwnPropertyImplementation(JSObject* obj,
String* name,
@@ -1047,23 +996,14 @@ enum PropertyDescriptorIndices {
DESCRIPTOR_SIZE
};
-// Returns an array with the property description:
-// if args[1] is not a property on args[0]
-// returns undefined
-// if args[1] is a data property on args[0]
-// [false, value, Writeable, Enumerable, Configurable]
-// if args[1] is an accessor on args[0]
-// [true, GetFunction, SetFunction, Enumerable, Configurable]
-RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOwnProperty) {
- ASSERT(args.length() == 2);
+
+static MaybeObject* GetOwnProperty(Isolate* isolate,
+ Handle<JSObject> obj,
+ Handle<String> name) {
Heap* heap = isolate->heap();
- HandleScope scope(isolate);
Handle<FixedArray> elms = isolate->factory()->NewFixedArray(DESCRIPTOR_SIZE);
Handle<JSArray> desc = isolate->factory()->NewJSArrayWithElements(elms);
LookupResult result(isolate);
- CONVERT_ARG_CHECKED(JSObject, obj, 0);
- CONVERT_ARG_CHECKED(String, name, 1);
-
// This could be an element.
uint32_t index;
if (name->AsArrayIndex(&index)) {
@@ -1083,7 +1023,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOwnProperty) {
elms->set(IS_ACCESSOR_INDEX, heap->false_value());
elms->set(VALUE_INDEX, *substr);
elms->set(WRITABLE_INDEX, heap->false_value());
- elms->set(ENUMERABLE_INDEX, heap->false_value());
+ elms->set(ENUMERABLE_INDEX, heap->true_value());
elms->set(CONFIGURABLE_INDEX, heap->false_value());
return *desc;
}
@@ -1109,26 +1049,26 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOwnProperty) {
holder = Handle<JSObject>(JSObject::cast(proto));
}
FixedArray* elements = FixedArray::cast(holder->elements());
- NumberDictionary* dictionary = NULL;
+ SeededNumberDictionary* dictionary = NULL;
if (elements->map() == heap->non_strict_arguments_elements_map()) {
- dictionary = NumberDictionary::cast(elements->get(1));
+ dictionary = SeededNumberDictionary::cast(elements->get(1));
} else {
- dictionary = NumberDictionary::cast(elements);
+ dictionary = SeededNumberDictionary::cast(elements);
}
int entry = dictionary->FindEntry(index);
- ASSERT(entry != NumberDictionary::kNotFound);
+ ASSERT(entry != SeededNumberDictionary::kNotFound);
PropertyDetails details = dictionary->DetailsAt(entry);
switch (details.type()) {
case CALLBACKS: {
// This is an accessor property with getter and/or setter.
- FixedArray* callbacks =
- FixedArray::cast(dictionary->ValueAt(entry));
+ AccessorPair* accessors =
+ AccessorPair::cast(dictionary->ValueAt(entry));
elms->set(IS_ACCESSOR_INDEX, heap->true_value());
if (CheckElementAccess(*obj, index, v8::ACCESS_GET)) {
- elms->set(GETTER_INDEX, callbacks->get(0));
+ elms->set(GETTER_INDEX, accessors->GetComponent(ACCESSOR_GETTER));
}
if (CheckElementAccess(*obj, index, v8::ACCESS_SET)) {
- elms->set(SETTER_INDEX, callbacks->get(1));
+ elms->set(SETTER_INDEX, accessors->GetComponent(ACCESSOR_SETTER));
}
break;
}
@@ -1167,18 +1107,18 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOwnProperty) {
elms->set(CONFIGURABLE_INDEX, heap->ToBoolean(!result.IsDontDelete()));
bool is_js_accessor = (result.type() == CALLBACKS) &&
- (result.GetCallbackObject()->IsFixedArray());
+ (result.GetCallbackObject()->IsAccessorPair());
if (is_js_accessor) {
// __defineGetter__/__defineSetter__ callback.
elms->set(IS_ACCESSOR_INDEX, heap->true_value());
- FixedArray* structure = FixedArray::cast(result.GetCallbackObject());
+ AccessorPair* accessors = AccessorPair::cast(result.GetCallbackObject());
if (CheckAccess(*obj, *name, &result, v8::ACCESS_GET)) {
- elms->set(GETTER_INDEX, structure->get(0));
+ elms->set(GETTER_INDEX, accessors->GetComponent(ACCESSOR_GETTER));
}
if (CheckAccess(*obj, *name, &result, v8::ACCESS_SET)) {
- elms->set(SETTER_INDEX, structure->get(1));
+ elms->set(SETTER_INDEX, accessors->GetComponent(ACCESSOR_SETTER));
}
} else {
elms->set(IS_ACCESSOR_INDEX, heap->false_value());
@@ -1197,16 +1137,32 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOwnProperty) {
}
+// Returns an array with the property description:
+// if args[1] is not a property on args[0]
+// returns undefined
+// if args[1] is a data property on args[0]
+// [false, value, Writeable, Enumerable, Configurable]
+// if args[1] is an accessor on args[0]
+// [true, GetFunction, SetFunction, Enumerable, Configurable]
+RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOwnProperty) {
+ ASSERT(args.length() == 2);
+ HandleScope scope(isolate);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, name, 1);
+ return GetOwnProperty(isolate, obj, name);
+}
+
+
RUNTIME_FUNCTION(MaybeObject*, Runtime_PreventExtensions) {
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSObject, obj, args[0]);
+ CONVERT_ARG_CHECKED(JSObject, obj, 0);
return obj->PreventExtensions();
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_IsExtensible) {
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSObject, obj, args[0]);
+ CONVERT_ARG_CHECKED(JSObject, obj, 0);
if (obj->IsJSGlobalProxy()) {
Object* proto = obj->GetPrototype();
if (proto->IsNull()) return isolate->heap()->false_value();
@@ -1220,9 +1176,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsExtensible) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpCompile) {
HandleScope scope(isolate);
ASSERT(args.length() == 3);
- CONVERT_ARG_CHECKED(JSRegExp, re, 0);
- CONVERT_ARG_CHECKED(String, pattern, 1);
- CONVERT_ARG_CHECKED(String, flags, 2);
+ CONVERT_ARG_HANDLE_CHECKED(JSRegExp, re, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, pattern, 1);
+ CONVERT_ARG_HANDLE_CHECKED(String, flags, 2);
Handle<Object> result = RegExpImpl::Compile(re, pattern, flags);
if (result.is_null()) return Failure::Exception();
return *result;
@@ -1232,7 +1188,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpCompile) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateApiFunction) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_ARG_CHECKED(FunctionTemplateInfo, data, 0);
+ CONVERT_ARG_HANDLE_CHECKED(FunctionTemplateInfo, data, 0);
return *isolate->factory()->CreateApiFunction(data);
}
@@ -1247,9 +1203,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsTemplate) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetTemplateField) {
ASSERT(args.length() == 2);
- CONVERT_CHECKED(HeapObject, templ, args[0]);
- CONVERT_CHECKED(Smi, field, args[1]);
- int index = field->value();
+ CONVERT_ARG_CHECKED(HeapObject, templ, 0);
+ CONVERT_SMI_ARG_CHECKED(index, 1)
int offset = index * kPointerSize + HeapObject::kHeaderSize;
InstanceType type = templ->map()->instance_type();
RUNTIME_ASSERT(type == FUNCTION_TEMPLATE_INFO_TYPE ||
@@ -1266,7 +1221,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetTemplateField) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_DisableAccessChecks) {
ASSERT(args.length() == 1);
- CONVERT_CHECKED(HeapObject, object, args[0]);
+ CONVERT_ARG_CHECKED(HeapObject, object, 0);
Map* old_map = object->map();
bool needs_access_checks = old_map->is_access_check_needed();
if (needs_access_checks) {
@@ -1285,7 +1240,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DisableAccessChecks) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_EnableAccessChecks) {
ASSERT(args.length() == 1);
- CONVERT_CHECKED(HeapObject, object, args[0]);
+ CONVERT_ARG_CHECKED(HeapObject, object, 0);
Map* old_map = object->map();
if (!old_map->is_access_check_needed()) {
// Copy map so it won't interfere constructor's initial map.
@@ -1317,23 +1272,19 @@ static Failure* ThrowRedeclarationError(Isolate* isolate,
RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareGlobals) {
ASSERT(args.length() == 3);
HandleScope scope(isolate);
+ Handle<GlobalObject> global = Handle<GlobalObject>(
+ isolate->context()->global());
Handle<Context> context = args.at<Context>(0);
- CONVERT_ARG_CHECKED(FixedArray, pairs, 1);
+ CONVERT_ARG_HANDLE_CHECKED(FixedArray, pairs, 1);
CONVERT_SMI_ARG_CHECKED(flags, 2);
- Handle<JSObject> js_global = Handle<JSObject>(isolate->context()->global());
- Handle<JSObject> qml_global = Handle<JSObject>(isolate->context()->qml_global());
-
// Traverse the name/value pairs and set the properties.
int length = pairs->length();
- for (int i = 0; i < length; i += 3) {
+ for (int i = 0; i < length; i += 2) {
HandleScope scope(isolate);
Handle<String> name(String::cast(pairs->get(i)));
Handle<Object> value(pairs->get(i + 1), isolate);
- Handle<Smi> is_qml_global(Smi::cast(pairs->get(i + 2)));
-
- Handle<JSObject> global = is_qml_global->value()?qml_global:js_global;
// We have to declare a global const property. To capture we only
// assign to it when evaluating the assignment for "const x =
@@ -1344,7 +1295,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareGlobals) {
// Lookup the property in the global object, and don't set the
// value of the variable if the property is already there.
LookupResult lookup(isolate);
- global->Lookup(*name, &lookup, true);
+ global->Lookup(*name, &lookup);
if (lookup.IsProperty()) {
// We found an existing property. Unless it was an interceptor
// that claims the property is absent, skip this declaration.
@@ -1371,7 +1322,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareGlobals) {
}
LookupResult lookup(isolate);
- global->LocalLookup(*name, &lookup, true);
+ global->LocalLookup(*name, &lookup);
// Compute the property attributes. According to ECMA-262, section
// 13, page 71, the property must be read-only and
@@ -1386,6 +1337,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareGlobals) {
attr |= READ_ONLY;
}
+ LanguageMode language_mode = DeclareGlobalsLanguageMode::decode(flags);
+
// Safari does not allow the invocation of callback setters for
// function declarations. To mimic this behavior, we do not allow
// the invocation of setters for function values. This makes a
@@ -1393,27 +1346,33 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareGlobals) {
// handlers such as "function onload() {}". Firefox does call the
// onload setter in those case and Safari does not. We follow
// Safari for compatibility.
- if (value->IsJSFunction()) {
- // Do not change DONT_DELETE to false from true.
+ if (is_function_declaration) {
if (lookup.IsProperty() && (lookup.type() != INTERCEPTOR)) {
+ // Do not overwrite READ_ONLY properties.
+ if (lookup.GetAttributes() & READ_ONLY) {
+ if (language_mode != CLASSIC_MODE) {
+ Handle<Object> args[] = { name };
+ return isolate->Throw(*isolate->factory()->NewTypeError(
+ "strict_cannot_assign", HandleVector(args, ARRAY_SIZE(args))));
+ }
+ continue;
+ }
+ // Do not change DONT_DELETE to false from true.
attr |= lookup.GetAttributes() & DONT_DELETE;
}
PropertyAttributes attributes = static_cast<PropertyAttributes>(attr);
- RETURN_IF_EMPTY_HANDLE(isolate,
- SetLocalPropertyIgnoreAttributes(global,
- name,
- value,
- attributes));
+ RETURN_IF_EMPTY_HANDLE(
+ isolate,
+ JSObject::SetLocalPropertyIgnoreAttributes(global, name, value,
+ attributes));
} else {
- StrictModeFlag strict_mode = DeclareGlobalsStrictModeFlag::decode(flags);
- RETURN_IF_EMPTY_HANDLE(isolate,
- SetProperty(global,
- name,
- value,
- static_cast<PropertyAttributes>(attr),
- strict_mode,
- true));
+ RETURN_IF_EMPTY_HANDLE(
+ isolate,
+ JSReceiver::SetProperty(global, name, value,
+ static_cast<PropertyAttributes>(attr),
+ language_mode == CLASSIC_MODE
+ ? kNonStrictMode : kStrictMode));
}
}
@@ -1467,7 +1426,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareContextSlot) {
Handle<JSObject> object = Handle<JSObject>::cast(holder);
RETURN_IF_EMPTY_HANDLE(
isolate,
- SetProperty(object, name, initial_value, mode, kNonStrictMode));
+ JSReceiver::SetProperty(object, name, initial_value, mode,
+ kNonStrictMode));
}
}
@@ -1503,13 +1463,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareContextSlot) {
!object->IsJSContextExtensionObject()) {
LookupResult lookup(isolate);
object->Lookup(*name, &lookup);
- if (lookup.IsProperty() && (lookup.type() == CALLBACKS)) {
+ if (lookup.IsFound() && (lookup.type() == CALLBACKS)) {
return ThrowRedeclarationError(isolate, "const", name);
}
}
- RETURN_IF_EMPTY_HANDLE(isolate,
- SetProperty(object, name, value, mode,
- kNonStrictMode));
+ RETURN_IF_EMPTY_HANDLE(
+ isolate,
+ JSReceiver::SetProperty(object, name, value, mode, kNonStrictMode));
}
return isolate->heap()->undefined_value();
@@ -1519,23 +1479,20 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeclareContextSlot) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeVarGlobal) {
NoHandleAllocation nha;
// args[0] == name
- // args[1] == strict_mode
- // args[2] == qml_mode
- // args[3] == value (optional)
+ // args[1] == language_mode
+ // args[2] == value (optional)
// Determine if we need to assign to the variable if it already
// exists (based on the number of arguments).
- RUNTIME_ASSERT(args.length() == 3 || args.length() == 4);
- bool assign = args.length() == 4;
+ RUNTIME_ASSERT(args.length() == 2 || args.length() == 3);
+ bool assign = args.length() == 3;
- CONVERT_ARG_CHECKED(String, name, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, name, 0);
+ GlobalObject* global = isolate->context()->global();
RUNTIME_ASSERT(args[1]->IsSmi());
- CONVERT_STRICT_MODE_ARG(strict_mode, 1);
-
- RUNTIME_ASSERT(args[2]->IsSmi());
- int qml_mode = Smi::cast(args[2])->value();
-
- JSObject* global = qml_mode?isolate->context()->qml_global():isolate->context()->global();
+ CONVERT_LANGUAGE_MODE_ARG(language_mode, 1);
+ StrictModeFlag strict_mode_flag = (language_mode == CLASSIC_MODE)
+ ? kNonStrictMode : kStrictMode;
// According to ECMA-262, section 12.2, page 62, the property must
// not be deletable.
@@ -1553,8 +1510,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeVarGlobal) {
while (object->IsJSObject() &&
JSObject::cast(object)->map()->is_hidden_prototype()) {
JSObject* raw_holder = JSObject::cast(object);
- raw_holder->LocalLookup(*name, &lookup, true);
- if (lookup.IsProperty() && lookup.type() == INTERCEPTOR) {
+ raw_holder->LocalLookup(*name, &lookup);
+ if (lookup.IsFound() && lookup.type() == INTERCEPTOR) {
HandleScope handle_scope(isolate);
Handle<JSObject> holder(raw_holder);
PropertyAttributes intercepted = holder->GetPropertyAttribute(*name);
@@ -1564,7 +1521,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeVarGlobal) {
// Found an interceptor that's not read only.
if (assign) {
return raw_holder->SetProperty(
- &lookup, *name, args[3], attributes, strict_mode);
+ &lookup, *name, args[2], attributes, strict_mode_flag);
} else {
return isolate->heap()->undefined_value();
}
@@ -1574,9 +1531,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeVarGlobal) {
}
// Reload global in case the loop above performed a GC.
- global = qml_mode?isolate->context()->qml_global():isolate->context()->global();
+ global = isolate->context()->global();
if (assign) {
- return global->SetProperty(*name, args[3], attributes, strict_mode, true);
+ return global->SetProperty(*name, args[2], attributes, strict_mode_flag);
}
return isolate->heap()->undefined_value();
}
@@ -1586,15 +1543,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeConstGlobal) {
// All constants are declared with an initial value. The name
// of the constant is the first argument and the initial value
// is the second.
- RUNTIME_ASSERT(args.length() == 3);
- CONVERT_ARG_CHECKED(String, name, 0);
+ RUNTIME_ASSERT(args.length() == 2);
+ CONVERT_ARG_HANDLE_CHECKED(String, name, 0);
Handle<Object> value = args.at<Object>(1);
- RUNTIME_ASSERT(args[2]->IsSmi());
- int qml_mode = Smi::cast(args[2])->value();
-
// Get the current global object from top.
- JSObject* global = qml_mode?isolate->context()->qml_global():isolate->context()->global();
+ GlobalObject* global = isolate->context()->global();
// According to ECMA-262, section 12.2, page 62, the property must
// not be deletable. Since it's a const, it must be READ_ONLY too.
@@ -1618,18 +1572,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeConstGlobal) {
// Restore global object from context (in case of GC) and continue
// with setting the value.
HandleScope handle_scope(isolate);
- Handle<JSObject> global(qml_mode?isolate->context()->qml_global():isolate->context()->global());
+ Handle<GlobalObject> global(isolate->context()->global());
// BUG 1213575: Handle the case where we have to set a read-only
// property through an interceptor and only do it if it's
// uninitialized, e.g. the hole. Nirk...
// Passing non-strict mode because the property is writable.
- RETURN_IF_EMPTY_HANDLE(isolate,
- SetProperty(global,
- name,
- value,
- attributes,
- kNonStrictMode));
+ RETURN_IF_EMPTY_HANDLE(
+ isolate,
+ JSReceiver::SetProperty(global, name, value, attributes,
+ kNonStrictMode));
return *value;
}
@@ -1699,7 +1651,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeConstContextSlot) {
// Strict mode not needed (const disallowed in strict mode).
RETURN_IF_EMPTY_HANDLE(
isolate,
- SetProperty(global, name, value, NONE, kNonStrictMode));
+ JSReceiver::SetProperty(global, name, value, NONE, kNonStrictMode));
return *value;
}
@@ -1725,7 +1677,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeConstContextSlot) {
// GetProperty() to get the current value as it 'unholes' the value.
LookupResult lookup(isolate);
object->LocalLookupRealNamedProperty(*name, &lookup);
- ASSERT(lookup.IsProperty()); // the property was declared
+ ASSERT(lookup.IsFound()); // the property was declared
ASSERT(lookup.IsReadOnly()); // and it was declared as read-only
PropertyType type = lookup.type();
@@ -1751,7 +1703,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_InitializeConstContextSlot) {
// Strict mode not needed (const disallowed in strict mode).
RETURN_IF_EMPTY_HANDLE(
isolate,
- SetProperty(object, name, value, attributes, kNonStrictMode));
+ JSReceiver::SetProperty(object, name, value, attributes,
+ kNonStrictMode));
}
}
@@ -1763,10 +1716,10 @@ RUNTIME_FUNCTION(MaybeObject*,
Runtime_OptimizeObjectForAddingMultipleProperties) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
- CONVERT_ARG_CHECKED(JSObject, object, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0);
CONVERT_SMI_ARG_CHECKED(properties, 1);
if (object->HasFastProperties()) {
- NormalizeProperties(object, KEEP_INOBJECT_PROPERTIES, properties);
+ JSObject::NormalizeProperties(object, KEEP_INOBJECT_PROPERTIES, properties);
}
return *object;
}
@@ -1775,12 +1728,12 @@ RUNTIME_FUNCTION(MaybeObject*,
RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpExec) {
HandleScope scope(isolate);
ASSERT(args.length() == 4);
- CONVERT_ARG_CHECKED(JSRegExp, regexp, 0);
- CONVERT_ARG_CHECKED(String, subject, 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSRegExp, regexp, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, subject, 1);
// Due to the way the JS calls are constructed this must be less than the
// length of a string, i.e. it is always a Smi. We check anyway for security.
CONVERT_SMI_ARG_CHECKED(index, 2);
- CONVERT_ARG_CHECKED(JSArray, last_match_info, 3);
+ CONVERT_ARG_HANDLE_CHECKED(JSArray, last_match_info, 3);
RUNTIME_ASSERT(last_match_info->HasFastElements());
RUNTIME_ASSERT(index >= 0);
RUNTIME_ASSERT(index <= subject->length());
@@ -1832,8 +1785,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpConstructResult) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpInitializeObject) {
AssertNoAllocation no_alloc;
ASSERT(args.length() == 5);
- CONVERT_CHECKED(JSRegExp, regexp, args[0]);
- CONVERT_CHECKED(String, source, args[1]);
+ CONVERT_ARG_CHECKED(JSRegExp, regexp, 0);
+ CONVERT_ARG_CHECKED(String, source, 1);
Object* global = args[2];
if (!global->IsTrue()) global = isolate->heap()->false_value();
@@ -1850,11 +1803,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpInitializeObject) {
JSFunction::cast(constructor)->initial_map() == map) {
// If we still have the original map, set in-object properties directly.
regexp->InObjectPropertyAtPut(JSRegExp::kSourceFieldIndex, source);
- // TODO(lrn): Consider skipping write barrier on booleans as well.
- // Both true and false should be in oldspace at all times.
- regexp->InObjectPropertyAtPut(JSRegExp::kGlobalFieldIndex, global);
- regexp->InObjectPropertyAtPut(JSRegExp::kIgnoreCaseFieldIndex, ignoreCase);
- regexp->InObjectPropertyAtPut(JSRegExp::kMultilineFieldIndex, multiline);
+ // Both true and false are immovable immortal objects so no need for write
+ // barrier.
+ regexp->InObjectPropertyAtPut(
+ JSRegExp::kGlobalFieldIndex, global, SKIP_WRITE_BARRIER);
+ regexp->InObjectPropertyAtPut(
+ JSRegExp::kIgnoreCaseFieldIndex, ignoreCase, SKIP_WRITE_BARRIER);
+ regexp->InObjectPropertyAtPut(
+ JSRegExp::kMultilineFieldIndex, multiline, SKIP_WRITE_BARRIER);
regexp->InObjectPropertyAtPut(JSRegExp::kLastIndexFieldIndex,
Smi::FromInt(0),
SKIP_WRITE_BARRIER); // It's a Smi.
@@ -1898,7 +1854,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpInitializeObject) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_FinishArrayPrototypeSetup) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_ARG_CHECKED(JSArray, prototype, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSArray, prototype, 0);
// This is necessary to enable fast checks for absence of elements
// on Array.prototype and below.
prototype->set_elements(isolate->heap()->empty_fixed_array());
@@ -1919,7 +1875,7 @@ static Handle<JSFunction> InstallBuiltin(Isolate* isolate,
code,
false);
optimized->shared()->DontAdaptArguments();
- SetProperty(holder, key, optimized, NONE, kStrictMode);
+ JSReceiver::SetProperty(holder, key, optimized, NONE, kStrictMode);
return optimized;
}
@@ -1927,7 +1883,7 @@ static Handle<JSFunction> InstallBuiltin(Isolate* isolate,
RUNTIME_FUNCTION(MaybeObject*, Runtime_SpecialArrayFunctions) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_ARG_CHECKED(JSObject, holder, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, holder, 0);
InstallBuiltin(isolate, holder, "pop", Builtins::kArrayPop);
InstallBuiltin(isolate, holder, "push", Builtins::kArrayPush);
@@ -1942,11 +1898,21 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SpecialArrayFunctions) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetDefaultReceiver) {
- NoHandleAllocation handle_free;
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSFunction, function, args[0]);
+ CONVERT_ARG_CHECKED(JSReceiver, callable, 0);
+
+ if (!callable->IsJSFunction()) {
+ HandleScope scope(isolate);
+ bool threw = false;
+ Handle<Object> delegate =
+ Execution::TryGetFunctionDelegate(Handle<JSReceiver>(callable), &threw);
+ if (threw) return Failure::Exception();
+ callable = JSFunction::cast(*delegate);
+ }
+ JSFunction* function = JSFunction::cast(callable);
+
SharedFunctionInfo* shared = function->shared();
- if (shared->native() || shared->strict_mode()) {
+ if (shared->native() || !shared->is_classic_mode()) {
return isolate->heap()->undefined_value();
}
// Returns undefined for strict or native functions, or
@@ -1961,7 +1927,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetDefaultReceiver) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_MaterializeRegExpLiteral) {
HandleScope scope(isolate);
ASSERT(args.length() == 4);
- CONVERT_ARG_CHECKED(FixedArray, literals, 0);
+ CONVERT_ARG_HANDLE_CHECKED(FixedArray, literals, 0);
int index = args.smi_at(1);
Handle<String> pattern = args.at<String>(2);
Handle<String> flags = args.at<String>(3);
@@ -1992,7 +1958,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetName) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSFunction, f, args[0]);
+ CONVERT_ARG_CHECKED(JSFunction, f, 0);
return f->shared()->name();
}
@@ -2001,8 +1967,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetName) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
- CONVERT_CHECKED(JSFunction, f, args[0]);
- CONVERT_CHECKED(String, name, args[1]);
+ CONVERT_ARG_CHECKED(JSFunction, f, 0);
+ CONVERT_ARG_CHECKED(String, name, 1);
f->shared()->set_name(name);
return isolate->heap()->undefined_value();
}
@@ -2011,7 +1977,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetName) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionNameShouldPrintAsAnonymous) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSFunction, f, args[0]);
+ CONVERT_ARG_CHECKED(JSFunction, f, 0);
return isolate->heap()->ToBoolean(
f->shared()->name_should_print_as_anonymous());
}
@@ -2020,7 +1986,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionNameShouldPrintAsAnonymous) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionMarkNameShouldPrintAsAnonymous) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSFunction, f, args[0]);
+ CONVERT_ARG_CHECKED(JSFunction, f, 0);
f->shared()->set_name_should_print_as_anonymous(true);
return isolate->heap()->undefined_value();
}
@@ -2030,7 +1996,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionRemovePrototype) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSFunction, f, args[0]);
+ CONVERT_ARG_CHECKED(JSFunction, f, 0);
Object* obj = f->RemovePrototype();
if (obj->IsFailure()) return obj;
@@ -2042,7 +2008,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetScript) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSFunction, fun, args[0]);
+ CONVERT_ARG_CHECKED(JSFunction, fun, 0);
Handle<Object> script = Handle<Object>(fun->shared()->script(), isolate);
if (!script->IsScript()) return isolate->heap()->undefined_value();
@@ -2051,11 +2017,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetScript) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetSourceCode) {
- NoHandleAllocation ha;
+ HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSFunction, f, args[0]);
- return f->shared()->GetSourceCode();
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, f, 0);
+ Handle<SharedFunctionInfo> shared(f->shared());
+ return *shared->GetSourceCode();
}
@@ -2063,7 +2030,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetScriptSourcePosition) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSFunction, fun, args[0]);
+ CONVERT_ARG_CHECKED(JSFunction, fun, 0);
int pos = fun->shared()->start_position();
return Smi::FromInt(pos);
}
@@ -2072,7 +2039,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetScriptSourcePosition) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetPositionForOffset) {
ASSERT(args.length() == 2);
- CONVERT_CHECKED(Code, code, args[0]);
+ CONVERT_ARG_CHECKED(Code, code, 0);
CONVERT_NUMBER_CHECKED(int, offset, Int32, args[1]);
RUNTIME_ASSERT(0 <= offset && offset < code->Size());
@@ -2086,8 +2053,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetInstanceClassName) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
- CONVERT_CHECKED(JSFunction, fun, args[0]);
- CONVERT_CHECKED(String, name, args[1]);
+ CONVERT_ARG_CHECKED(JSFunction, fun, 0);
+ CONVERT_ARG_CHECKED(String, name, 1);
fun->SetInstanceClassName(name);
return isolate->heap()->undefined_value();
}
@@ -2097,10 +2064,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetLength) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
- CONVERT_CHECKED(JSFunction, fun, args[0]);
- CONVERT_CHECKED(Smi, length, args[1]);
- fun->shared()->set_length(length->value());
- return length;
+ CONVERT_ARG_CHECKED(JSFunction, fun, 0);
+ CONVERT_SMI_ARG_CHECKED(length, 1);
+ fun->shared()->set_length(length);
+ return isolate->heap()->undefined_value();
}
@@ -2108,7 +2075,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetPrototype) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
- CONVERT_CHECKED(JSFunction, fun, args[0]);
+ CONVERT_ARG_CHECKED(JSFunction, fun, 0);
ASSERT(fun->should_have_prototype());
Object* obj;
{ MaybeObject* maybe_obj =
@@ -2122,7 +2089,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetPrototype) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetReadOnlyPrototype) {
NoHandleAllocation ha;
RUNTIME_ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSFunction, function, args[0]);
+ CONVERT_ARG_CHECKED(JSFunction, function, 0);
MaybeObject* maybe_name =
isolate->heap()->AllocateStringFromAscii(CStrVector("prototype"));
@@ -2178,7 +2145,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionIsAPIFunction) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSFunction, f, args[0]);
+ CONVERT_ARG_CHECKED(JSFunction, f, 0);
return isolate->heap()->ToBoolean(f->shared()->IsApiFunction());
}
@@ -2187,7 +2154,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionIsBuiltin) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSFunction, f, args[0]);
+ CONVERT_ARG_CHECKED(JSFunction, f, 0);
return isolate->heap()->ToBoolean(f->IsBuiltin());
}
@@ -2196,7 +2163,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetCode) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
- CONVERT_ARG_CHECKED(JSFunction, target, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, target, 0);
Handle<Object> code = args.at<Object>(1);
Handle<Context> context(target->context());
@@ -2260,7 +2227,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetCode) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_SetExpectedNumberOfProperties) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
- CONVERT_ARG_CHECKED(JSFunction, function, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
CONVERT_SMI_ARG_CHECKED(num, 1);
RUNTIME_ASSERT(num >= 0);
SetExpectedNofProperties(function, num);
@@ -2284,7 +2251,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringCharCodeAt) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
- CONVERT_CHECKED(String, subject, args[0]);
+ CONVERT_ARG_CHECKED(String, subject, 0);
Object* index = args[1];
RUNTIME_ASSERT(index->IsNumber());
@@ -3251,7 +3218,7 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithEmptyString(
Address end_of_string = answer->address() + string_size;
isolate->heap()->CreateFillerObjectAt(end_of_string, delta);
if (Marking::IsBlack(Marking::MarkBitFrom(*answer))) {
- MemoryChunk::IncrementLiveBytes(answer->address(), -delta);
+ MemoryChunk::IncrementLiveBytesFromMutator(answer->address(), -delta);
}
return *answer;
@@ -3261,7 +3228,7 @@ MUST_USE_RESULT static MaybeObject* StringReplaceRegExpWithEmptyString(
RUNTIME_FUNCTION(MaybeObject*, Runtime_StringReplaceRegExpWithString) {
ASSERT(args.length() == 4);
- CONVERT_CHECKED(String, subject, args[0]);
+ CONVERT_ARG_CHECKED(String, subject, 0);
if (!subject->IsFlat()) {
Object* flat_subject;
{ MaybeObject* maybe_flat_subject = subject->TryFlatten();
@@ -3272,7 +3239,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringReplaceRegExpWithString) {
subject = String::cast(flat_subject);
}
- CONVERT_CHECKED(String, replacement, args[2]);
+ CONVERT_ARG_CHECKED(String, replacement, 2);
if (!replacement->IsFlat()) {
Object* flat_replacement;
{ MaybeObject* maybe_flat_replacement = replacement->TryFlatten();
@@ -3283,8 +3250,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringReplaceRegExpWithString) {
replacement = String::cast(flat_replacement);
}
- CONVERT_CHECKED(JSRegExp, regexp, args[1]);
- CONVERT_CHECKED(JSArray, last_match_info, args[3]);
+ CONVERT_ARG_CHECKED(JSRegExp, regexp, 1);
+ CONVERT_ARG_CHECKED(JSArray, last_match_info, 3);
ASSERT(last_match_info->HasFastElements());
@@ -3306,6 +3273,79 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringReplaceRegExpWithString) {
}
+Handle<String> Runtime::StringReplaceOneCharWithString(Isolate* isolate,
+ Handle<String> subject,
+ Handle<String> search,
+ Handle<String> replace,
+ bool* found,
+ int recursion_limit) {
+ if (recursion_limit == 0) return Handle<String>::null();
+ if (subject->IsConsString()) {
+ ConsString* cons = ConsString::cast(*subject);
+ Handle<String> first = Handle<String>(cons->first());
+ Handle<String> second = Handle<String>(cons->second());
+ Handle<String> new_first =
+ StringReplaceOneCharWithString(isolate,
+ first,
+ search,
+ replace,
+ found,
+ recursion_limit - 1);
+ if (*found) return isolate->factory()->NewConsString(new_first, second);
+ if (new_first.is_null()) return new_first;
+
+ Handle<String> new_second =
+ StringReplaceOneCharWithString(isolate,
+ second,
+ search,
+ replace,
+ found,
+ recursion_limit - 1);
+ if (*found) return isolate->factory()->NewConsString(first, new_second);
+ if (new_second.is_null()) return new_second;
+
+ return subject;
+ } else {
+ int index = StringMatch(isolate, subject, search, 0);
+ if (index == -1) return subject;
+ *found = true;
+ Handle<String> first = isolate->factory()->NewSubString(subject, 0, index);
+ Handle<String> cons1 = isolate->factory()->NewConsString(first, replace);
+ Handle<String> second =
+ isolate->factory()->NewSubString(subject, index + 1, subject->length());
+ return isolate->factory()->NewConsString(cons1, second);
+ }
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_StringReplaceOneCharWithString) {
+ ASSERT(args.length() == 3);
+ HandleScope scope(isolate);
+ CONVERT_ARG_HANDLE_CHECKED(String, subject, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, search, 1);
+ CONVERT_ARG_HANDLE_CHECKED(String, replace, 2);
+
+ // If the cons string tree is too deep, we simply abort the recursion and
+ // retry with a flattened subject string.
+ const int kRecursionLimit = 0x1000;
+ bool found = false;
+ Handle<String> result =
+ Runtime::StringReplaceOneCharWithString(isolate,
+ subject,
+ search,
+ replace,
+ &found,
+ kRecursionLimit);
+ if (!result.is_null()) return *result;
+ return *Runtime::StringReplaceOneCharWithString(isolate,
+ FlattenGetString(subject),
+ search,
+ replace,
+ &found,
+ kRecursionLimit);
+}
+
+
// Perform string match of pattern on subject, starting at start index.
// Caller must ensure that 0 <= start_index <= sub->length(),
// and should check that pat->length() + start_index <= sub->length().
@@ -3362,8 +3402,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringIndexOf) {
HandleScope scope(isolate); // create a new handle scope
ASSERT(args.length() == 3);
- CONVERT_ARG_CHECKED(String, sub, 0);
- CONVERT_ARG_CHECKED(String, pat, 1);
+ CONVERT_ARG_HANDLE_CHECKED(String, sub, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, pat, 1);
Object* index = args[2];
uint32_t start_index;
@@ -3414,8 +3454,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringLastIndexOf) {
HandleScope scope(isolate); // create a new handle scope
ASSERT(args.length() == 3);
- CONVERT_ARG_CHECKED(String, sub, 0);
- CONVERT_ARG_CHECKED(String, pat, 1);
+ CONVERT_ARG_HANDLE_CHECKED(String, sub, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, pat, 1);
Object* index = args[2];
uint32_t start_index;
@@ -3473,8 +3513,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringLocaleCompare) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
- CONVERT_CHECKED(String, str1, args[0]);
- CONVERT_CHECKED(String, str2, args[1]);
+ CONVERT_ARG_CHECKED(String, str1, 0);
+ CONVERT_ARG_CHECKED(String, str2, 1);
if (str1 == str2) return Smi::FromInt(0); // Equal.
int str1_length = str1->length();
@@ -3521,7 +3561,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SubString) {
NoHandleAllocation ha;
ASSERT(args.length() == 3);
- CONVERT_CHECKED(String, value, args[0]);
+ CONVERT_ARG_CHECKED(String, value, 0);
int start, end;
// We have a fast integer-only case here to avoid a conversion to double in
// the common case where from and to are Smis.
@@ -3547,9 +3587,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SubString) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_StringMatch) {
ASSERT_EQ(3, args.length());
- CONVERT_ARG_CHECKED(String, subject, 0);
- CONVERT_ARG_CHECKED(JSRegExp, regexp, 1);
- CONVERT_ARG_CHECKED(JSArray, regexp_info, 2);
+ CONVERT_ARG_HANDLE_CHECKED(String, subject, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSRegExp, regexp, 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSArray, regexp_info, 2);
HandleScope handles;
Handle<Object> match = RegExpImpl::Exec(regexp, subject, 0, regexp_info);
@@ -3740,7 +3780,7 @@ static RegExpImpl::IrregexpResult SearchRegExpNoCaptureMultiple(
int required_registers = RegExpImpl::IrregexpPrepare(regexp, subject);
if (required_registers < 0) return RegExpImpl::RE_EXCEPTION;
- OffsetsVector registers(required_registers);
+ OffsetsVector registers(required_registers, isolate);
Vector<int32_t> register_vector(registers.vector(), registers.length());
int subject_length = subject->length();
bool first = true;
@@ -3813,7 +3853,7 @@ static RegExpImpl::IrregexpResult SearchRegExpMultiple(
int required_registers = RegExpImpl::IrregexpPrepare(regexp, subject);
if (required_registers < 0) return RegExpImpl::RE_EXCEPTION;
- OffsetsVector registers(required_registers);
+ OffsetsVector registers(required_registers, isolate);
Vector<int32_t> register_vector(registers.vector(), registers.length());
RegExpImpl::IrregexpResult result =
@@ -3832,7 +3872,7 @@ static RegExpImpl::IrregexpResult SearchRegExpMultiple(
if (result == RegExpImpl::RE_SUCCESS) {
// Need to keep a copy of the previous match for creating last_match_info
// at the end, so we have two vectors that we swap between.
- OffsetsVector registers2(required_registers);
+ OffsetsVector registers2(required_registers, isolate);
Vector<int> prev_register_vector(registers2.vector(), registers2.length());
bool first = true;
do {
@@ -3940,11 +3980,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpExecMultiple) {
ASSERT(args.length() == 4);
HandleScope handles(isolate);
- CONVERT_ARG_CHECKED(String, subject, 1);
+ CONVERT_ARG_HANDLE_CHECKED(String, subject, 1);
if (!subject->IsFlat()) FlattenString(subject);
- CONVERT_ARG_CHECKED(JSRegExp, regexp, 0);
- CONVERT_ARG_CHECKED(JSArray, last_match_info, 2);
- CONVERT_ARG_CHECKED(JSArray, result_array, 3);
+ CONVERT_ARG_HANDLE_CHECKED(JSRegExp, regexp, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSArray, last_match_info, 2);
+ CONVERT_ARG_HANDLE_CHECKED(JSArray, result_array, 3);
ASSERT(last_match_info->HasFastElements());
ASSERT(regexp->GetFlags().is_global());
@@ -4133,8 +4173,7 @@ MaybeObject* Runtime::GetElementOrCharAt(Isolate* isolate,
}
if (object->IsString() || object->IsNumber() || object->IsBoolean()) {
- Handle<Object> prototype = GetPrototype(object);
- return prototype->GetElement(index);
+ return object->GetPrototype()->GetElement(index);
}
return object->GetElement(index);
@@ -4193,23 +4232,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetProperty) {
}
-MaybeObject* TransitionElements(Handle<Object> object,
- ElementsKind to_kind,
- Isolate* isolate) {
- HandleScope scope(isolate);
- if (!object->IsJSObject()) return isolate->ThrowIllegalOperation();
- ElementsKind from_kind =
- Handle<JSObject>::cast(object)->map()->elements_kind();
- if (Map::IsValidElementsTransition(from_kind, to_kind)) {
- Handle<Object> result =
- TransitionElementsKind(Handle<JSObject>::cast(object), to_kind);
- if (result.is_null()) return isolate->ThrowIllegalOperation();
- return *result;
- }
- return isolate->ThrowIllegalOperation();
-}
-
-
// KeyedStringGetProperty is called from KeyedLoadIC::GenerateGeneric.
RUNTIME_FUNCTION(MaybeObject*, Runtime_KeyedGetProperty) {
NoHandleAllocation ha;
@@ -4247,7 +4269,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_KeyedGetProperty) {
// appropriate.
LookupResult result(isolate);
receiver->LocalLookup(key, &result);
- if (result.IsProperty() && result.type() == FIELD) {
+ if (result.IsFound() && result.type() == FIELD) {
int offset = result.GetFieldIndex();
keyed_lookup_cache->Update(receiver_map, key, offset);
return receiver->FastPropertyAt(offset);
@@ -4302,6 +4324,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_KeyedGetProperty) {
args.at<Object>(1));
}
+
+static bool IsValidAccessor(Handle<Object> obj) {
+ return obj->IsUndefined() || obj->IsSpecFunction() || obj->IsNull();
+}
+
+
// Implements part of 8.12.9 DefineOwnProperty.
// There are 3 cases that lead here:
// Step 4b - define a new accessor property.
@@ -4311,32 +4339,21 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_KeyedGetProperty) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineAccessorProperty) {
ASSERT(args.length() == 5);
HandleScope scope(isolate);
- CONVERT_ARG_CHECKED(JSObject, obj, 0);
- CONVERT_CHECKED(String, name, args[1]);
- CONVERT_CHECKED(Smi, flag_setter, args[2]);
- Object* fun = args[3];
- RUNTIME_ASSERT(fun->IsSpecFunction() || fun->IsUndefined());
- CONVERT_CHECKED(Smi, flag_attr, args[4]);
- int unchecked = flag_attr->value();
- RUNTIME_ASSERT((unchecked & ~(READ_ONLY | DONT_ENUM | DONT_DELETE)) == 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0);
RUNTIME_ASSERT(!obj->IsNull());
- LookupResult result(isolate);
- obj->LocalLookupRealNamedProperty(name, &result);
-
+ CONVERT_ARG_HANDLE_CHECKED(String, name, 1);
+ CONVERT_ARG_HANDLE_CHECKED(Object, getter, 2);
+ RUNTIME_ASSERT(IsValidAccessor(getter));
+ CONVERT_ARG_HANDLE_CHECKED(Object, setter, 3);
+ RUNTIME_ASSERT(IsValidAccessor(setter));
+ CONVERT_SMI_ARG_CHECKED(unchecked, 4);
+ RUNTIME_ASSERT((unchecked & ~(READ_ONLY | DONT_ENUM | DONT_DELETE)) == 0);
PropertyAttributes attr = static_cast<PropertyAttributes>(unchecked);
- // If an existing property is either FIELD, NORMAL or CONSTANT_FUNCTION
- // delete it to avoid running into trouble in DefineAccessor, which
- // handles this incorrectly if the property is readonly (does nothing)
- if (result.IsProperty() &&
- (result.type() == FIELD || result.type() == NORMAL
- || result.type() == CONSTANT_FUNCTION)) {
- Object* ok;
- { MaybeObject* maybe_ok =
- obj->DeleteProperty(name, JSReceiver::NORMAL_DELETION);
- if (!maybe_ok->ToObject(&ok)) return maybe_ok;
- }
- }
- return obj->DefineAccessor(name, flag_setter->value() == 0, fun, attr);
+
+ bool fast = obj->HasFastProperties();
+ JSObject::DefineAccessor(obj, name, getter, setter, attr);
+ if (fast) JSObject::TransformToFastProperties(obj, 0);
+ return isolate->heap()->undefined_value();
}
// Implements part of 8.12.9 DefineOwnProperty.
@@ -4348,71 +4365,36 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineAccessorProperty) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineDataProperty) {
ASSERT(args.length() == 4);
HandleScope scope(isolate);
- CONVERT_ARG_CHECKED(JSObject, js_object, 0);
- CONVERT_ARG_CHECKED(String, name, 1);
- Handle<Object> obj_value = args.at<Object>(2);
-
- CONVERT_CHECKED(Smi, flag, args[3]);
- int unchecked = flag->value();
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, js_object, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, name, 1);
+ CONVERT_ARG_HANDLE_CHECKED(Object, obj_value, 2);
+ CONVERT_SMI_ARG_CHECKED(unchecked, 3);
RUNTIME_ASSERT((unchecked & ~(READ_ONLY | DONT_ENUM | DONT_DELETE)) == 0);
-
PropertyAttributes attr = static_cast<PropertyAttributes>(unchecked);
- // Check if this is an element.
- uint32_t index;
- bool is_element = name->AsArrayIndex(&index);
-
- // Special case for elements if any of the flags might be involved.
- // If elements are in fast case we always implicitly assume that:
- // DONT_DELETE: false, DONT_ENUM: false, READ_ONLY: false.
- if (is_element && (attr != NONE ||
- js_object->HasLocalElement(index) == JSObject::DICTIONARY_ELEMENT)) {
- // Normalize the elements to enable attributes on the property.
- if (js_object->IsJSGlobalProxy()) {
- // We do not need to do access checks here since these has already
- // been performed by the call to GetOwnProperty.
- Handle<Object> proto(js_object->GetPrototype());
- // If proxy is detached, ignore the assignment. Alternatively,
- // we could throw an exception.
- if (proto->IsNull()) return *obj_value;
- js_object = Handle<JSObject>::cast(proto);
- }
-
- // Don't allow element properties to be redefined on objects with external
- // array elements.
- if (js_object->HasExternalArrayElements()) {
- Handle<Object> args[2] = { js_object, name };
- Handle<Object> error =
- isolate->factory()->NewTypeError("redef_external_array_element",
- HandleVector(args, 2));
- return isolate->Throw(*error);
- }
-
- Handle<NumberDictionary> dictionary = NormalizeElements(js_object);
- // Make sure that we never go back to fast case.
- dictionary->set_requires_slow_elements();
- PropertyDetails details = PropertyDetails(attr, NORMAL);
- Handle<NumberDictionary> extended_dictionary =
- NumberDictionarySet(dictionary, index, obj_value, details);
- if (*extended_dictionary != *dictionary) {
- if (js_object->GetElementsKind() == NON_STRICT_ARGUMENTS_ELEMENTS) {
- FixedArray::cast(js_object->elements())->set(1, *extended_dictionary);
- } else {
- js_object->set_elements(*extended_dictionary);
- }
- }
- return *obj_value;
- }
-
LookupResult result(isolate);
js_object->LocalLookupRealNamedProperty(*name, &result);
- // To be compatible with safari we do not change the value on API objects
- // in defineProperty. Firefox disagrees here, and actually changes the value.
- if (result.IsProperty() &&
- (result.type() == CALLBACKS) &&
- result.GetCallbackObject()->IsAccessorInfo()) {
- return isolate->heap()->undefined_value();
+ // Special case for callback properties.
+ if (result.IsFound() && result.type() == CALLBACKS) {
+ Object* callback = result.GetCallbackObject();
+ // To be compatible with Safari we do not change the value on API objects
+ // in Object.defineProperty(). Firefox disagrees here, and actually changes
+ // the value.
+ if (callback->IsAccessorInfo()) {
+ return isolate->heap()->undefined_value();
+ }
+ // Avoid redefining foreign callback as data property, just use the stored
+ // setter to update the value instead.
+ // TODO(mstarzinger): So far this only works if property attributes don't
+ // change, this should be fixed once we cleanup the underlying code.
+ if (callback->IsForeign() && result.GetAttributes() == attr) {
+ return js_object->SetPropertyWithCallback(callback,
+ *name,
+ *obj_value,
+ result.holder(),
+ kStrictMode);
+ }
}
// Take special care when attributes are different and there is already
@@ -4429,7 +4411,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineDataProperty) {
// we don't have to check for null.
js_object = Handle<JSObject>(JSObject::cast(js_object->GetPrototype()));
}
- NormalizeProperties(js_object, CLEAR_INOBJECT_PROPERTIES, 0);
+ JSObject::NormalizeProperties(js_object, CLEAR_INOBJECT_PROPERTIES, 0);
// Use IgnoreAttributes version since a readonly property may be
// overridden and SetProperty does not allow this.
return js_object->SetLocalPropertyIgnoreAttributes(*name,
@@ -4445,34 +4427,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineDataProperty) {
}
-// Special case for elements if any of the flags are true.
-// If elements are in fast case we always implicitly assume that:
-// DONT_DELETE: false, DONT_ENUM: false, READ_ONLY: false.
-static MaybeObject* NormalizeObjectSetElement(Isolate* isolate,
- Handle<JSObject> js_object,
- uint32_t index,
- Handle<Object> value,
- PropertyAttributes attr) {
- // Normalize the elements to enable attributes on the property.
- Handle<NumberDictionary> dictionary = NormalizeElements(js_object);
- // Make sure that we never go back to fast case.
- dictionary->set_requires_slow_elements();
- PropertyDetails details = PropertyDetails(attr, NORMAL);
- Handle<NumberDictionary> extended_dictionary =
- NumberDictionarySet(dictionary, index, value, details);
- if (*extended_dictionary != *dictionary) {
- js_object->set_elements(*extended_dictionary);
- }
- return *value;
-}
-
-
MaybeObject* Runtime::SetObjectProperty(Isolate* isolate,
Handle<Object> object,
Handle<Object> key,
Handle<Object> value,
PropertyAttributes attr,
StrictModeFlag strict_mode) {
+ SetPropertyMode set_mode = attr == NONE ? SET_PROPERTY : DEFINE_PROPERTY;
HandleScope scope(isolate);
if (object->IsUndefined() || object->IsNull()) {
@@ -4510,11 +4471,8 @@ MaybeObject* Runtime::SetObjectProperty(Isolate* isolate,
return *value;
}
- if (((attr & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0)) {
- return NormalizeObjectSetElement(isolate, js_object, index, value, attr);
- }
-
- Handle<Object> result = SetElement(js_object, index, value, strict_mode);
+ Handle<Object> result = JSObject::SetElement(
+ js_object, index, value, attr, strict_mode, set_mode);
if (result.is_null()) return Failure::Exception();
return *value;
}
@@ -4522,18 +4480,13 @@ MaybeObject* Runtime::SetObjectProperty(Isolate* isolate,
if (key->IsString()) {
Handle<Object> result;
if (Handle<String>::cast(key)->AsArrayIndex(&index)) {
- if (((attr & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0)) {
- return NormalizeObjectSetElement(isolate,
- js_object,
- index,
- value,
- attr);
- }
- result = SetElement(js_object, index, value, strict_mode);
+ result = JSObject::SetElement(
+ js_object, index, value, attr, strict_mode, set_mode);
} else {
Handle<String> key_string = Handle<String>::cast(key);
key_string->TryFlatten();
- result = SetProperty(js_object, key_string, value, attr, strict_mode);
+ result = JSReceiver::SetProperty(
+ js_object, key_string, value, attr, strict_mode);
}
if (result.is_null()) return Failure::Exception();
return *value;
@@ -4546,7 +4499,8 @@ MaybeObject* Runtime::SetObjectProperty(Isolate* isolate,
Handle<String> name = Handle<String>::cast(converted);
if (name->AsArrayIndex(&index)) {
- return js_object->SetElement(index, *value, strict_mode, true);
+ return js_object->SetElement(
+ index, *value, attr, strict_mode, true, set_mode);
} else {
return js_object->SetProperty(*name, *value, attr, strict_mode);
}
@@ -4574,12 +4528,14 @@ MaybeObject* Runtime::ForceSetObjectProperty(Isolate* isolate,
return *value;
}
- return js_object->SetElement(index, *value, kNonStrictMode, true);
+ return js_object->SetElement(
+ index, *value, attr, kNonStrictMode, false, DEFINE_PROPERTY);
}
if (key->IsString()) {
if (Handle<String>::cast(key)->AsArrayIndex(&index)) {
- return js_object->SetElement(index, *value, kNonStrictMode, true);
+ return js_object->SetElement(
+ index, *value, attr, kNonStrictMode, false, DEFINE_PROPERTY);
} else {
Handle<String> key_string = Handle<String>::cast(key);
key_string->TryFlatten();
@@ -4596,7 +4552,8 @@ MaybeObject* Runtime::ForceSetObjectProperty(Isolate* isolate,
Handle<String> name = Handle<String>::cast(converted);
if (name->AsArrayIndex(&index)) {
- return js_object->SetElement(index, *value, kNonStrictMode, true);
+ return js_object->SetElement(
+ index, *value, attr, kNonStrictMode, false, DEFINE_PROPERTY);
} else {
return js_object->SetLocalPropertyIgnoreAttributes(*name, *value, attr);
}
@@ -4656,7 +4613,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetProperty) {
StrictModeFlag strict_mode = kNonStrictMode;
if (args.length() == 5) {
- CONVERT_STRICT_MODE_ARG(strict_mode_flag, 4);
+ CONVERT_STRICT_MODE_ARG_CHECKED(strict_mode_flag, 4);
strict_mode = strict_mode_flag;
}
@@ -4704,15 +4661,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetNativeFlag) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_StoreArrayLiteralElement) {
RUNTIME_ASSERT(args.length() == 5);
- CONVERT_ARG_CHECKED(JSObject, object, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0);
CONVERT_SMI_ARG_CHECKED(store_index, 1);
Handle<Object> value = args.at<Object>(2);
- CONVERT_ARG_CHECKED(FixedArray, literals, 3);
+ CONVERT_ARG_HANDLE_CHECKED(FixedArray, literals, 3);
CONVERT_SMI_ARG_CHECKED(literal_index, 4);
HandleScope scope;
Object* raw_boilerplate_object = literals->get(literal_index);
- Handle<JSArray> boilerplate_object(JSArray::cast(raw_boilerplate_object));
+ Handle<JSArray> boilerplate(JSArray::cast(raw_boilerplate_object));
#if DEBUG
ElementsKind elements_kind = object->GetElementsKind();
#endif
@@ -4722,18 +4679,24 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StoreArrayLiteralElement) {
if (value->IsNumber()) {
ASSERT(elements_kind == FAST_SMI_ONLY_ELEMENTS);
- TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
+ JSObject::TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
+ if (IsMoreGeneralElementsKindTransition(boilerplate->GetElementsKind(),
+ FAST_DOUBLE_ELEMENTS)) {
+ JSObject::TransitionElementsKind(boilerplate, FAST_DOUBLE_ELEMENTS);
+ }
ASSERT(object->GetElementsKind() == FAST_DOUBLE_ELEMENTS);
- FixedDoubleArray* double_array =
- FixedDoubleArray::cast(object->elements());
+ FixedDoubleArray* double_array = FixedDoubleArray::cast(object->elements());
HeapNumber* number = HeapNumber::cast(*value);
double_array->set(store_index, number->Number());
} else {
ASSERT(elements_kind == FAST_SMI_ONLY_ELEMENTS ||
elements_kind == FAST_DOUBLE_ELEMENTS);
- TransitionElementsKind(object, FAST_ELEMENTS);
- FixedArray* object_array =
- FixedArray::cast(object->elements());
+ JSObject::TransitionElementsKind(object, FAST_ELEMENTS);
+ if (IsMoreGeneralElementsKindTransition(boilerplate->GetElementsKind(),
+ FAST_ELEMENTS)) {
+ JSObject::TransitionElementsKind(boilerplate, FAST_ELEMENTS);
+ }
+ FixedArray* object_array = FixedArray::cast(object->elements());
object_array->set(store_index, *value);
}
return *object;
@@ -4745,13 +4708,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StoreArrayLiteralElement) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_IgnoreAttributesAndSetProperty) {
NoHandleAllocation ha;
RUNTIME_ASSERT(args.length() == 3 || args.length() == 4);
- CONVERT_CHECKED(JSObject, object, args[0]);
- CONVERT_CHECKED(String, name, args[1]);
+ CONVERT_ARG_CHECKED(JSObject, object, 0);
+ CONVERT_ARG_CHECKED(String, name, 1);
// Compute attributes.
PropertyAttributes attributes = NONE;
if (args.length() == 4) {
- CONVERT_CHECKED(Smi, value_obj, args[3]);
- int unchecked_value = value_obj->value();
+ CONVERT_SMI_ARG_CHECKED(unchecked_value, 3);
// Only attribute bits should be set.
RUNTIME_ASSERT(
(unchecked_value & ~(READ_ONLY | DONT_ENUM | DONT_DELETE)) == 0);
@@ -4767,10 +4729,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeleteProperty) {
NoHandleAllocation ha;
ASSERT(args.length() == 3);
- CONVERT_CHECKED(JSReceiver, object, args[0]);
- CONVERT_CHECKED(String, key, args[1]);
- CONVERT_SMI_ARG_CHECKED(strict, 2);
- return object->DeleteProperty(key, (strict == kStrictMode)
+ CONVERT_ARG_CHECKED(JSReceiver, object, 0);
+ CONVERT_ARG_CHECKED(String, key, 1);
+ CONVERT_STRICT_MODE_ARG_CHECKED(strict_mode, 2);
+ return object->DeleteProperty(key, (strict_mode == kStrictMode)
? JSReceiver::STRICT_DELETION
: JSReceiver::NORMAL_DELETION);
}
@@ -4797,7 +4759,7 @@ static Object* HasLocalPropertyImplementation(Isolate* isolate,
RUNTIME_FUNCTION(MaybeObject*, Runtime_HasLocalProperty) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
- CONVERT_CHECKED(String, key, args[1]);
+ CONVERT_ARG_CHECKED(String, key, 1);
uint32_t index;
const bool key_is_array_index = key->AsArrayIndex(&index);
@@ -4835,8 +4797,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_HasLocalProperty) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_HasProperty) {
NoHandleAllocation na;
ASSERT(args.length() == 2);
- CONVERT_CHECKED(JSReceiver, receiver, args[0]);
- CONVERT_CHECKED(String, key, args[1]);
+ CONVERT_ARG_CHECKED(JSReceiver, receiver, 0);
+ CONVERT_ARG_CHECKED(String, key, 1);
bool result = receiver->HasProperty(key);
if (isolate->has_pending_exception()) return Failure::Exception();
@@ -4847,10 +4809,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_HasProperty) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_HasElement) {
NoHandleAllocation na;
ASSERT(args.length() == 2);
- CONVERT_CHECKED(JSReceiver, receiver, args[0]);
- CONVERT_CHECKED(Smi, index, args[1]);
+ CONVERT_ARG_CHECKED(JSReceiver, receiver, 0);
+ CONVERT_SMI_ARG_CHECKED(index, 1);
- bool result = receiver->HasElement(index->value());
+ bool result = receiver->HasElement(index);
if (isolate->has_pending_exception()) return Failure::Exception();
return isolate->heap()->ToBoolean(result);
}
@@ -4860,8 +4822,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsPropertyEnumerable) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
- CONVERT_CHECKED(JSObject, object, args[0]);
- CONVERT_CHECKED(String, key, args[1]);
+ CONVERT_ARG_CHECKED(JSObject, object, 0);
+ CONVERT_ARG_CHECKED(String, key, 1);
uint32_t index;
if (key->AsArrayIndex(&index)) {
@@ -4883,15 +4845,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsPropertyEnumerable) {
object = JSObject::cast(proto);
}
FixedArray* elements = FixedArray::cast(object->elements());
- NumberDictionary* dictionary = NULL;
+ SeededNumberDictionary* dictionary = NULL;
if (elements->map() ==
isolate->heap()->non_strict_arguments_elements_map()) {
- dictionary = NumberDictionary::cast(elements->get(1));
+ dictionary = SeededNumberDictionary::cast(elements->get(1));
} else {
- dictionary = NumberDictionary::cast(elements);
+ dictionary = SeededNumberDictionary::cast(elements);
}
int entry = dictionary->FindEntry(index);
- ASSERT(entry != NumberDictionary::kNotFound);
+ ASSERT(entry != SeededNumberDictionary::kNotFound);
PropertyDetails details = dictionary->DetailsAt(entry);
return isolate->heap()->ToBoolean(!details.IsDontEnum());
}
@@ -4906,7 +4868,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IsPropertyEnumerable) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetPropertyNames) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_ARG_CHECKED(JSReceiver, object, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSReceiver, object, 0);
bool threw = false;
Handle<JSArray> result = GetKeysFor(object, &threw);
if (threw) return Failure::Exception();
@@ -4922,7 +4884,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetPropertyNames) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetPropertyNamesFast) {
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSReceiver, raw_object, args[0]);
+ CONVERT_ARG_CHECKED(JSReceiver, raw_object, 0);
if (raw_object->IsSimpleEnum()) return raw_object->map();
@@ -4963,7 +4925,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLocalPropertyNames) {
if (!args[0]->IsJSObject()) {
return isolate->heap()->undefined_value();
}
- CONVERT_ARG_CHECKED(JSObject, obj, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0);
// Skip the global proxy as it has no properties and always delegates to the
// real global object.
@@ -4996,7 +4958,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLocalPropertyNames) {
return *isolate->factory()->NewJSArray(0);
}
int n;
- n = jsproto->NumberOfLocalProperties(static_cast<PropertyAttributes>(NONE));
+ n = jsproto->NumberOfLocalProperties();
local_property_count[i] = n;
total_property_count += n;
if (i < length - 1) {
@@ -5050,7 +5012,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLocalElementNames) {
if (!args[0]->IsJSObject()) {
return isolate->heap()->undefined_value();
}
- CONVERT_ARG_CHECKED(JSObject, obj, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0);
int n = obj->NumberOfLocalElements(static_cast<PropertyAttributes>(NONE));
Handle<FixedArray> names = isolate->factory()->NewFixedArray(n);
@@ -5067,7 +5029,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetInterceptorInfo) {
if (!args[0]->IsJSObject()) {
return Smi::FromInt(0);
}
- CONVERT_ARG_CHECKED(JSObject, obj, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0);
int result = 0;
if (obj->HasNamedInterceptor()) result |= 2;
@@ -5082,7 +5044,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetInterceptorInfo) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetNamedInterceptorPropertyNames) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_ARG_CHECKED(JSObject, obj, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0);
if (obj->HasNamedInterceptor()) {
v8::Handle<v8::Array> result = GetKeysForNamedInterceptor(obj, obj);
@@ -5097,7 +5059,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetNamedInterceptorPropertyNames) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetIndexedInterceptorElementNames) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_ARG_CHECKED(JSObject, obj, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0);
if (obj->HasIndexedInterceptor()) {
v8::Handle<v8::Array> result = GetKeysForIndexedInterceptor(obj, obj);
@@ -5109,7 +5071,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetIndexedInterceptorElementNames) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_LocalKeys) {
ASSERT_EQ(args.length(), 1);
- CONVERT_CHECKED(JSObject, raw_object, args[0]);
+ CONVERT_ARG_CHECKED(JSObject, raw_object, 0);
HandleScope scope(isolate);
Handle<JSObject> object(raw_object);
@@ -5196,7 +5158,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetArgumentsProperty) {
if (key->Equals(isolate->heap()->callee_symbol())) {
Object* function = frame->function();
if (function->IsJSFunction() &&
- JSFunction::cast(function)->shared()->strict_mode()) {
+ !JSFunction::cast(function)->shared()->is_classic_mode()) {
return isolate->Throw(*isolate->factory()->NewTypeError(
"strict_arguments_callee", HandleVector<Object>(NULL, 0)));
}
@@ -5209,31 +5171,20 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetArgumentsProperty) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_ToFastProperties) {
- HandleScope scope(isolate);
-
ASSERT(args.length() == 1);
- Handle<Object> object = args.at<Object>(0);
- if (object->IsJSObject()) {
- Handle<JSObject> js_object = Handle<JSObject>::cast(object);
- if (!js_object->HasFastProperties() && !js_object->IsGlobalObject()) {
- MaybeObject* ok = js_object->TransformToFastProperties(0);
- if (ok->IsRetryAfterGC()) return ok;
- }
- }
- return *object;
+ Object* object = args[0];
+ return (object->IsJSObject() && !object->IsGlobalObject())
+ ? JSObject::cast(object)->TransformToFastProperties(0)
+ : object;
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_ToSlowProperties) {
- HandleScope scope(isolate);
-
ASSERT(args.length() == 1);
- Handle<Object> object = args.at<Object>(0);
- if (object->IsJSObject() && !object->IsJSGlobalProxy()) {
- Handle<JSObject> js_object = Handle<JSObject>::cast(object);
- NormalizeProperties(js_object, CLEAR_INOBJECT_PROPERTIES, 0);
- }
- return *object;
+ Object* obj = args[0];
+ return (obj->IsJSObject() && !obj->IsJSGlobalProxy())
+ ? JSObject::cast(obj)->NormalizeProperties(CLEAR_INOBJECT_PROPERTIES, 0)
+ : obj;
}
@@ -5312,7 +5263,7 @@ static int ParseDecimalInteger(const char*s, int from, int to) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_StringToNumber) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
- CONVERT_CHECKED(String, subject, args[0]);
+ CONVERT_ARG_CHECKED(String, subject, 0);
subject->TryFlatten();
// Fast case: short integer or some sorts of junk values.
@@ -5368,7 +5319,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringFromCharCodeArray) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSArray, codes, args[0]);
+ CONVERT_ARG_CHECKED(JSArray, codes, 0);
int length = Smi::cast(codes->length())->value();
// Check if the string can be ASCII.
@@ -5448,7 +5399,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_URIEscape) {
const char hex_chars[] = "0123456789ABCDEF";
NoHandleAllocation ha;
ASSERT(args.length() == 1);
- CONVERT_CHECKED(String, source, args[0]);
+ CONVERT_ARG_CHECKED(String, source, 0);
source->TryFlatten();
@@ -5566,7 +5517,7 @@ static inline int Unescape(String* source,
RUNTIME_FUNCTION(MaybeObject*, Runtime_URIUnescape) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
- CONVERT_CHECKED(String, source, args[0]);
+ CONVERT_ARG_CHECKED(String, source, 0);
source->TryFlatten();
@@ -5823,7 +5774,7 @@ static MaybeObject* QuoteJsonString(Isolate* isolate,
RUNTIME_FUNCTION(MaybeObject*, Runtime_QuoteJSONString) {
NoHandleAllocation ha;
- CONVERT_CHECKED(String, str, args[0]);
+ CONVERT_ARG_CHECKED(String, str, 0);
if (!str->IsFlat()) {
MaybeObject* try_flatten = str->TryFlatten();
Object* flat;
@@ -5847,7 +5798,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_QuoteJSONString) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_QuoteJSONStringComma) {
NoHandleAllocation ha;
- CONVERT_CHECKED(String, str, args[0]);
+ CONVERT_ARG_CHECKED(String, str, 0);
if (!str->IsFlat()) {
MaybeObject* try_flatten = str->TryFlatten();
Object* flat;
@@ -5924,7 +5875,7 @@ static MaybeObject* QuoteJsonStringArray(Isolate* isolate,
RUNTIME_FUNCTION(MaybeObject*, Runtime_QuoteJSONStringArray) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSArray, array, args[0]);
+ CONVERT_ARG_CHECKED(JSArray, array, 0);
if (!array->HasFastElements()) return isolate->heap()->undefined_value();
FixedArray* elements = FixedArray::cast(array->elements());
@@ -5966,7 +5917,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_QuoteJSONStringArray) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_StringParseInt) {
NoHandleAllocation ha;
- CONVERT_CHECKED(String, s, args[0]);
+ CONVERT_ARG_CHECKED(String, s, 0);
CONVERT_SMI_ARG_CHECKED(radix, 1);
s->TryFlatten();
@@ -5979,7 +5930,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringParseInt) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_StringParseFloat) {
NoHandleAllocation ha;
- CONVERT_CHECKED(String, str, args[0]);
+ CONVERT_ARG_CHECKED(String, str, 0);
// ECMA-262 section 15.1.2.3, empty string is NaN
double value = StringToDouble(isolate->unicode_cache(),
@@ -6004,8 +5955,8 @@ MUST_USE_RESULT static MaybeObject* ConvertCaseHelper(
//
// Allocate the resulting string.
//
- // NOTE: This assumes that the upper/lower case of an ascii
- // character is also ascii. This is currently the case, but it
+ // NOTE: This assumes that the upper/lower case of an ASCII
+ // character is also ASCII. This is currently the case, but it
// might break in the future if we implement more context and locale
// dependent upper/lower conversions.
Object* o;
@@ -6105,9 +6056,9 @@ static const uintptr_t kOneInEveryByte = kUintptrAllBitsSet / 0xFF;
// This function is only useful when it can be inlined and the
// boundaries are statically known.
// Requires: all bytes in the input word and the boundaries must be
-// ascii (less than 0x7F).
+// ASCII (less than 0x7F).
static inline uintptr_t AsciiRangeMask(uintptr_t w, char m, char n) {
- // Every byte in an ascii string is less than or equal to 0x7F.
+ // Every byte in an ASCII string is less than or equal to 0x7F.
ASSERT((w & (kOneInEveryByte * 0x7F)) == w);
// Use strict inequalities since in edge cases the function could be
// further simplified.
@@ -6228,17 +6179,17 @@ MUST_USE_RESULT static MaybeObject* ConvertCase(
Isolate* isolate,
unibrow::Mapping<typename ConvertTraits::UnibrowConverter, 128>* mapping) {
NoHandleAllocation ha;
- CONVERT_CHECKED(String, s, args[0]);
+ CONVERT_ARG_CHECKED(String, s, 0);
s = s->TryFlattenGetString();
const int length = s->length();
// Assume that the string is not empty; we need this assumption later
if (length == 0) return s;
- // Simpler handling of ascii strings.
+ // Simpler handling of ASCII strings.
//
- // NOTE: This assumes that the upper/lower case of an ascii
- // character is also ascii. This is currently the case, but it
+ // NOTE: This assumes that the upper/lower case of an ASCII
+ // character is also ASCII. This is currently the case, but it
// might break in the future if we implement more context and locale
// dependent upper/lower conversions.
if (s->IsSeqAsciiString()) {
@@ -6290,9 +6241,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringTrim) {
NoHandleAllocation ha;
ASSERT(args.length() == 3);
- CONVERT_CHECKED(String, s, args[0]);
- CONVERT_BOOLEAN_CHECKED(trimLeft, args[1]);
- CONVERT_BOOLEAN_CHECKED(trimRight, args[2]);
+ CONVERT_ARG_CHECKED(String, s, 0);
+ CONVERT_BOOLEAN_ARG_CHECKED(trimLeft, 1);
+ CONVERT_BOOLEAN_ARG_CHECKED(trimRight, 2);
s->TryFlatten();
int length = s->length();
@@ -6317,8 +6268,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringTrim) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_StringSplit) {
ASSERT(args.length() == 3);
HandleScope handle_scope(isolate);
- CONVERT_ARG_CHECKED(String, subject, 0);
- CONVERT_ARG_CHECKED(String, pattern, 1);
+ CONVERT_ARG_HANDLE_CHECKED(String, subject, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, pattern, 1);
CONVERT_NUMBER_CHECKED(uint32_t, limit, Uint32, args[2]);
int subject_length = subject->length();
@@ -6365,7 +6316,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringSplit) {
int part_count = indices.length();
Handle<JSArray> result = isolate->factory()->NewJSArray(part_count);
- MaybeObject* maybe_result = result->EnsureCanContainNonSmiElements();
+ MaybeObject* maybe_result = result->EnsureCanContainHeapObjectElements();
if (maybe_result->IsFailure()) return maybe_result;
result->set_length(Smi::FromInt(part_count));
@@ -6401,7 +6352,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringSplit) {
}
-// Copies ascii characters to the given fixed array looking up
+// Copies ASCII characters to the given fixed array looking up
// one-char strings in the cache. Gives up on the first char that is
// not in the cache and fills the remainder with smi zeros. Returns
// the length of the successfully copied prefix.
@@ -6439,7 +6390,7 @@ static int CopyCachedAsciiCharsToArray(Heap* heap,
RUNTIME_FUNCTION(MaybeObject*, Runtime_StringToArray) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
- CONVERT_ARG_CHECKED(String, s, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, s, 0);
CONVERT_NUMBER_CHECKED(uint32_t, limit, Uint32, args[1]);
s = FlattenGetString(s);
@@ -6490,7 +6441,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringToArray) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_NewStringWrapper) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
- CONVERT_CHECKED(String, value, args[0]);
+ CONVERT_ARG_CHECKED(String, value, 0);
return value->ToObject();
}
@@ -6681,8 +6632,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberMod) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_StringAdd) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
- CONVERT_CHECKED(String, str1, args[0]);
- CONVERT_CHECKED(String, str2, args[1]);
+ CONVERT_ARG_CHECKED(String, str1, 0);
+ CONVERT_ARG_CHECKED(String, str2, 1);
isolate->counters()->string_add_runtime()->Increment();
return isolate->heap()->AllocateConsString(str1, str2);
}
@@ -6730,18 +6681,18 @@ static inline void StringBuilderConcatHelper(String* special,
RUNTIME_FUNCTION(MaybeObject*, Runtime_StringBuilderConcat) {
NoHandleAllocation ha;
ASSERT(args.length() == 3);
- CONVERT_CHECKED(JSArray, array, args[0]);
+ CONVERT_ARG_CHECKED(JSArray, array, 0);
if (!args[1]->IsSmi()) {
isolate->context()->mark_out_of_memory();
return Failure::OutOfMemoryException();
}
int array_length = args.smi_at(1);
- CONVERT_CHECKED(String, special, args[2]);
+ CONVERT_ARG_CHECKED(String, special, 2);
// This assumption is used by the slice encoding in one or two smis.
ASSERT(Smi::kMaxValue >= String::kMaxLength);
- MaybeObject* maybe_result = array->EnsureCanContainNonSmiElements();
+ MaybeObject* maybe_result = array->EnsureCanContainHeapObjectElements();
if (maybe_result->IsFailure()) return maybe_result;
int special_length = special->length();
@@ -6805,6 +6756,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringBuilderConcat) {
ascii = false;
}
} else {
+ ASSERT(!elt->IsTheHole());
return isolate->Throw(isolate->heap()->illegal_argument_symbol());
}
if (increment > String::kMaxLength - position) {
@@ -6846,13 +6798,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringBuilderConcat) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_StringBuilderJoin) {
NoHandleAllocation ha;
ASSERT(args.length() == 3);
- CONVERT_CHECKED(JSArray, array, args[0]);
+ CONVERT_ARG_CHECKED(JSArray, array, 0);
if (!args[1]->IsSmi()) {
isolate->context()->mark_out_of_memory();
return Failure::OutOfMemoryException();
}
int array_length = args.smi_at(1);
- CONVERT_CHECKED(String, separator, args[2]);
+ CONVERT_ARG_CHECKED(String, separator, 2);
if (!array->HasFastElements()) {
return isolate->Throw(isolate->heap()->illegal_argument_symbol());
@@ -6970,11 +6922,11 @@ static void JoinSparseArrayWithSeparator(FixedArray* elements,
RUNTIME_FUNCTION(MaybeObject*, Runtime_SparseJoinWithSeparator) {
NoHandleAllocation ha;
ASSERT(args.length() == 3);
- CONVERT_CHECKED(JSArray, elements_array, args[0]);
+ CONVERT_ARG_CHECKED(JSArray, elements_array, 0);
RUNTIME_ASSERT(elements_array->HasFastElements() ||
elements_array->HasFastSmiOnlyElements());
CONVERT_NUMBER_CHECKED(uint32_t, array_length, Uint32, args[1]);
- CONVERT_CHECKED(String, separator, args[2]);
+ CONVERT_ARG_CHECKED(String, separator, 2);
// elements_array is fast-mode JSarray of alternating positions
// (increasing order) and strings.
// array_length is length of original array (used to add separators);
@@ -6982,8 +6934,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SparseJoinWithSeparator) {
// Find total length of join result.
int string_length = 0;
- bool is_ascii = true;
- int max_string_length = SeqAsciiString::kMaxLength;
+ bool is_ascii = separator->IsAsciiRepresentation();
+ int max_string_length;
+ if (is_ascii) {
+ max_string_length = SeqAsciiString::kMaxLength;
+ } else {
+ max_string_length = SeqTwoByteString::kMaxLength;
+ }
bool overflow = false;
CONVERT_NUMBER_CHECKED(int, elements_length,
Int32, elements_array->length());
@@ -6991,7 +6948,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SparseJoinWithSeparator) {
FixedArray* elements = FixedArray::cast(elements_array->elements());
for (int i = 0; i < elements_length; i += 2) {
RUNTIME_ASSERT(elements->get(i)->IsNumber());
- CONVERT_CHECKED(String, string, elements->get(i + 1));
+ RUNTIME_ASSERT(elements->get(i + 1)->IsString());
+ String* string = String::cast(elements->get(i + 1));
int length = string->length();
if (is_ascii && !string->IsAsciiRepresentation()) {
is_ascii = false;
@@ -7149,8 +7107,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringEquals) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
- CONVERT_CHECKED(String, x, args[0]);
- CONVERT_CHECKED(String, y, args[1]);
+ CONVERT_ARG_CHECKED(String, x, 0);
+ CONVERT_ARG_CHECKED(String, y, 1);
bool not_equal = !x->Equals(y);
// This is slightly convoluted because the value that signifies
@@ -7163,29 +7121,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringEquals) {
}
-RUNTIME_FUNCTION(MaybeObject*, Runtime_UserObjectEquals) {
- NoHandleAllocation ha;
- ASSERT(args.length() == 2);
-
- CONVERT_CHECKED(JSObject, lhs, args[1]);
- CONVERT_CHECKED(JSObject, rhs, args[0]);
-
- bool result;
-
- v8::UserObjectComparisonCallback callback = isolate->UserObjectComparisonCallback();
- if (callback) {
- HandleScope scope(isolate);
- Handle<JSObject> lhs_handle(lhs);
- Handle<JSObject> rhs_handle(rhs);
- result = callback(v8::Utils::ToLocal(lhs_handle), v8::Utils::ToLocal(rhs_handle));
- } else {
- result = (lhs == rhs);
- }
-
- return Smi::FromInt(result?0:1);
-}
-
-
RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberCompare) {
NoHandleAllocation ha;
ASSERT(args.length() == 3);
@@ -7204,12 +7139,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NumberCompare) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_SmiLexicographicCompare) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
-
- // Extract the integer values from the Smis.
- CONVERT_CHECKED(Smi, x, args[0]);
- CONVERT_CHECKED(Smi, y, args[1]);
- int x_value = x->value();
- int y_value = y->value();
+ CONVERT_SMI_ARG_CHECKED(x_value, 0);
+ CONVERT_SMI_ARG_CHECKED(y_value, 1);
// If the integers are equal so are the string representations.
if (x_value == y_value) return Smi::FromInt(EQUAL);
@@ -7349,8 +7280,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StringCompare) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
- CONVERT_CHECKED(String, x, args[0]);
- CONVERT_CHECKED(String, y, args[1]);
+ CONVERT_ARG_CHECKED(String, x, 0);
+ CONVERT_ARG_CHECKED(String, y, 1);
isolate->counters()->string_compare_runtime()->Increment();
@@ -7485,7 +7416,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Math_log) {
return isolate->transcendental_cache()->Get(TranscendentalCache::LOG, x);
}
-
+// Slow version of Math.pow. We check for fast paths for special cases.
+// Used if SSE2/VFP3 is not available.
RUNTIME_FUNCTION(MaybeObject*, Runtime_Math_pow) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
@@ -7501,22 +7433,38 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Math_pow) {
}
CONVERT_DOUBLE_ARG_CHECKED(y, 1);
- return isolate->heap()->AllocateHeapNumber(power_double_double(x, y));
+ int y_int = static_cast<int>(y);
+ double result;
+ if (y == y_int) {
+ result = power_double_int(x, y_int); // Returns 1 if exponent is 0.
+ } else if (y == 0.5) {
+ result = (isinf(x)) ? V8_INFINITY
+ : fast_sqrt(x + 0.0); // Convert -0 to +0.
+ } else if (y == -0.5) {
+ result = (isinf(x)) ? 0
+ : 1.0 / fast_sqrt(x + 0.0); // Convert -0 to +0.
+ } else {
+ result = power_double_double(x, y);
+ }
+ if (isnan(result)) return isolate->heap()->nan_value();
+ return isolate->heap()->AllocateHeapNumber(result);
}
-// Fast version of Math.pow if we know that y is not an integer and
-// y is not -0.5 or 0.5. Used as slowcase from codegen.
+// Fast version of Math.pow if we know that y is not an integer and y is not
+// -0.5 or 0.5. Used as slow case from full codegen.
RUNTIME_FUNCTION(MaybeObject*, Runtime_Math_pow_cfunction) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
+ isolate->counters()->math_pow()->Increment();
+
CONVERT_DOUBLE_ARG_CHECKED(x, 0);
CONVERT_DOUBLE_ARG_CHECKED(y, 1);
if (y == 0) {
return Smi::FromInt(1);
- } else if (isnan(y) || ((x == 1 || x == -1) && isinf(y))) {
- return isolate->heap()->nan_value();
} else {
- return isolate->heap()->AllocateHeapNumber(pow(x, y));
+ double result = power_double_double(x, y);
+ if (isnan(result)) return isolate->heap()->nan_value();
+ return isolate->heap()->AllocateHeapNumber(result);
}
}
@@ -7546,7 +7494,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RoundNumber) {
// We compare with kSmiValueSize - 2 because (2^30 - 0.1) has exponent 29 and
// should be rounded to 2^30, which is not smi (for 31-bit smis, similar
- // agument holds for 32-bit smis).
+ // argument holds for 32-bit smis).
if (!sign && exponent < kSmiValueSize - 2) {
return Smi::FromInt(static_cast<int>(value + 0.5));
}
@@ -7580,7 +7528,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Math_sqrt) {
isolate->counters()->math_sqrt()->Increment();
CONVERT_DOUBLE_ARG_CHECKED(x, 0);
- return isolate->heap()->AllocateHeapNumber(sqrt(x));
+ return isolate->heap()->AllocateHeapNumber(fast_sqrt(x));
}
@@ -7594,51 +7542,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Math_tan) {
}
-static int MakeDay(int year, int month) {
- static const int day_from_month[] = {0, 31, 59, 90, 120, 151,
- 181, 212, 243, 273, 304, 334};
- static const int day_from_month_leap[] = {0, 31, 60, 91, 121, 152,
- 182, 213, 244, 274, 305, 335};
-
- year += month / 12;
- month %= 12;
- if (month < 0) {
- year--;
- month += 12;
- }
-
- ASSERT(month >= 0);
- ASSERT(month < 12);
-
- // year_delta is an arbitrary number such that:
- // a) year_delta = -1 (mod 400)
- // b) year + year_delta > 0 for years in the range defined by
- // ECMA 262 - 15.9.1.1, i.e. upto 100,000,000 days on either side of
- // Jan 1 1970. This is required so that we don't run into integer
- // division of negative numbers.
- // c) there shouldn't be an overflow for 32-bit integers in the following
- // operations.
- static const int year_delta = 399999;
- static const int base_day = 365 * (1970 + year_delta) +
- (1970 + year_delta) / 4 -
- (1970 + year_delta) / 100 +
- (1970 + year_delta) / 400;
-
- int year1 = year + year_delta;
- int day_from_year = 365 * year1 +
- year1 / 4 -
- year1 / 100 +
- year1 / 400 -
- base_day;
-
- if ((year % 4 != 0) || (year % 100 == 0 && year % 400 != 0)) {
- return day_from_year + day_from_month[month];
- }
-
- return day_from_year + day_from_month_leap[month];
-}
-
-
RUNTIME_FUNCTION(MaybeObject*, Runtime_DateMakeDay) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
@@ -7646,319 +7549,44 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DateMakeDay) {
CONVERT_SMI_ARG_CHECKED(year, 0);
CONVERT_SMI_ARG_CHECKED(month, 1);
- return Smi::FromInt(MakeDay(year, month));
-}
-
-
-static const int kDays4Years[] = {0, 365, 2 * 365, 3 * 365 + 1};
-static const int kDaysIn4Years = 4 * 365 + 1;
-static const int kDaysIn100Years = 25 * kDaysIn4Years - 1;
-static const int kDaysIn400Years = 4 * kDaysIn100Years + 1;
-static const int kDays1970to2000 = 30 * 365 + 7;
-static const int kDaysOffset = 1000 * kDaysIn400Years + 5 * kDaysIn400Years -
- kDays1970to2000;
-static const int kYearsOffset = 400000;
-
-static const char kDayInYear[] = {
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
-
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
-
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
-
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30,
- 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
- 22, 23, 24, 25, 26, 27, 28, 29, 30, 31};
-
-static const char kMonthInYear[] = {
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2,
- 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
- 3, 3, 3, 3, 3,
- 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
- 4, 4, 4, 4, 4, 4,
- 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
- 5, 5, 5, 5, 5,
- 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
- 6, 6, 6, 6, 6, 6,
- 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
- 7, 7, 7, 7, 7, 7,
- 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
- 8, 8, 8, 8, 8,
- 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 9,
- 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
- 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
- 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
- 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
-
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2,
- 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
- 3, 3, 3, 3, 3,
- 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
- 4, 4, 4, 4, 4, 4,
- 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
- 5, 5, 5, 5, 5,
- 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
- 6, 6, 6, 6, 6, 6,
- 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
- 7, 7, 7, 7, 7, 7,
- 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
- 8, 8, 8, 8, 8,
- 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 9,
- 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
- 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
- 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
- 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
-
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2,
- 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
- 3, 3, 3, 3, 3,
- 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
- 4, 4, 4, 4, 4, 4,
- 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
- 5, 5, 5, 5, 5,
- 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
- 6, 6, 6, 6, 6, 6,
- 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
- 7, 7, 7, 7, 7, 7,
- 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
- 8, 8, 8, 8, 8,
- 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 9,
- 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
- 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
- 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
- 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
-
- 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0,
- 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1,
- 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2,
- 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
- 3, 3, 3, 3, 3,
- 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
- 4, 4, 4, 4, 4, 4,
- 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
- 5, 5, 5, 5, 5,
- 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
- 6, 6, 6, 6, 6, 6,
- 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
- 7, 7, 7, 7, 7, 7,
- 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
- 8, 8, 8, 8, 8,
- 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
- 9, 9, 9, 9, 9, 9,
- 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
- 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
- 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
- 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11};
-
-
-// This function works for dates from 1970 to 2099.
-static inline void DateYMDFromTimeAfter1970(int date,
- int& year, int& month, int& day) {
-#ifdef DEBUG
- int save_date = date; // Need this for ASSERT in the end.
-#endif
-
- year = 1970 + (4 * date + 2) / kDaysIn4Years;
- date %= kDaysIn4Years;
-
- month = kMonthInYear[date];
- day = kDayInYear[date];
-
- ASSERT(MakeDay(year, month) + day - 1 == save_date);
+ return Smi::FromInt(isolate->date_cache()->DaysFromYearMonth(year, month));
}
-static inline void DateYMDFromTimeSlow(int date,
- int& year, int& month, int& day) {
-#ifdef DEBUG
- int save_date = date; // Need this for ASSERT in the end.
-#endif
-
- date += kDaysOffset;
- year = 400 * (date / kDaysIn400Years) - kYearsOffset;
- date %= kDaysIn400Years;
-
- ASSERT(MakeDay(year, 0) + date == save_date);
-
- date--;
- int yd1 = date / kDaysIn100Years;
- date %= kDaysIn100Years;
- year += 100 * yd1;
-
- date++;
- int yd2 = date / kDaysIn4Years;
- date %= kDaysIn4Years;
- year += 4 * yd2;
-
- date--;
- int yd3 = date / 365;
- date %= 365;
- year += yd3;
-
- bool is_leap = (!yd1 || yd2) && !yd3;
-
- ASSERT(date >= -1);
- ASSERT(is_leap || (date >= 0));
- ASSERT((date < 365) || (is_leap && (date < 366)));
- ASSERT(is_leap == ((year % 4 == 0) && (year % 100 || (year % 400 == 0))));
- ASSERT(is_leap || ((MakeDay(year, 0) + date) == save_date));
- ASSERT(!is_leap || ((MakeDay(year, 0) + date + 1) == save_date));
-
- if (is_leap) {
- day = kDayInYear[2*365 + 1 + date];
- month = kMonthInYear[2*365 + 1 + date];
- } else {
- day = kDayInYear[date];
- month = kMonthInYear[date];
- }
-
- ASSERT(MakeDay(year, month) + day - 1 == save_date);
-}
-
+RUNTIME_FUNCTION(MaybeObject*, Runtime_DateSetValue) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 3);
-static inline void DateYMDFromTime(int date,
- int& year, int& month, int& day) {
- if (date >= 0 && date < 32 * kDaysIn4Years) {
- DateYMDFromTimeAfter1970(date, year, month, day);
+ CONVERT_ARG_HANDLE_CHECKED(JSDate, date, 0);
+ CONVERT_DOUBLE_ARG_CHECKED(time, 1);
+ CONVERT_SMI_ARG_CHECKED(is_utc, 2);
+
+ DateCache* date_cache = isolate->date_cache();
+
+ Object* value = NULL;
+ bool is_value_nan = false;
+ if (isnan(time)) {
+ value = isolate->heap()->nan_value();
+ is_value_nan = true;
+ } else if (!is_utc &&
+ (time < -DateCache::kMaxTimeBeforeUTCInMs ||
+ time > DateCache::kMaxTimeBeforeUTCInMs)) {
+ value = isolate->heap()->nan_value();
+ is_value_nan = true;
} else {
- DateYMDFromTimeSlow(date, year, month, day);
+ time = is_utc ? time : date_cache->ToUTC(static_cast<int64_t>(time));
+ if (time < -DateCache::kMaxTimeInMs ||
+ time > DateCache::kMaxTimeInMs) {
+ value = isolate->heap()->nan_value();
+ is_value_nan = true;
+ } else {
+ MaybeObject* maybe_result =
+ isolate->heap()->AllocateHeapNumber(DoubleToInteger(time));
+ if (!maybe_result->ToObject(&value)) return maybe_result;
+ }
}
-}
-
-
-RUNTIME_FUNCTION(MaybeObject*, Runtime_DateYMDFromTime) {
- NoHandleAllocation ha;
- ASSERT(args.length() == 2);
-
- CONVERT_DOUBLE_ARG_CHECKED(t, 0);
- CONVERT_CHECKED(JSArray, res_array, args[1]);
-
- int year, month, day;
- DateYMDFromTime(static_cast<int>(floor(t / 86400000)), year, month, day);
-
- FixedArrayBase* elms_base = FixedArrayBase::cast(res_array->elements());
- RUNTIME_ASSERT(elms_base->length() == 3);
- RUNTIME_ASSERT(res_array->HasFastTypeElements());
-
- MaybeObject* maybe = res_array->EnsureWritableFastElements();
- if (maybe->IsFailure()) return maybe;
- FixedArray* elms = FixedArray::cast(res_array->elements());
- elms->set(0, Smi::FromInt(year));
- elms->set(1, Smi::FromInt(month));
- elms->set(2, Smi::FromInt(day));
-
- return isolate->heap()->undefined_value();
+ date->SetValue(value, is_value_nan);
+ return value;
}
@@ -8007,14 +7635,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NewArgumentsFast) {
--index;
}
- ScopeInfo<> scope_info(callee->shared()->scope_info());
+ Handle<ScopeInfo> scope_info(callee->shared()->scope_info());
while (index >= 0) {
// Detect duplicate names to the right in the parameter list.
- Handle<String> name = scope_info.parameter_name(index);
- int context_slot_count = scope_info.number_of_context_slots();
+ Handle<String> name(scope_info->ParameterName(index));
+ int context_local_count = scope_info->ContextLocalCount();
bool duplicate = false;
for (int j = index + 1; j < parameter_count; ++j) {
- if (scope_info.parameter_name(j).is_identical_to(name)) {
+ if (scope_info->ParameterName(j) == *name) {
duplicate = true;
break;
}
@@ -8029,17 +7657,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NewArgumentsFast) {
// The context index goes in the parameter map with a hole in the
// arguments array.
int context_index = -1;
- for (int j = Context::MIN_CONTEXT_SLOTS;
- j < context_slot_count;
- ++j) {
- if (scope_info.context_slot_name(j).is_identical_to(name)) {
+ for (int j = 0; j < context_local_count; ++j) {
+ if (scope_info->ContextLocalName(j) == *name) {
context_index = j;
break;
}
}
ASSERT(context_index >= 0);
arguments->set_the_hole(index);
- parameter_map->set(index + 2, Smi::FromInt(context_index));
+ parameter_map->set(index + 2, Smi::FromInt(
+ Context::MIN_CONTEXT_SLOTS + context_index));
}
--index;
@@ -8082,7 +7709,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NewStrictArgumentsFast) {
AssertNoAllocation no_gc;
FixedArray* array = reinterpret_cast<FixedArray*>(obj);
- array->set_map(isolate->heap()->fixed_array_map());
+ array->set_map_no_write_barrier(isolate->heap()->fixed_array_map());
array->set_length(length);
WriteBarrierMode mode = array->GetWriteBarrierMode(no_gc);
@@ -8098,9 +7725,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NewStrictArgumentsFast) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_NewClosure) {
HandleScope scope(isolate);
ASSERT(args.length() == 3);
- CONVERT_ARG_CHECKED(Context, context, 0);
- CONVERT_ARG_CHECKED(SharedFunctionInfo, shared, 1);
- CONVERT_BOOLEAN_CHECKED(pretenure, args[2]);
+ CONVERT_ARG_HANDLE_CHECKED(Context, context, 0);
+ CONVERT_ARG_HANDLE_CHECKED(SharedFunctionInfo, shared, 1);
+ CONVERT_BOOLEAN_ARG_CHECKED(pretenure, 2);
// The caller ensures that we pretenure closures that are assigned
// directly to properties.
@@ -8125,13 +7752,15 @@ static SmartArrayPointer<Handle<Object> > GetCallerArguments(
List<JSFunction*> functions(2);
frame->GetFunctions(&functions);
if (functions.length() > 1) {
- int inlined_frame_index = functions.length() - 1;
- JSFunction* inlined_function = functions[inlined_frame_index];
- int args_count = inlined_function->shared()->formal_parameter_count();
- ScopedVector<SlotRef> args_slots(args_count);
- SlotRef::ComputeSlotMappingForArguments(frame,
- inlined_frame_index,
- &args_slots);
+ int inlined_jsframe_index = functions.length() - 1;
+ JSFunction* inlined_function = functions[inlined_jsframe_index];
+ Vector<SlotRef> args_slots =
+ SlotRef::ComputeSlotMappingForArguments(
+ frame,
+ inlined_jsframe_index,
+ inlined_function->shared()->formal_parameter_count());
+
+ int args_count = args_slots.length();
*total_argc = prefix_argc + args_count;
SmartArrayPointer<Handle<Object> > param_data(
@@ -8140,6 +7769,9 @@ static SmartArrayPointer<Handle<Object> > GetCallerArguments(
Handle<Object> val = args_slots[i].GetValue();
param_data[prefix_argc + i] = val;
}
+
+ args_slots.Dispose();
+
return param_data;
} else {
it.AdvanceToArgumentsFrame();
@@ -8161,7 +7793,7 @@ static SmartArrayPointer<Handle<Object> > GetCallerArguments(
RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionBindArguments) {
HandleScope scope(isolate);
ASSERT(args.length() == 4);
- CONVERT_ARG_CHECKED(JSFunction, bound_function, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, bound_function, 0);
RUNTIME_ASSERT(args[3]->IsNumber());
Handle<Object> bindee = args.at<Object>(1);
@@ -8202,7 +7834,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionBindArguments) {
for (int j = 0; j < argc; j++, i++) {
new_bindings->set(i, *arguments[j + 1]);
}
- new_bindings->set_map(isolate->heap()->fixed_cow_array_map());
+ new_bindings->set_map_no_write_barrier(
+ isolate->heap()->fixed_cow_array_map());
bound_function->set_function_bindings(*new_bindings);
// Update length.
@@ -8218,7 +7851,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionBindArguments) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_BoundFunctionGetBindings) {
HandleScope handles(isolate);
ASSERT(args.length() == 1);
- CONVERT_ARG_CHECKED(JSObject, callable, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSReceiver, callable, 0);
if (callable->IsJSFunction()) {
Handle<JSFunction> function = Handle<JSFunction>::cast(callable);
if (function->shared()->bound()) {
@@ -8235,7 +7868,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObjectFromBound) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
// First argument is a function to use as a constructor.
- CONVERT_ARG_CHECKED(JSFunction, function, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
RUNTIME_ASSERT(function->shared()->bound());
// The argument is a bound function. Extract its bound arguments
@@ -8376,7 +8009,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FinalizeInstanceSize) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_ARG_CHECKED(JSFunction, function, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
function->shared()->CompleteInobjectSlackTracking();
TrySettingInlineConstructStub(isolate, function);
@@ -8436,6 +8069,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LazyRecompile) {
function->ReplaceCode(function->shared()->code());
return function->code();
}
+ function->shared()->code()->set_profiler_ticks(0);
if (JSFunction::CompileOptimized(function,
AstNode::kNoNumber,
CLEAR_EXCEPTION)) {
@@ -8476,25 +8110,8 @@ class ActivationsFinder : public ThreadVisitor {
};
-RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyDeoptimized) {
- HandleScope scope(isolate);
- ASSERT(args.length() == 1);
- RUNTIME_ASSERT(args[0]->IsSmi());
- Deoptimizer::BailoutType type =
- static_cast<Deoptimizer::BailoutType>(args.smi_at(0));
- Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
- ASSERT(isolate->heap()->IsAllocationAllowed());
- int frames = deoptimizer->output_count();
-
- deoptimizer->MaterializeHeapNumbers();
- delete deoptimizer;
-
- JavaScriptFrameIterator it(isolate);
- JavaScriptFrame* frame = NULL;
- for (int i = 0; i < frames - 1; i++) it.Advance();
- frame = it.frame();
-
- RUNTIME_ASSERT(frame->function()->IsJSFunction());
+static void MaterializeArgumentsObjectInFrame(Isolate* isolate,
+ JavaScriptFrame* frame) {
Handle<JSFunction> function(JSFunction::cast(frame->function()), isolate);
Handle<Object> arguments;
for (int i = frame->ComputeExpressionsCount() - 1; i >= 0; --i) {
@@ -8511,6 +8128,32 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyDeoptimized) {
frame->SetExpression(i, *arguments);
}
}
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyDeoptimized) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 1);
+ RUNTIME_ASSERT(args[0]->IsSmi());
+ Deoptimizer::BailoutType type =
+ static_cast<Deoptimizer::BailoutType>(args.smi_at(0));
+ Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
+ ASSERT(isolate->heap()->IsAllocationAllowed());
+ int jsframes = deoptimizer->jsframe_count();
+
+ deoptimizer->MaterializeHeapNumbers();
+ delete deoptimizer;
+
+ JavaScriptFrameIterator it(isolate);
+ for (int i = 0; i < jsframes - 1; i++) {
+ MaterializeArgumentsObjectInFrame(isolate, it.frame());
+ it.Advance();
+ }
+
+ JavaScriptFrame* frame = it.frame();
+ RUNTIME_ASSERT(frame->function()->IsJSFunction());
+ Handle<JSFunction> function(JSFunction::cast(frame->function()), isolate);
+ MaterializeArgumentsObjectInFrame(isolate, frame);
if (type == Deoptimizer::EAGER) {
RUNTIME_ASSERT(function->IsOptimized());
@@ -8563,7 +8206,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NotifyOSR) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_DeoptimizeFunction) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_ARG_CHECKED(JSFunction, function, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
if (!function->IsOptimized()) return isolate->heap()->undefined_value();
Deoptimizer::DeoptimizeFunction(*function);
@@ -8583,10 +8226,22 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RunningInSimulator) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_OptimizeFunctionOnNextCall) {
HandleScope scope(isolate);
- ASSERT(args.length() == 1);
- CONVERT_ARG_CHECKED(JSFunction, function, 0);
+ RUNTIME_ASSERT(args.length() == 1 || args.length() == 2);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
+
if (!function->IsOptimizable()) return isolate->heap()->undefined_value();
function->MarkForLazyRecompilation();
+
+ Code* unoptimized = function->shared()->code();
+ if (args.length() == 2 &&
+ unoptimized->kind() == Code::FUNCTION) {
+ CONVERT_ARG_HANDLE_CHECKED(String, type, 1);
+ CHECK(type->IsEqualTo(CStrVector("osr")));
+ isolate->runtime_profiler()->AttemptOnStackReplacement(*function);
+ unoptimized->set_allow_osr_at_loop_nesting_level(
+ Code::kMaxLoopNestingMarker);
+ }
+
return isolate->heap()->undefined_value();
}
@@ -8602,7 +8257,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOptimizationStatus) {
if (FLAG_always_opt) {
return Smi::FromInt(3); // 3 == "always".
}
- CONVERT_ARG_CHECKED(JSFunction, function, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
return function->IsOptimized() ? Smi::FromInt(1) // 1 == "yes".
: Smi::FromInt(2); // 2 == "no".
}
@@ -8611,7 +8266,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOptimizationStatus) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOptimizationCount) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_ARG_CHECKED(JSFunction, function, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
return Smi::FromInt(function->shared()->opt_count());
}
@@ -8619,7 +8274,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOptimizationCount) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_ARG_CHECKED(JSFunction, function, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
// We're not prepared to handle a function with arguments object.
ASSERT(!function->shared()->uses_arguments());
@@ -8705,8 +8360,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
function->PrintName();
PrintF("]\n");
}
- StackCheckStub check_stub;
- Handle<Code> check_code = check_stub.GetCode();
+ Handle<Code> check_code;
+ if (FLAG_count_based_interrupts) {
+ InterruptStub interrupt_stub;
+ check_code = interrupt_stub.GetCode();
+ } else // NOLINT
+ { // NOLINT
+ StackCheckStub check_stub;
+ check_code = check_stub.GetCode();
+ }
Handle<Code> replacement_code = isolate->builtins()->OnStackReplacement();
Deoptimizer::RevertStackCheckCode(*unoptimized,
*check_code,
@@ -8736,19 +8398,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CheckIsBootstrapping) {
}
-RUNTIME_FUNCTION(MaybeObject*, Runtime_Apply) {
+RUNTIME_FUNCTION(MaybeObject*, Runtime_Call) {
HandleScope scope(isolate);
- ASSERT(args.length() == 5);
- CONVERT_CHECKED(JSReceiver, fun, args[0]);
- Object* receiver = args[1];
- CONVERT_CHECKED(JSObject, arguments, args[2]);
- CONVERT_CHECKED(Smi, shift, args[3]);
- CONVERT_CHECKED(Smi, arity, args[4]);
-
- int offset = shift->value();
- int argc = arity->value();
- ASSERT(offset >= 0);
- ASSERT(argc >= 0);
+ ASSERT(args.length() >= 2);
+ int argc = args.length() - 2;
+ CONVERT_ARG_CHECKED(JSReceiver, fun, argc + 1);
+ Object* receiver = args[0];
// If there are too many arguments, allocate argv via malloc.
const int argv_small_size = 10;
@@ -8762,7 +8417,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Apply) {
}
for (int i = 0; i < argc; ++i) {
- MaybeObject* maybe = arguments->GetElement(offset + i);
+ MaybeObject* maybe = args[1 + i];
Object* object;
if (!maybe->To<Object>(&object)) return maybe;
argv[i] = Handle<Object>(object);
@@ -8779,6 +8434,41 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Apply) {
}
+RUNTIME_FUNCTION(MaybeObject*, Runtime_Apply) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 5);
+ CONVERT_ARG_HANDLE_CHECKED(JSReceiver, fun, 0);
+ Handle<Object> receiver = args.at<Object>(1);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, arguments, 2);
+ CONVERT_SMI_ARG_CHECKED(offset, 3);
+ CONVERT_SMI_ARG_CHECKED(argc, 4);
+ ASSERT(offset >= 0);
+ ASSERT(argc >= 0);
+
+ // If there are too many arguments, allocate argv via malloc.
+ const int argv_small_size = 10;
+ Handle<Object> argv_small_buffer[argv_small_size];
+ SmartArrayPointer<Handle<Object> > argv_large_buffer;
+ Handle<Object>* argv = argv_small_buffer;
+ if (argc > argv_small_size) {
+ argv = new Handle<Object>[argc];
+ if (argv == NULL) return isolate->StackOverflow();
+ argv_large_buffer = SmartArrayPointer<Handle<Object> >(argv);
+ }
+
+ for (int i = 0; i < argc; ++i) {
+ argv[i] = Object::GetElement(arguments, offset + i);
+ }
+
+ bool threw;
+ Handle<Object> result =
+ Execution::Call(fun, receiver, argc, argv, &threw, true);
+
+ if (threw) return Failure::Exception();
+ return *result;
+}
+
+
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFunctionDelegate) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
@@ -8799,8 +8489,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NewFunctionContext) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSFunction, function, args[0]);
- int length = function->shared()->scope_info()->NumberOfContextSlots();
+ CONVERT_ARG_CHECKED(JSFunction, function, 0);
+ int length = function->shared()->scope_info()->ContextLength();
Object* result;
{ MaybeObject* maybe_result =
isolate->heap()->AllocateFunctionContext(length, function);
@@ -8886,7 +8576,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_PushCatchContext) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_PushBlockContext) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
- SerializedScopeInfo* scope_info = SerializedScopeInfo::cast(args[0]);
+ ScopeInfo* scope_info = ScopeInfo::cast(args[0]);
JSFunction* function;
if (args[1]->IsSmi()) {
// A smi sentinel indicates a context nested inside global code rather
@@ -8911,8 +8601,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DeleteContextSlot) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
- CONVERT_ARG_CHECKED(Context, context, 0);
- CONVERT_ARG_CHECKED(String, name, 1);
+ CONVERT_ARG_HANDLE_CHECKED(Context, context, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, name, 1);
int index;
PropertyAttributes attributes;
@@ -9104,9 +8794,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StoreContextSlot) {
ASSERT(args.length() == 4);
Handle<Object> value(args[0], isolate);
- CONVERT_ARG_CHECKED(Context, context, 1);
- CONVERT_ARG_CHECKED(String, name, 2);
- CONVERT_STRICT_MODE_ARG(strict_mode, 3);
+ CONVERT_ARG_HANDLE_CHECKED(Context, context, 1);
+ CONVERT_ARG_HANDLE_CHECKED(String, name, 2);
+ CONVERT_LANGUAGE_MODE_ARG(language_mode, 3);
+ StrictModeFlag strict_mode = (language_mode == CLASSIC_MODE)
+ ? kNonStrictMode : kStrictMode;
int index;
PropertyAttributes attributes;
@@ -9171,7 +8863,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StoreContextSlot) {
(object->GetLocalPropertyAttribute(*name) == ABSENT)) {
RETURN_IF_EMPTY_HANDLE(
isolate,
- SetProperty(object, name, value, NONE, strict_mode));
+ JSReceiver::SetProperty(object, name, value, NONE, strict_mode));
} else if (strict_mode == kStrictMode && (attributes & READ_ONLY) != 0) {
// Setting read only property in strict mode.
Handle<Object> error =
@@ -9226,7 +8918,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StackGuard) {
return isolate->StackOverflow();
}
- return Execution::HandleStackGuardInterrupt();
+ return Execution::HandleStackGuardInterrupt(isolate);
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_Interrupt) {
+ ASSERT(args.length() == 0);
+ return Execution::HandleStackGuardInterrupt(isolate);
}
@@ -9259,22 +8957,6 @@ static void PrintTransition(Object* result) {
}
-RUNTIME_FUNCTION(MaybeObject*, Runtime_TraceElementsKindTransition) {
- ASSERT(args.length() == 5);
- CONVERT_ARG_CHECKED(JSObject, obj, 0);
- CONVERT_SMI_ARG_CHECKED(from_kind, 1);
- CONVERT_ARG_CHECKED(FixedArrayBase, from_elements, 2);
- CONVERT_SMI_ARG_CHECKED(to_kind, 3);
- CONVERT_ARG_CHECKED(FixedArrayBase, to_elements, 4);
- NoHandleAllocation ha;
- PrintF("*");
- obj->PrintElementsTransition(stdout,
- static_cast<ElementsKind>(from_kind), *from_elements,
- static_cast<ElementsKind>(to_kind), *to_elements);
- return isolate->heap()->undefined_value();
-}
-
-
RUNTIME_FUNCTION(MaybeObject*, Runtime_TraceEnter) {
ASSERT(args.length() == 0);
NoHandleAllocation ha;
@@ -9346,13 +9028,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DateParseString) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
- CONVERT_ARG_CHECKED(String, str, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, str, 0);
FlattenString(str);
- CONVERT_ARG_CHECKED(JSArray, output, 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSArray, output, 1);
MaybeObject* maybe_result_array =
- output->EnsureCanContainNonSmiElements();
+ output->EnsureCanContainHeapObjectElements();
if (maybe_result_array->IsFailure()) return maybe_result_array;
RUNTIME_ASSERT(output->HasFastElements());
@@ -9386,25 +9068,20 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DateLocalTimezone) {
ASSERT(args.length() == 1);
CONVERT_DOUBLE_ARG_CHECKED(x, 0);
- const char* zone = OS::LocalTimezone(x);
+ int64_t time = isolate->date_cache()->EquivalentTime(static_cast<int64_t>(x));
+ const char* zone = OS::LocalTimezone(static_cast<double>(time));
return isolate->heap()->AllocateStringFromUtf8(CStrVector(zone));
}
-RUNTIME_FUNCTION(MaybeObject*, Runtime_DateLocalTimeOffset) {
- NoHandleAllocation ha;
- ASSERT(args.length() == 0);
-
- return isolate->heap()->NumberFromDouble(OS::LocalTimeOffset());
-}
-
-
-RUNTIME_FUNCTION(MaybeObject*, Runtime_DateDaylightSavingsOffset) {
+RUNTIME_FUNCTION(MaybeObject*, Runtime_DateToUTC) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
CONVERT_DOUBLE_ARG_CHECKED(x, 0);
- return isolate->heap()->NumberFromDouble(OS::DaylightSavingsOffset(x));
+ int64_t time = isolate->date_cache()->ToUTC(static_cast<int64_t>(x));
+
+ return isolate->heap()->NumberFromDouble(static_cast<double>(time));
}
@@ -9419,10 +9096,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GlobalReceiver) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_ParseJson) {
HandleScope scope(isolate);
ASSERT_EQ(1, args.length());
- CONVERT_ARG_CHECKED(String, source, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, source, 0);
source = Handle<String>(source->TryFlattenGetString());
- // Optimized fast case where we only have ascii characters.
+ // Optimized fast case where we only have ASCII characters.
Handle<Object> result;
if (source->IsSeqAsciiString()) {
result = JsonParser<true>::Parse(source);
@@ -9440,44 +9117,40 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ParseJson) {
bool CodeGenerationFromStringsAllowed(Isolate* isolate,
Handle<Context> context) {
- if (context->allow_code_gen_from_strings()->IsFalse()) {
- // Check with callback if set.
- AllowCodeGenerationFromStringsCallback callback =
- isolate->allow_code_gen_callback();
- if (callback == NULL) {
- // No callback set and code generation disallowed.
- return false;
- } else {
- // Callback set. Let it decide if code generation is allowed.
- VMState state(isolate, EXTERNAL);
- return callback(v8::Utils::ToLocal(context));
- }
+ ASSERT(context->allow_code_gen_from_strings()->IsFalse());
+ // Check with callback if set.
+ AllowCodeGenerationFromStringsCallback callback =
+ isolate->allow_code_gen_callback();
+ if (callback == NULL) {
+ // No callback set and code generation disallowed.
+ return false;
+ } else {
+ // Callback set. Let it decide if code generation is allowed.
+ VMState state(isolate, EXTERNAL);
+ return callback(v8::Utils::ToLocal(context));
}
- return true;
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileString) {
HandleScope scope(isolate);
ASSERT_EQ(1, args.length());
- CONVERT_ARG_CHECKED(String, source, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, source, 0);
// Extract global context.
Handle<Context> context(isolate->context()->global_context());
// Check if global context allows code generation from
// strings. Throw an exception if it doesn't.
- if (!CodeGenerationFromStringsAllowed(isolate, context)) {
+ if (context->allow_code_gen_from_strings()->IsFalse() &&
+ !CodeGenerationFromStringsAllowed(isolate, context)) {
return isolate->Throw(*isolate->factory()->NewError(
"code_gen_from_strings", HandleVector<Object>(NULL, 0)));
}
// Compile source string in the global context.
- Handle<SharedFunctionInfo> shared = Compiler::CompileEval(source,
- context,
- true,
- kNonStrictMode,
- false);
+ Handle<SharedFunctionInfo> shared = Compiler::CompileEval(
+ source, context, true, CLASSIC_MODE, RelocInfo::kNoPosition);
if (shared.is_null()) return Failure::Exception();
Handle<JSFunction> fun =
isolate->factory()->NewFunctionFromSharedFunctionInfo(shared,
@@ -9490,14 +9163,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileString) {
static ObjectPair CompileGlobalEval(Isolate* isolate,
Handle<String> source,
Handle<Object> receiver,
- StrictModeFlag strict_mode,
- bool qml_mode) {
+ LanguageMode language_mode,
+ int scope_position) {
Handle<Context> context = Handle<Context>(isolate->context());
Handle<Context> global_context = Handle<Context>(context->global_context());
// Check if global context allows code generation from
// strings. Throw an exception if it doesn't.
- if (!CodeGenerationFromStringsAllowed(isolate, global_context)) {
+ if (global_context->allow_code_gen_from_strings()->IsFalse() &&
+ !CodeGenerationFromStringsAllowed(isolate, global_context)) {
isolate->Throw(*isolate->factory()->NewError(
"code_gen_from_strings", HandleVector<Object>(NULL, 0)));
return MakePair(Failure::Exception(), NULL);
@@ -9509,8 +9183,8 @@ static ObjectPair CompileGlobalEval(Isolate* isolate,
source,
Handle<Context>(isolate->context()),
context->IsGlobalContext(),
- strict_mode,
- qml_mode);
+ language_mode,
+ scope_position);
if (shared.is_null()) return MakePair(Failure::Exception(), NULL);
Handle<JSFunction> compiled =
isolate->factory()->NewFunctionFromSharedFunctionInfo(
@@ -9535,13 +9209,13 @@ RUNTIME_FUNCTION(ObjectPair, Runtime_ResolvePossiblyDirectEval) {
return MakePair(*callee, isolate->heap()->the_hole_value());
}
- CONVERT_STRICT_MODE_ARG(strict_mode, 3);
+ CONVERT_LANGUAGE_MODE_ARG(language_mode, 3);
ASSERT(args[4]->IsSmi());
return CompileGlobalEval(isolate,
args.at<String>(1),
args.at<Object>(2),
- strict_mode,
- Smi::cast(args[4])->value());
+ language_mode,
+ args.smi_at(4));
}
@@ -9552,11 +9226,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetNewFunctionAttributes) {
// as specified in ECMA262, 15.3.5.2.
HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_ARG_CHECKED(JSFunction, func, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, func, 0);
- Handle<Map> map = func->shared()->strict_mode()
- ? isolate->strict_mode_function_instance_map()
- : isolate->function_instance_map();
+ Handle<Map> map = func->shared()->is_classic_mode()
+ ? isolate->function_instance_map()
+ : isolate->strict_mode_function_instance_map();
ASSERT(func->map()->instance_type() == map->instance_type());
ASSERT(func->map()->instance_size() == map->instance_size());
@@ -9570,7 +9244,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_AllocateInNewSpace) {
// Use as fallback for allocation in generated code when NewSpace
// is full.
ASSERT(args.length() == 1);
- CONVERT_ARG_CHECKED(Smi, size_smi, 0);
+ CONVERT_ARG_HANDLE_CHECKED(Smi, size_smi, 0);
int size = size_smi->value();
RUNTIME_ASSERT(IsAligned(size, kPointerSize));
RUNTIME_ASSERT(size > 0);
@@ -9592,8 +9266,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_AllocateInNewSpace) {
// false otherwise.
RUNTIME_FUNCTION(MaybeObject*, Runtime_PushIfAbsent) {
ASSERT(args.length() == 2);
- CONVERT_CHECKED(JSArray, array, args[0]);
- CONVERT_CHECKED(JSObject, element, args[1]);
+ CONVERT_ARG_CHECKED(JSArray, array, 0);
+ CONVERT_ARG_CHECKED(JSObject, element, 1);
RUNTIME_ASSERT(array->HasFastElements() || array->HasFastSmiOnlyElements());
int length = Smi::cast(array->length())->value();
FixedArray* elements = FixedArray::cast(array->elements());
@@ -9653,8 +9327,9 @@ class ArrayConcatVisitor {
// Fall-through to dictionary mode.
}
ASSERT(!fast_elements_);
- Handle<NumberDictionary> dict(NumberDictionary::cast(*storage_));
- Handle<NumberDictionary> result =
+ Handle<SeededNumberDictionary> dict(
+ SeededNumberDictionary::cast(*storage_));
+ Handle<SeededNumberDictionary> result =
isolate_->factory()->DictionaryAtNumberPut(dict, index, elm);
if (!result.is_identical_to(dict)) {
// Dictionary needed to grow.
@@ -9694,14 +9369,15 @@ class ArrayConcatVisitor {
void SetDictionaryMode(uint32_t index) {
ASSERT(fast_elements_);
Handle<FixedArray> current_storage(*storage_);
- Handle<NumberDictionary> slow_storage(
- isolate_->factory()->NewNumberDictionary(current_storage->length()));
+ Handle<SeededNumberDictionary> slow_storage(
+ isolate_->factory()->NewSeededNumberDictionary(
+ current_storage->length()));
uint32_t current_length = static_cast<uint32_t>(current_storage->length());
for (uint32_t i = 0; i < current_length; i++) {
HandleScope loop_scope;
Handle<Object> element(current_storage->get(i));
if (!element->IsTheHole()) {
- Handle<NumberDictionary> new_storage =
+ Handle<SeededNumberDictionary> new_storage =
isolate_->factory()->DictionaryAtNumberPut(slow_storage, i, element);
if (!new_storage.is_identical_to(slow_storage)) {
slow_storage = loop_scope.CloseAndEscape(new_storage);
@@ -9736,6 +9412,7 @@ static uint32_t EstimateElementCount(Handle<JSArray> array) {
uint32_t length = static_cast<uint32_t>(array->length()->Number());
int element_count = 0;
switch (array->GetElementsKind()) {
+ case FAST_SMI_ONLY_ELEMENTS:
case FAST_ELEMENTS: {
// Fast elements can't have lengths that are not representable by
// a 32-bit signed integer.
@@ -9747,9 +9424,13 @@ static uint32_t EstimateElementCount(Handle<JSArray> array) {
}
break;
}
+ case FAST_DOUBLE_ELEMENTS:
+ // TODO(1810): Decide if it's worthwhile to implement this.
+ UNREACHABLE();
+ break;
case DICTIONARY_ELEMENTS: {
- Handle<NumberDictionary> dictionary(
- NumberDictionary::cast(array->elements()));
+ Handle<SeededNumberDictionary> dictionary(
+ SeededNumberDictionary::cast(array->elements()));
int capacity = dictionary->Capacity();
for (int i = 0; i < capacity; i++) {
Handle<Object> key(dictionary->KeyAt(i));
@@ -9759,7 +9440,16 @@ static uint32_t EstimateElementCount(Handle<JSArray> array) {
}
break;
}
- default:
+ case NON_STRICT_ARGUMENTS_ELEMENTS:
+ case EXTERNAL_BYTE_ELEMENTS:
+ case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
+ case EXTERNAL_SHORT_ELEMENTS:
+ case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
+ case EXTERNAL_INT_ELEMENTS:
+ case EXTERNAL_UNSIGNED_INT_ELEMENTS:
+ case EXTERNAL_FLOAT_ELEMENTS:
+ case EXTERNAL_DOUBLE_ELEMENTS:
+ case EXTERNAL_PIXEL_ELEMENTS:
// External arrays are always dense.
return length;
}
@@ -9837,8 +9527,14 @@ static void CollectElementIndices(Handle<JSObject> object,
}
break;
}
+ case FAST_DOUBLE_ELEMENTS: {
+ // TODO(1810): Decide if it's worthwhile to implement this.
+ UNREACHABLE();
+ break;
+ }
case DICTIONARY_ELEMENTS: {
- Handle<NumberDictionary> dict(NumberDictionary::cast(object->elements()));
+ Handle<SeededNumberDictionary> dict(
+ SeededNumberDictionary::cast(object->elements()));
uint32_t capacity = dict->Capacity();
for (uint32_t j = 0; j < capacity; j++) {
HandleScope loop_scope;
@@ -9967,8 +9663,13 @@ static bool IterateElements(Isolate* isolate,
}
break;
}
+ case FAST_DOUBLE_ELEMENTS: {
+ // TODO(1810): Decide if it's worthwhile to implement this.
+ UNREACHABLE();
+ break;
+ }
case DICTIONARY_ELEMENTS: {
- Handle<NumberDictionary> dict(receiver->element_dictionary());
+ Handle<SeededNumberDictionary> dict(receiver->element_dictionary());
List<uint32_t> indices(dict->Capacity() / 2);
// Collect all indices in the object and the prototypes less
// than length. This might introduce duplicates in the indices list.
@@ -10057,7 +9758,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayConcat) {
ASSERT(args.length() == 1);
HandleScope handle_scope(isolate);
- CONVERT_ARG_CHECKED(JSArray, arguments, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSArray, arguments, 0);
int argument_count = static_cast<int>(arguments->length()->Number());
RUNTIME_ASSERT(arguments->HasFastElements());
Handle<FixedArray> elements(FixedArray::cast(arguments->elements()));
@@ -10077,6 +9778,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayConcat) {
uint32_t element_estimate;
if (obj->IsJSArray()) {
Handle<JSArray> array(Handle<JSArray>::cast(obj));
+ // TODO(1810): Find out if it's worthwhile to properly support
+ // arbitrary ElementsKinds. For now, pessimistically transition to
+ // FAST_ELEMENTS.
+ if (array->HasFastDoubleElements()) {
+ array = Handle<JSArray>::cast(
+ JSObject::TransitionElementsKind(array, FAST_ELEMENTS));
+ }
length_estimate =
static_cast<uint32_t>(array->length()->Number());
element_estimate =
@@ -10117,7 +9825,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayConcat) {
uint32_t at_least_space_for = estimate_nof_elements +
(estimate_nof_elements >> 2);
storage = Handle<FixedArray>::cast(
- isolate->factory()->NewNumberDictionary(at_least_space_for));
+ isolate->factory()->NewSeededNumberDictionary(at_least_space_for));
}
ArrayConcatVisitor visitor(isolate, storage, fast_case);
@@ -10145,7 +9853,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GlobalPrint) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
- CONVERT_CHECKED(String, string, args[0]);
+ CONVERT_ARG_CHECKED(String, string, 0);
StringInputBuffer buffer(string);
while (buffer.has_more()) {
uint16_t character = buffer.GetNext();
@@ -10161,7 +9869,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GlobalPrint) {
// Returns the number of non-undefined elements collected.
RUNTIME_FUNCTION(MaybeObject*, Runtime_RemoveArrayHoles) {
ASSERT(args.length() == 2);
- CONVERT_CHECKED(JSObject, object, args[0]);
+ CONVERT_ARG_CHECKED(JSObject, object, 0);
CONVERT_NUMBER_CHECKED(uint32_t, limit, Uint32, args[1]);
return object->PrepareElementsForSort(limit);
}
@@ -10170,8 +9878,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RemoveArrayHoles) {
// Move contents of argument 0 (an array) to argument 1 (an array)
RUNTIME_FUNCTION(MaybeObject*, Runtime_MoveArrayContents) {
ASSERT(args.length() == 2);
- CONVERT_CHECKED(JSArray, from, args[0]);
- CONVERT_CHECKED(JSArray, to, args[1]);
+ CONVERT_ARG_CHECKED(JSArray, from, 0);
+ CONVERT_ARG_CHECKED(JSArray, to, 1);
FixedArrayBase* new_elements = from->elements();
MaybeObject* maybe_new_map;
ElementsKind elements_kind;
@@ -10184,7 +9892,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_MoveArrayContents) {
} else {
elements_kind = DICTIONARY_ELEMENTS;
}
- maybe_new_map = to->GetElementsTransitionMap(elements_kind);
+ maybe_new_map = to->GetElementsTransitionMap(isolate, elements_kind);
Object* new_map;
if (!maybe_new_map->ToObject(&new_map)) return maybe_new_map;
to->set_map(Map::cast(new_map));
@@ -10202,10 +9910,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_MoveArrayContents) {
// How many elements does this object/array have?
RUNTIME_FUNCTION(MaybeObject*, Runtime_EstimateNumberOfElements) {
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSObject, object, args[0]);
+ CONVERT_ARG_CHECKED(JSObject, object, 0);
HeapObject* elements = object->elements();
if (elements->IsDictionary()) {
- return Smi::FromInt(NumberDictionary::cast(elements)->NumberOfElements());
+ int result = SeededNumberDictionary::cast(elements)->NumberOfElements();
+ return Smi::FromInt(result);
} else if (object->IsJSArray()) {
return JSArray::cast(object)->length();
} else {
@@ -10219,7 +9928,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SwapElements) {
ASSERT_EQ(3, args.length());
- CONVERT_ARG_CHECKED(JSObject, object, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, object, 0);
Handle<Object> key1 = args.at<Object>(1);
Handle<Object> key2 = args.at<Object>(2);
@@ -10235,10 +9944,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SwapElements) {
Handle<Object> tmp2 = Object::GetElement(jsobject, index2);
RETURN_IF_EMPTY_HANDLE(isolate, tmp2);
- RETURN_IF_EMPTY_HANDLE(isolate,
- SetElement(jsobject, index1, tmp2, kStrictMode));
- RETURN_IF_EMPTY_HANDLE(isolate,
- SetElement(jsobject, index2, tmp1, kStrictMode));
+ RETURN_IF_EMPTY_HANDLE(
+ isolate, JSObject::SetElement(jsobject, index1, tmp2, NONE, kStrictMode));
+ RETURN_IF_EMPTY_HANDLE(
+ isolate, JSObject::SetElement(jsobject, index2, tmp1, NONE, kStrictMode));
return isolate->heap()->undefined_value();
}
@@ -10252,7 +9961,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SwapElements) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetArrayKeys) {
ASSERT(args.length() == 2);
HandleScope scope(isolate);
- CONVERT_ARG_CHECKED(JSObject, array, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, array, 0);
CONVERT_NUMBER_CHECKED(uint32_t, length, Uint32, args[1]);
if (array->elements()->IsDictionary()) {
// Create an array and get all the keys into it, then remove all the
@@ -10291,37 +10000,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetArrayKeys) {
}
-// DefineAccessor takes an optional final argument which is the
-// property attributes (eg, DONT_ENUM, DONT_DELETE). IMPORTANT: due
-// to the way accessors are implemented, it is set for both the getter
-// and setter on the first call to DefineAccessor and ignored on
-// subsequent calls.
-RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineAccessor) {
- RUNTIME_ASSERT(args.length() == 4 || args.length() == 5);
- // Compute attributes.
- PropertyAttributes attributes = NONE;
- if (args.length() == 5) {
- CONVERT_CHECKED(Smi, attrs, args[4]);
- int value = attrs->value();
- // Only attribute bits should be set.
- ASSERT((value & ~(READ_ONLY | DONT_ENUM | DONT_DELETE)) == 0);
- attributes = static_cast<PropertyAttributes>(value);
- }
-
- CONVERT_CHECKED(JSObject, obj, args[0]);
- CONVERT_CHECKED(String, name, args[1]);
- CONVERT_CHECKED(Smi, flag, args[2]);
- CONVERT_CHECKED(JSFunction, fun, args[3]);
- return obj->DefineAccessor(name, flag->value() == 0, fun, attributes);
-}
-
-
RUNTIME_FUNCTION(MaybeObject*, Runtime_LookupAccessor) {
ASSERT(args.length() == 3);
- CONVERT_CHECKED(JSObject, obj, args[0]);
- CONVERT_CHECKED(String, name, args[1]);
- CONVERT_CHECKED(Smi, flag, args[2]);
- return obj->LookupAccessor(name, flag->value() == 0);
+ CONVERT_ARG_CHECKED(JSObject, obj, 0);
+ CONVERT_ARG_CHECKED(String, name, 1);
+ CONVERT_SMI_ARG_CHECKED(flag, 2);
+ AccessorComponent component = flag == 0 ? ACCESSOR_GETTER : ACCESSOR_SETTER;
+ return obj->LookupAccessor(name, component);
}
@@ -10339,8 +10024,8 @@ static Smi* WrapFrameId(StackFrame::Id id) {
}
-static StackFrame::Id UnwrapFrameId(Smi* wrapped) {
- return static_cast<StackFrame::Id>(wrapped->value() << 2);
+static StackFrame::Id UnwrapFrameId(int wrapped) {
+ return static_cast<StackFrame::Id>(wrapped << 2);
}
@@ -10417,10 +10102,11 @@ static MaybeObject* DebugLookupResultValue(Heap* heap,
case CONSTANT_TRANSITION:
case NULL_DESCRIPTOR:
return heap->undefined_value();
- default:
+ case HANDLER:
UNREACHABLE();
+ return heap->undefined_value();
}
- UNREACHABLE();
+ UNREACHABLE(); // keep the compiler happy
return heap->undefined_value();
}
@@ -10442,8 +10128,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetPropertyDetails) {
ASSERT(args.length() == 2);
- CONVERT_ARG_CHECKED(JSObject, obj, 0);
- CONVERT_ARG_CHECKED(String, name, 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, name, 1);
// Make sure to set the current context to the context before the debugger was
// entered (if the debugger is entered). The reason for switching context here
@@ -10513,15 +10199,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetPropertyDetails) {
// If the callback object is a fixed array then it contains JavaScript
// getter and/or setter.
bool hasJavaScriptAccessors = result_type == CALLBACKS &&
- result_callback_obj->IsFixedArray();
+ result_callback_obj->IsAccessorPair();
Handle<FixedArray> details =
isolate->factory()->NewFixedArray(hasJavaScriptAccessors ? 5 : 2);
details->set(0, *value);
details->set(1, property_details);
if (hasJavaScriptAccessors) {
+ AccessorPair* accessors = AccessorPair::cast(*result_callback_obj);
details->set(2, isolate->heap()->ToBoolean(caught_exception));
- details->set(3, FixedArray::cast(*result_callback_obj)->get(0));
- details->set(4, FixedArray::cast(*result_callback_obj)->get(1));
+ details->set(3, accessors->GetComponent(ACCESSOR_GETTER));
+ details->set(4, accessors->GetComponent(ACCESSOR_SETTER));
}
return *isolate->factory()->NewJSArrayWithElements(details);
@@ -10540,8 +10227,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetProperty) {
ASSERT(args.length() == 2);
- CONVERT_ARG_CHECKED(JSObject, obj, 0);
- CONVERT_ARG_CHECKED(String, name, 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, name, 1);
LookupResult result(isolate);
obj->Lookup(*name, &result);
@@ -10556,9 +10243,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetProperty) {
// args[0]: smi with property details.
RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPropertyTypeFromDetails) {
ASSERT(args.length() == 1);
- CONVERT_CHECKED(Smi, details, args[0]);
- PropertyType type = PropertyDetails(details).type();
- return Smi::FromInt(static_cast<int>(type));
+ CONVERT_PROPERTY_DETAILS_CHECKED(details, 0);
+ return Smi::FromInt(static_cast<int>(details.type()));
}
@@ -10566,9 +10252,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPropertyTypeFromDetails) {
// args[0]: smi with property details.
RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPropertyAttributesFromDetails) {
ASSERT(args.length() == 1);
- CONVERT_CHECKED(Smi, details, args[0]);
- PropertyAttributes attributes = PropertyDetails(details).attributes();
- return Smi::FromInt(static_cast<int>(attributes));
+ CONVERT_PROPERTY_DETAILS_CHECKED(details, 0);
+ return Smi::FromInt(static_cast<int>(details.attributes()));
}
@@ -10576,9 +10261,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPropertyAttributesFromDetails) {
// args[0]: smi with property details.
RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPropertyIndexFromDetails) {
ASSERT(args.length() == 1);
- CONVERT_CHECKED(Smi, details, args[0]);
- int index = PropertyDetails(details).index();
- return Smi::FromInt(index);
+ CONVERT_PROPERTY_DETAILS_CHECKED(details, 0);
+ return Smi::FromInt(details.index());
}
@@ -10588,9 +10272,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugPropertyIndexFromDetails) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugNamedInterceptorPropertyValue) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
- CONVERT_ARG_CHECKED(JSObject, obj, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0);
RUNTIME_ASSERT(obj->HasNamedInterceptor());
- CONVERT_ARG_CHECKED(String, name, 1);
+ CONVERT_ARG_HANDLE_CHECKED(String, name, 1);
PropertyAttributes attributes;
return obj->GetPropertyWithInterceptor(*obj, *name, &attributes);
@@ -10603,7 +10287,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugNamedInterceptorPropertyValue) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugIndexedInterceptorElementValue) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
- CONVERT_ARG_CHECKED(JSObject, obj, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0);
RUNTIME_ASSERT(obj->HasIndexedInterceptor());
CONVERT_NUMBER_CHECKED(uint32_t, index, Uint32, args[1]);
@@ -10654,15 +10338,16 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameCount) {
class FrameInspector {
public:
FrameInspector(JavaScriptFrame* frame,
- int inlined_frame_index,
+ int inlined_jsframe_index,
Isolate* isolate)
: frame_(frame), deoptimized_frame_(NULL), isolate_(isolate) {
// Calculate the deoptimized frame.
if (frame->is_optimized()) {
deoptimized_frame_ = Deoptimizer::DebuggerInspectableFrame(
- frame, inlined_frame_index, isolate);
+ frame, inlined_jsframe_index, isolate);
}
has_adapted_arguments_ = frame_->has_adapted_arguments();
+ is_bottommost_ = inlined_jsframe_index == 0;
is_optimized_ = frame_->is_optimized();
}
@@ -10695,6 +10380,16 @@ class FrameInspector {
? deoptimized_frame_->GetExpression(index)
: frame_->GetExpression(index);
}
+ int GetSourcePosition() {
+ return is_optimized_
+ ? deoptimized_frame_->GetSourcePosition()
+ : frame_->LookupCode()->SourcePosition(frame_->pc());
+ }
+ bool IsConstructor() {
+ return is_optimized_ && !is_bottommost_
+ ? deoptimized_frame_->HasConstructStub()
+ : frame_->IsConstructor();
+ }
// To inspect all the provided arguments the frame might need to be
// replaced with the arguments frame.
@@ -10710,6 +10405,7 @@ class FrameInspector {
DeoptimizedFrameInfo* deoptimized_frame_;
Isolate* isolate_;
bool is_optimized_;
+ bool is_bottommost_;
bool has_adapted_arguments_;
DISALLOW_COPY_AND_ASSIGN(FrameInspector);
@@ -10776,8 +10472,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) {
return heap->undefined_value();
}
- int inlined_frame_index = 0; // Inlined frame index in optimized frame.
-
int count = 0;
JavaScriptFrameIterator it(isolate, id);
for (; !it.done(); it.Advance()) {
@@ -10786,11 +10480,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) {
}
if (it.done()) return heap->undefined_value();
- if (it.frame()->is_optimized()) {
- inlined_frame_index =
+ bool is_optimized = it.frame()->is_optimized();
+
+ int inlined_jsframe_index = 0; // Inlined frame index in optimized frame.
+ if (is_optimized) {
+ inlined_jsframe_index =
it.frame()->GetInlineCount() - (index - count) - 1;
}
- FrameInspector frame_inspector(it.frame(), inlined_frame_index, isolate);
+ FrameInspector frame_inspector(it.frame(), inlined_jsframe_index, isolate);
// Traverse the saved contexts chain to find the active context for the
// selected frame.
@@ -10799,21 +10496,17 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) {
// Get the frame id.
Handle<Object> frame_id(WrapFrameId(it.frame()->id()), isolate);
- // Find source position.
- int position =
- it.frame()->LookupCode()->SourcePosition(it.frame()->pc());
+ // Find source position in unoptimized code.
+ int position = frame_inspector.GetSourcePosition();
- // Check for constructor frame. Inlined frames cannot be construct calls.
- bool inlined_frame =
- it.frame()->is_optimized() && inlined_frame_index != 0;
- bool constructor = !inlined_frame && it.frame()->IsConstructor();
+ // Check for constructor frame.
+ bool constructor = frame_inspector.IsConstructor();
// Get scope info and read from it for local variable information.
- Handle<JSFunction> function(JSFunction::cast(it.frame()->function()));
+ Handle<JSFunction> function(JSFunction::cast(frame_inspector.GetFunction()));
Handle<SharedFunctionInfo> shared(function->shared());
- Handle<SerializedScopeInfo> scope_info(shared->scope_info());
- ASSERT(*scope_info != SerializedScopeInfo::Empty());
- ScopeInfo<> info(*scope_info);
+ Handle<ScopeInfo> scope_info(shared->scope_info());
+ ASSERT(*scope_info != ScopeInfo::Empty());
// Get the locals names and values into a temporary array.
//
@@ -10821,31 +10514,33 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) {
// (e.g. .result)? For users of the debugger, they will probably be
// confusing.
Handle<FixedArray> locals =
- isolate->factory()->NewFixedArray(info.NumberOfLocals() * 2);
+ isolate->factory()->NewFixedArray(scope_info->LocalCount() * 2);
// Fill in the values of the locals.
int i = 0;
- for (; i < info.number_of_stack_slots(); ++i) {
+ for (; i < scope_info->StackLocalCount(); ++i) {
// Use the value from the stack.
- locals->set(i * 2, *info.LocalName(i));
+ locals->set(i * 2, scope_info->LocalName(i));
locals->set(i * 2 + 1, frame_inspector.GetExpression(i));
}
- if (i < info.NumberOfLocals()) {
+ if (i < scope_info->LocalCount()) {
// Get the context containing declarations.
Handle<Context> context(
Context::cast(it.frame()->context())->declaration_context());
- for (; i < info.NumberOfLocals(); ++i) {
- Handle<String> name = info.LocalName(i);
+ for (; i < scope_info->LocalCount(); ++i) {
+ Handle<String> name(scope_info->LocalName(i));
+ VariableMode mode;
+ InitializationFlag init_flag;
locals->set(i * 2, *name);
- locals->set(i * 2 + 1,
- context->get(scope_info->ContextSlotIndex(*name, NULL)));
+ locals->set(i * 2 + 1, context->get(
+ scope_info->ContextSlotIndex(*name, &mode, &init_flag)));
}
}
// Check whether this frame is positioned at return. If not top
// frame or if the frame is optimized it cannot be at a return.
bool at_return = false;
- if (!it.frame()->is_optimized() && index == 0) {
+ if (!is_optimized && index == 0) {
at_return = isolate->debug()->IsBreakAtReturn(it.frame());
}
@@ -10885,26 +10580,21 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) {
// the provided parameters whereas the function frame always have the number
// of arguments matching the functions parameters. The rest of the
// information (except for what is collected above) is the same.
- if (it.frame()->has_adapted_arguments()) {
+ if ((inlined_jsframe_index == 0) && it.frame()->has_adapted_arguments()) {
it.AdvanceToArgumentsFrame();
frame_inspector.SetArgumentsFrame(it.frame());
}
// Find the number of arguments to fill. At least fill the number of
// parameters for the function and fill more if more parameters are provided.
- int argument_count = info.number_of_parameters();
+ int argument_count = scope_info->ParameterCount();
if (argument_count < frame_inspector.GetParametersCount()) {
argument_count = frame_inspector.GetParametersCount();
}
-#ifdef DEBUG
- if (it.frame()->is_optimized()) {
- ASSERT_EQ(argument_count, frame_inspector.GetParametersCount());
- }
-#endif
// Calculate the size of the result.
int details_size = kFrameDetailsFirstDynamicIndex +
- 2 * (argument_count + info.NumberOfLocals()) +
+ 2 * (argument_count + scope_info->LocalCount()) +
(at_return ? 1 : 0);
Handle<FixedArray> details = isolate->factory()->NewFixedArray(details_size);
@@ -10919,7 +10609,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) {
// Add the locals count
details->set(kFrameDetailsLocalCountIndex,
- Smi::FromInt(info.NumberOfLocals()));
+ Smi::FromInt(scope_info->LocalCount()));
// Add the source position.
if (position != RelocInfo::kNoPosition) {
@@ -10942,9 +10632,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) {
if (*save->context() == *isolate->debug()->debug_context()) {
flags |= 1 << 0;
}
- if (it.frame()->is_optimized()) {
+ if (is_optimized) {
flags |= 1 << 1;
- flags |= inlined_frame_index << 2;
+ flags |= inlined_jsframe_index << 2;
}
details->set(kFrameDetailsFlagsIndex, Smi::FromInt(flags));
@@ -10954,14 +10644,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) {
// Add arguments name and value.
for (int i = 0; i < argument_count; i++) {
// Name of the argument.
- if (i < info.number_of_parameters()) {
- details->set(details_index++, *info.parameter_name(i));
+ if (i < scope_info->ParameterCount()) {
+ details->set(details_index++, scope_info->ParameterName(i));
} else {
details->set(details_index++, heap->undefined_value());
}
// Parameter value.
- if (i < it.frame()->ComputeParametersCount()) {
+ if (i < frame_inspector.GetParametersCount()) {
// Get the value from the stack.
details->set(details_index++, frame_inspector.GetParameter(i));
} else {
@@ -10970,7 +10660,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) {
}
// Add locals name and value from the temporary copy from the function frame.
- for (int i = 0; i < info.NumberOfLocals() * 2; i++) {
+ for (int i = 0; i < scope_info->LocalCount() * 2; i++) {
details->set(details_index++, locals->get(i));
}
@@ -10983,7 +10673,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) {
// THIS MUST BE DONE LAST SINCE WE MIGHT ADVANCE
// THE FRAME ITERATOR TO WRAP THE RECEIVER.
Handle<Object> receiver(it.frame()->receiver(), isolate);
- if (!receiver->IsJSObject() && !shared->strict_mode() && !shared->native()) {
+ if (!receiver->IsJSObject() &&
+ shared->is_classic_mode() &&
+ !shared->native()) {
// If the receiver is not a JSObject and the function is not a
// builtin or strict-mode we have hit an optimization where a
// value object is not converted into a wrapped JS objects. To
@@ -11006,21 +10698,20 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFrameDetails) {
// Copy all the context locals into an object used to materialize a scope.
static bool CopyContextLocalsToScopeObject(
Isolate* isolate,
- Handle<SerializedScopeInfo> serialized_scope_info,
- ScopeInfo<>& scope_info,
+ Handle<ScopeInfo> scope_info,
Handle<Context> context,
Handle<JSObject> scope_object) {
// Fill all context locals to the context extension.
- for (int i = Context::MIN_CONTEXT_SLOTS;
- i < scope_info.number_of_context_slots();
- i++) {
- int context_index = serialized_scope_info->ContextSlotIndex(
- *scope_info.context_slot_name(i), NULL);
+ for (int i = 0; i < scope_info->ContextLocalCount(); i++) {
+ VariableMode mode;
+ InitializationFlag init_flag;
+ int context_index = scope_info->ContextSlotIndex(
+ scope_info->ContextLocalName(i), &mode, &init_flag);
RETURN_IF_EMPTY_HANDLE_VALUE(
isolate,
SetProperty(scope_object,
- scope_info.context_slot_name(i),
+ Handle<String>(scope_info->ContextLocalName(i)),
Handle<Object>(context->get(context_index), isolate),
NONE,
kNonStrictMode),
@@ -11033,15 +10724,13 @@ static bool CopyContextLocalsToScopeObject(
// Create a plain JSObject which materializes the local scope for the specified
// frame.
-static Handle<JSObject> MaterializeLocalScope(
+static Handle<JSObject> MaterializeLocalScopeWithFrameInspector(
Isolate* isolate,
JavaScriptFrame* frame,
- int inlined_frame_index) {
- Handle<JSFunction> function(JSFunction::cast(frame->function()));
+ FrameInspector* frame_inspector) {
+ Handle<JSFunction> function(JSFunction::cast(frame_inspector->GetFunction()));
Handle<SharedFunctionInfo> shared(function->shared());
- Handle<SerializedScopeInfo> serialized_scope_info(shared->scope_info());
- ScopeInfo<> scope_info(*serialized_scope_info);
- FrameInspector frame_inspector(frame, inlined_frame_index, isolate);
+ Handle<ScopeInfo> scope_info(shared->scope_info());
// Allocate and initialize a JSObject with all the arguments, stack locals
// heap locals and extension properties of the debugged function.
@@ -11049,36 +10738,39 @@ static Handle<JSObject> MaterializeLocalScope(
isolate->factory()->NewJSObject(isolate->object_function());
// First fill all parameters.
- for (int i = 0; i < scope_info.number_of_parameters(); ++i) {
+ for (int i = 0; i < scope_info->ParameterCount(); ++i) {
+ Handle<Object> value(
+ i < frame_inspector->GetParametersCount() ?
+ frame_inspector->GetParameter(i) : isolate->heap()->undefined_value());
+
RETURN_IF_EMPTY_HANDLE_VALUE(
isolate,
SetProperty(local_scope,
- scope_info.parameter_name(i),
- Handle<Object>(frame_inspector.GetParameter(i)),
+ Handle<String>(scope_info->ParameterName(i)),
+ value,
NONE,
kNonStrictMode),
Handle<JSObject>());
}
// Second fill all stack locals.
- for (int i = 0; i < scope_info.number_of_stack_slots(); ++i) {
+ for (int i = 0; i < scope_info->StackLocalCount(); ++i) {
RETURN_IF_EMPTY_HANDLE_VALUE(
isolate,
SetProperty(local_scope,
- scope_info.stack_slot_name(i),
- Handle<Object>(frame_inspector.GetExpression(i)),
+ Handle<String>(scope_info->StackLocalName(i)),
+ Handle<Object>(frame_inspector->GetExpression(i)),
NONE,
kNonStrictMode),
Handle<JSObject>());
}
- if (scope_info.number_of_context_slots() > Context::MIN_CONTEXT_SLOTS) {
+ if (scope_info->HasContext()) {
// Third fill all context locals.
Handle<Context> frame_context(Context::cast(frame->context()));
Handle<Context> function_context(frame_context->declaration_context());
- if (!CopyContextLocalsToScopeObject(isolate,
- serialized_scope_info, scope_info,
- function_context, local_scope)) {
+ if (!CopyContextLocalsToScopeObject(
+ isolate, scope_info, function_context, local_scope)) {
return Handle<JSObject>();
}
@@ -11114,6 +10806,17 @@ static Handle<JSObject> MaterializeLocalScope(
}
+static Handle<JSObject> MaterializeLocalScope(
+ Isolate* isolate,
+ JavaScriptFrame* frame,
+ int inlined_jsframe_index) {
+ FrameInspector frame_inspector(frame, inlined_jsframe_index, isolate);
+ return MaterializeLocalScopeWithFrameInspector(isolate,
+ frame,
+ &frame_inspector);
+}
+
+
// Create a plain JSObject which materializes the closure content for the
// context.
static Handle<JSObject> MaterializeClosure(Isolate* isolate,
@@ -11121,18 +10824,16 @@ static Handle<JSObject> MaterializeClosure(Isolate* isolate,
ASSERT(context->IsFunctionContext());
Handle<SharedFunctionInfo> shared(context->closure()->shared());
- Handle<SerializedScopeInfo> serialized_scope_info(shared->scope_info());
- ScopeInfo<> scope_info(*serialized_scope_info);
+ Handle<ScopeInfo> scope_info(shared->scope_info());
- // Allocate and initialize a JSObject with all the content of theis function
+ // Allocate and initialize a JSObject with all the content of this function
// closure.
Handle<JSObject> closure_scope =
isolate->factory()->NewJSObject(isolate->object_function());
// Fill all context locals to the context extension.
- if (!CopyContextLocalsToScopeObject(isolate,
- serialized_scope_info, scope_info,
- context, closure_scope)) {
+ if (!CopyContextLocalsToScopeObject(
+ isolate, scope_info, context, closure_scope)) {
return Handle<JSObject>();
}
@@ -11187,9 +10888,7 @@ static Handle<JSObject> MaterializeBlockScope(
Isolate* isolate,
Handle<Context> context) {
ASSERT(context->IsBlockContext());
- Handle<SerializedScopeInfo> serialized_scope_info(
- SerializedScopeInfo::cast(context->extension()));
- ScopeInfo<> scope_info(*serialized_scope_info);
+ Handle<ScopeInfo> scope_info(ScopeInfo::cast(context->extension()));
// Allocate and initialize a JSObject with all the arguments, stack locals
// heap locals and extension properties of the debugged function.
@@ -11197,18 +10896,38 @@ static Handle<JSObject> MaterializeBlockScope(
isolate->factory()->NewJSObject(isolate->object_function());
// Fill all context locals.
- if (scope_info.number_of_context_slots() > Context::MIN_CONTEXT_SLOTS) {
- if (!CopyContextLocalsToScopeObject(isolate,
- serialized_scope_info, scope_info,
- context, block_scope)) {
- return Handle<JSObject>();
- }
+ if (!CopyContextLocalsToScopeObject(
+ isolate, scope_info, context, block_scope)) {
+ return Handle<JSObject>();
}
return block_scope;
}
+// Create a plain JSObject which materializes the module scope for the specified
+// module context.
+static Handle<JSObject> MaterializeModuleScope(
+ Isolate* isolate,
+ Handle<Context> context) {
+ ASSERT(context->IsModuleContext());
+ Handle<ScopeInfo> scope_info(ScopeInfo::cast(context->extension()));
+
+ // Allocate and initialize a JSObject with all the members of the debugged
+ // module.
+ Handle<JSObject> module_scope =
+ isolate->factory()->NewJSObject(isolate->object_function());
+
+ // Fill all context locals.
+ if (!CopyContextLocalsToScopeObject(
+ isolate, scope_info, context, module_scope)) {
+ return Handle<JSObject>();
+ }
+
+ return module_scope;
+}
+
+
// Iterate over the actual scopes visible from a stack frame. The iteration
// proceeds from the innermost visible nested scope outwards. All scopes are
// backed by an actual context except the local scope, which is inserted
@@ -11221,21 +10940,23 @@ class ScopeIterator {
ScopeTypeWith,
ScopeTypeClosure,
ScopeTypeCatch,
- ScopeTypeBlock
+ ScopeTypeBlock,
+ ScopeTypeModule
};
ScopeIterator(Isolate* isolate,
JavaScriptFrame* frame,
- int inlined_frame_index)
+ int inlined_jsframe_index)
: isolate_(isolate),
frame_(frame),
- inlined_frame_index_(inlined_frame_index),
+ inlined_jsframe_index_(inlined_jsframe_index),
function_(JSFunction::cast(frame->function())),
context_(Context::cast(frame->context())),
nested_scope_chain_(4) {
// Catch the case when the debugger stops in an internal function.
Handle<SharedFunctionInfo> shared_info(function_->shared());
+ Handle<ScopeInfo> scope_info(shared_info->scope_info());
if (shared_info->script() == isolate->heap()->undefined_value()) {
while (context_->closure() == *function_) {
context_ = Handle<Context>(context_->previous(), isolate_);
@@ -11243,41 +10964,72 @@ class ScopeIterator {
return;
}
- // Check whether we are in global code or function code. If there is a stack
- // slot for .result then this function has been created for evaluating
- // global code and it is not a real function.
- // Checking for the existence of .result seems fragile, but the scope info
- // saved with the code object does not otherwise have that information.
- int index = shared_info->scope_info()->
- StackSlotIndex(isolate_->heap()->result_symbol());
-
- // Reparse the code and analyze the scopes.
- ZoneScope zone_scope(isolate, DELETE_ON_EXIT);
- Handle<Script> script(Script::cast(shared_info->script()));
- Scope* scope;
- if (index >= 0) {
- // Global code
- CompilationInfo info(script);
- info.MarkAsGlobal();
- if (shared_info->qml_mode())
- info.MarkAsQmlMode();
- CHECK(ParserApi::Parse(&info));
- CHECK(Scope::Analyze(&info));
- scope = info.function()->scope();
- } else {
- // Function code
- CompilationInfo info(shared_info);
- if (shared_info->qml_mode())
- info.MarkAsQmlMode();
- CHECK(ParserApi::Parse(&info));
- CHECK(Scope::Analyze(&info));
- scope = info.function()->scope();
+ // Get the debug info (create it if it does not exist).
+ if (!isolate->debug()->EnsureDebugInfo(shared_info)) {
+ // Return if ensuring debug info failed.
+ return;
}
+ Handle<DebugInfo> debug_info = Debug::GetDebugInfo(shared_info);
+
+ // Find the break point where execution has stopped.
+ BreakLocationIterator break_location_iterator(debug_info,
+ ALL_BREAK_LOCATIONS);
+ break_location_iterator.FindBreakLocationFromAddress(frame->pc());
+ if (break_location_iterator.IsExit()) {
+ // We are within the return sequence. At the momemt it is not possible to
+ // get a source position which is consistent with the current scope chain.
+ // Thus all nested with, catch and block contexts are skipped and we only
+ // provide the function scope.
+ if (scope_info->HasContext()) {
+ context_ = Handle<Context>(context_->declaration_context(), isolate_);
+ } else {
+ while (context_->closure() == *function_) {
+ context_ = Handle<Context>(context_->previous(), isolate_);
+ }
+ }
+ if (scope_info->Type() != EVAL_SCOPE) nested_scope_chain_.Add(scope_info);
+ } else {
+ // Reparse the code and analyze the scopes.
+ ZoneScope zone_scope(isolate, DELETE_ON_EXIT);
+ Handle<Script> script(Script::cast(shared_info->script()));
+ Scope* scope = NULL;
+
+ // Check whether we are in global, eval or function code.
+ Handle<ScopeInfo> scope_info(shared_info->scope_info());
+ if (scope_info->Type() != FUNCTION_SCOPE) {
+ // Global or eval code.
+ CompilationInfo info(script);
+ if (scope_info->Type() == GLOBAL_SCOPE) {
+ info.MarkAsGlobal();
+ } else {
+ ASSERT(scope_info->Type() == EVAL_SCOPE);
+ info.MarkAsEval();
+ info.SetCallingContext(Handle<Context>(function_->context()));
+ }
+ if (ParserApi::Parse(&info, kNoParsingFlags) && Scope::Analyze(&info)) {
+ scope = info.function()->scope();
+ }
+ } else {
+ // Function code
+ CompilationInfo info(shared_info);
+ if (ParserApi::Parse(&info, kNoParsingFlags) && Scope::Analyze(&info)) {
+ scope = info.function()->scope();
+ }
+ }
- // Retrieve the scope chain for the current position.
- int statement_position =
- shared_info->code()->SourceStatementPosition(frame_->pc());
- scope->GetNestedScopeChain(&nested_scope_chain_, statement_position);
+ // Retrieve the scope chain for the current position.
+ if (scope != NULL) {
+ int source_position = shared_info->code()->SourcePosition(frame_->pc());
+ scope->GetNestedScopeChain(&nested_scope_chain_, source_position);
+ } else {
+ // A failed reparse indicates that the preparser has diverged from the
+ // parser or that the preparse data given to the initial parse has been
+ // faulty. We fail in debug mode but in release mode we only provide the
+ // information we get from the context chain but nothing about
+ // completely stack allocated scopes or stack allocated locals.
+ UNREACHABLE();
+ }
+ }
}
// More scopes?
@@ -11288,6 +11040,7 @@ class ScopeIterator {
ScopeType scope_type = Type();
if (scope_type == ScopeTypeGlobal) {
// The global scope is always the last in the chain.
+ ASSERT(context_->IsGlobalContext());
context_ = Handle<Context>();
return;
}
@@ -11295,6 +11048,7 @@ class ScopeIterator {
context_ = Handle<Context>(context_->previous(), isolate_);
} else {
if (nested_scope_chain_.last()->HasContext()) {
+ ASSERT(context_->previous() != NULL);
context_ = Handle<Context>(context_->previous(), isolate_);
}
nested_scope_chain_.RemoveLast();
@@ -11304,14 +11058,17 @@ class ScopeIterator {
// Return the type of the current scope.
ScopeType Type() {
if (!nested_scope_chain_.is_empty()) {
- Handle<SerializedScopeInfo> scope_info = nested_scope_chain_.last();
+ Handle<ScopeInfo> scope_info = nested_scope_chain_.last();
switch (scope_info->Type()) {
case FUNCTION_SCOPE:
ASSERT(context_->IsFunctionContext() ||
!scope_info->HasContext());
return ScopeTypeLocal;
+ case MODULE_SCOPE:
+ ASSERT(context_->IsModuleContext());
+ return ScopeTypeModule;
case GLOBAL_SCOPE:
- ASSERT(context_->IsGlobalContext() || scope_info->IsQmlMode());
+ ASSERT(context_->IsGlobalContext());
return ScopeTypeGlobal;
case WITH_SCOPE:
ASSERT(context_->IsWithContext());
@@ -11340,6 +11097,9 @@ class ScopeIterator {
if (context_->IsBlockContext()) {
return ScopeTypeBlock;
}
+ if (context_->IsModuleContext()) {
+ return ScopeTypeModule;
+ }
ASSERT(context_->IsWithContext());
return ScopeTypeWith;
}
@@ -11349,11 +11109,10 @@ class ScopeIterator {
switch (Type()) {
case ScopeIterator::ScopeTypeGlobal:
return Handle<JSObject>(CurrentContext()->global());
- case ScopeIterator::ScopeTypeLocal: {
- ASSERT(nested_scope_chain_.length() == 1);
+ case ScopeIterator::ScopeTypeLocal:
// Materialize the content of the local scope into a JSObject.
- return MaterializeLocalScope(isolate_, frame_, inlined_frame_index_);
- }
+ ASSERT(nested_scope_chain_.length() == 1);
+ return MaterializeLocalScope(isolate_, frame_, inlined_jsframe_index_);
case ScopeIterator::ScopeTypeWith:
// Return the with object.
return Handle<JSObject>(JSObject::cast(CurrentContext()->extension()));
@@ -11364,22 +11123,22 @@ class ScopeIterator {
return MaterializeClosure(isolate_, CurrentContext());
case ScopeIterator::ScopeTypeBlock:
return MaterializeBlockScope(isolate_, CurrentContext());
+ case ScopeIterator::ScopeTypeModule:
+ return MaterializeModuleScope(isolate_, CurrentContext());
}
UNREACHABLE();
return Handle<JSObject>();
}
- Handle<SerializedScopeInfo> CurrentScopeInfo() {
+ Handle<ScopeInfo> CurrentScopeInfo() {
if (!nested_scope_chain_.is_empty()) {
return nested_scope_chain_.last();
} else if (context_->IsBlockContext()) {
- return Handle<SerializedScopeInfo>(
- SerializedScopeInfo::cast(context_->extension()));
+ return Handle<ScopeInfo>(ScopeInfo::cast(context_->extension()));
} else if (context_->IsFunctionContext()) {
- return Handle<SerializedScopeInfo>(
- context_->closure()->shared()->scope_info());
+ return Handle<ScopeInfo>(context_->closure()->shared()->scope_info());
}
- return Handle<SerializedScopeInfo>::null();
+ return Handle<ScopeInfo>::null();
}
// Return the context for this scope. For the local context there might not
@@ -11406,8 +11165,7 @@ class ScopeIterator {
case ScopeIterator::ScopeTypeLocal: {
PrintF("Local:\n");
- ScopeInfo<> scope_info(function_->shared()->scope_info());
- scope_info.Print();
+ function_->shared()->scope_info()->Print();
if (!CurrentContext().is_null()) {
CurrentContext()->Print();
if (CurrentContext()->has_extension()) {
@@ -11452,10 +11210,10 @@ class ScopeIterator {
private:
Isolate* isolate_;
JavaScriptFrame* frame_;
- int inlined_frame_index_;
+ int inlined_jsframe_index_;
Handle<JSFunction> function_;
Handle<Context> context_;
- List<Handle<SerializedScopeInfo> > nested_scope_chain_;
+ List<Handle<ScopeInfo> > nested_scope_chain_;
DISALLOW_IMPLICIT_CONSTRUCTORS(ScopeIterator);
};
@@ -11471,7 +11229,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetScopeCount) {
RUNTIME_ARGUMENTS(isolate, args));
if (!maybe_check->ToObject(&check)) return maybe_check;
}
- CONVERT_CHECKED(Smi, wrapped_id, args[1]);
+ CONVERT_SMI_ARG_CHECKED(wrapped_id, 1);
// Get the frame where the debugging is performed.
StackFrame::Id id = UnwrapFrameId(wrapped_id);
@@ -11513,8 +11271,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetScopeDetails) {
RUNTIME_ARGUMENTS(isolate, args));
if (!maybe_check->ToObject(&check)) return maybe_check;
}
- CONVERT_CHECKED(Smi, wrapped_id, args[1]);
- CONVERT_NUMBER_CHECKED(int, inlined_frame_index, Int32, args[2]);
+ CONVERT_SMI_ARG_CHECKED(wrapped_id, 1);
+ CONVERT_NUMBER_CHECKED(int, inlined_jsframe_index, Int32, args[2]);
CONVERT_NUMBER_CHECKED(int, index, Int32, args[3]);
// Get the frame where the debugging is performed.
@@ -11524,7 +11282,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetScopeDetails) {
// Find the requested scope.
int n = 0;
- ScopeIterator it(isolate, frame, inlined_frame_index);
+ ScopeIterator it(isolate, frame, inlined_jsframe_index);
for (; !it.Done() && n < index; it.Next()) {
n++;
}
@@ -11653,7 +11411,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetThreadDetails) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_SetDisableBreak) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_BOOLEAN_CHECKED(disable_break, args[0]);
+ CONVERT_BOOLEAN_ARG_CHECKED(disable_break, 0);
isolate->debug()->set_disable_break(disable_break);
return isolate->heap()->undefined_value();
}
@@ -11663,7 +11421,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetBreakLocations) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
- CONVERT_ARG_CHECKED(JSFunction, fun, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, fun, 0);
Handle<SharedFunctionInfo> shared(fun->shared());
// Find the number of break points
Handle<Object> break_locations = Debug::GetSourceBreakLocations(shared);
@@ -11674,12 +11432,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetBreakLocations) {
}
-// Return the value of breakpoint_relocation flag
-RUNTIME_FUNCTION(MaybeObject*, Runtime_AllowBreakPointRelocation) {
- return Smi::FromInt(FLAG_breakpoint_relocation);
-}
-
-
// Set a break point in a function
// args[0]: function
// args[1]: number: break source position (within the function source)
@@ -11687,7 +11439,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_AllowBreakPointRelocation) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_SetFunctionBreakPoint) {
HandleScope scope(isolate);
ASSERT(args.length() == 3);
- CONVERT_ARG_CHECKED(JSFunction, fun, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, fun, 0);
Handle<SharedFunctionInfo> shared(fun->shared());
CONVERT_NUMBER_CHECKED(int32_t, source_position, Int32, args[1]);
RUNTIME_ASSERT(source_position >= 0);
@@ -11793,7 +11545,7 @@ Object* Runtime::FindSharedFunctionInfoInScript(Isolate* isolate,
RUNTIME_FUNCTION(MaybeObject*, Runtime_SetScriptBreakPoint) {
HandleScope scope(isolate);
ASSERT(args.length() == 3);
- CONVERT_ARG_CHECKED(JSValue, wrapper, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSValue, wrapper, 0);
CONVERT_NUMBER_CHECKED(int32_t, source_position, Int32, args[1]);
RUNTIME_ASSERT(source_position >= 0);
Handle<Object> break_point_object_arg = args.at<Object>(2);
@@ -11843,7 +11595,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ChangeBreakOnException) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
RUNTIME_ASSERT(args[0]->IsNumber());
- CONVERT_BOOLEAN_CHECKED(enable, args[1]);
+ CONVERT_BOOLEAN_ARG_CHECKED(enable, 1);
// If the number doesn't match an enum value, the ChangeBreakOnException
// function will default to affecting caught exceptions.
@@ -11928,12 +11680,12 @@ static Handle<Context> CopyNestedScopeContextChain(Isolate* isolate,
Handle<JSFunction> function,
Handle<Context> base,
JavaScriptFrame* frame,
- int inlined_frame_index) {
+ int inlined_jsframe_index) {
HandleScope scope(isolate);
- List<Handle<SerializedScopeInfo> > scope_chain;
+ List<Handle<ScopeInfo> > scope_chain;
List<Handle<Context> > context_chain;
- ScopeIterator it(isolate, frame, inlined_frame_index);
+ ScopeIterator it(isolate, frame, inlined_jsframe_index);
for (; it.Type() != ScopeIterator::ScopeTypeGlobal &&
it.Type() != ScopeIterator::ScopeTypeLocal ; it.Next()) {
ASSERT(!it.Done());
@@ -11946,7 +11698,7 @@ static Handle<Context> CopyNestedScopeContextChain(Isolate* isolate,
// Iteratively copy and or materialize the nested contexts.
while (!scope_chain.is_empty()) {
- Handle<SerializedScopeInfo> scope_info = scope_chain.RemoveLast();
+ Handle<ScopeInfo> scope_info = scope_chain.RemoveLast();
Handle<Context> current = context_chain.RemoveLast();
ASSERT(!(scope_info->HasContext() & current.is_null()));
@@ -11990,33 +11742,32 @@ static Handle<Context> CopyNestedScopeContextChain(Isolate* isolate,
// Runtime_DebugEvaluate.
static Handle<Object> GetArgumentsObject(Isolate* isolate,
JavaScriptFrame* frame,
- int inlined_frame_index,
- Handle<JSFunction> function,
- Handle<SerializedScopeInfo> scope_info,
- const ScopeInfo<>* sinfo,
+ FrameInspector* frame_inspector,
+ Handle<ScopeInfo> scope_info,
Handle<Context> function_context) {
// Try to find the value of 'arguments' to pass as parameter. If it is not
// found (that is the debugged function does not reference 'arguments' and
// does not support eval) then create an 'arguments' object.
int index;
- if (sinfo->number_of_stack_slots() > 0) {
+ if (scope_info->StackLocalCount() > 0) {
index = scope_info->StackSlotIndex(isolate->heap()->arguments_symbol());
if (index != -1) {
return Handle<Object>(frame->GetExpression(index), isolate);
}
}
- if (sinfo->number_of_context_slots() > Context::MIN_CONTEXT_SLOTS) {
- index = scope_info->ContextSlotIndex(isolate->heap()->arguments_symbol(),
- NULL);
+ if (scope_info->HasHeapAllocatedLocals()) {
+ VariableMode mode;
+ InitializationFlag init_flag;
+ index = scope_info->ContextSlotIndex(
+ isolate->heap()->arguments_symbol(), &mode, &init_flag);
if (index != -1) {
return Handle<Object>(function_context->get(index), isolate);
}
}
- FrameInspector frame_inspector(frame, inlined_frame_index, isolate);
-
- int length = frame_inspector.GetParametersCount();
+ Handle<JSFunction> function(JSFunction::cast(frame_inspector->GetFunction()));
+ int length = frame_inspector->GetParametersCount();
Handle<JSObject> arguments =
isolate->factory()->NewArgumentsObject(function, length);
Handle<FixedArray> array = isolate->factory()->NewFixedArray(length);
@@ -12024,7 +11775,7 @@ static Handle<Object> GetArgumentsObject(Isolate* isolate,
AssertNoAllocation no_gc;
WriteBarrierMode mode = array->GetWriteBarrierMode(no_gc);
for (int i = 0; i < length; i++) {
- array->set(i, frame_inspector.GetParameter(i), mode);
+ array->set(i, frame_inspector->GetParameter(i), mode);
}
arguments->set_elements(*array);
return arguments;
@@ -12059,10 +11810,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluate) {
return maybe_check_result;
}
}
- CONVERT_CHECKED(Smi, wrapped_id, args[1]);
- CONVERT_NUMBER_CHECKED(int, inlined_frame_index, Int32, args[2]);
- CONVERT_ARG_CHECKED(String, source, 3);
- CONVERT_BOOLEAN_CHECKED(disable_break, args[4]);
+ CONVERT_SMI_ARG_CHECKED(wrapped_id, 1);
+ CONVERT_NUMBER_CHECKED(int, inlined_jsframe_index, Int32, args[2]);
+ CONVERT_ARG_HANDLE_CHECKED(String, source, 3);
+ CONVERT_BOOLEAN_ARG_CHECKED(disable_break, 4);
Handle<Object> additional_context(args[5]);
// Handle the processing of break.
@@ -12072,10 +11823,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluate) {
StackFrame::Id id = UnwrapFrameId(wrapped_id);
JavaScriptFrameIterator it(isolate, id);
JavaScriptFrame* frame = it.frame();
- Handle<JSFunction> function(JSFunction::cast(frame->function()));
- Handle<SerializedScopeInfo> scope_info(function->shared()->scope_info());
- ScopeInfo<> sinfo(*scope_info);
- bool qml_mode = function->shared()->qml_mode();
+ FrameInspector frame_inspector(frame, inlined_jsframe_index, isolate);
+ Handle<JSFunction> function(JSFunction::cast(frame_inspector.GetFunction()));
+ Handle<ScopeInfo> scope_info(function->shared()->scope_info());
// Traverse the saved contexts chain to find the active context for the
// selected frame.
@@ -12095,14 +11845,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluate) {
isolate->factory()->undefined_value());
go_between->set_context(function->context());
#ifdef DEBUG
- ScopeInfo<> go_between_sinfo(go_between->shared()->scope_info());
- ASSERT(go_between_sinfo.number_of_parameters() == 0);
- ASSERT(go_between_sinfo.number_of_context_slots() == 0);
+ Handle<ScopeInfo> go_between_scope_info(go_between->shared()->scope_info());
+ ASSERT(go_between_scope_info->ParameterCount() == 0);
+ ASSERT(go_between_scope_info->ContextLocalCount() == 0);
#endif
// Materialize the content of the local scope into a JSObject.
- Handle<JSObject> local_scope = MaterializeLocalScope(
- isolate, frame, inlined_frame_index);
+ Handle<JSObject> local_scope = MaterializeLocalScopeWithFrameInspector(
+ isolate, frame, &frame_inspector);
RETURN_IF_EMPTY_HANDLE(isolate, local_scope);
// Allocate a new context for the debug evaluation and set the extension
@@ -12115,14 +11865,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluate) {
Handle<Context> frame_context(Context::cast(frame->context()));
Handle<Context> function_context;
// Get the function's context if it has one.
- if (scope_info->HasHeapAllocatedLocals()) {
+ if (scope_info->HasContext()) {
function_context = Handle<Context>(frame_context->declaration_context());
}
context = CopyNestedScopeContextChain(isolate,
go_between,
context,
frame,
- inlined_frame_index);
+ inlined_jsframe_index);
if (additional_context->IsJSObject()) {
Handle<JSObject> extension = Handle<JSObject>::cast(additional_context);
@@ -12146,8 +11896,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluate) {
Compiler::CompileEval(function_source,
context,
context->IsGlobalContext(),
- kNonStrictMode,
- qml_mode);
+ CLASSIC_MODE,
+ RelocInfo::kNoPosition);
if (shared.is_null()) return Failure::Exception();
Handle<JSFunction> compiled_function =
isolate->factory()->NewFunctionFromSharedFunctionInfo(shared, context);
@@ -12157,14 +11907,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluate) {
Handle<Object> receiver(frame->receiver(), isolate);
Handle<Object> evaluation_function =
Execution::Call(compiled_function, receiver, 0, NULL,
- &has_pending_exception, false,
- Handle<Object>(function->context()->qml_global()));
+ &has_pending_exception);
if (has_pending_exception) return Failure::Exception();
Handle<Object> arguments = GetArgumentsObject(isolate,
- frame, inlined_frame_index,
- function, scope_info,
- &sinfo, function_context);
+ frame,
+ &frame_inspector,
+ scope_info,
+ function_context);
// Invoke the evaluation function and return the result.
Handle<Object> argv[] = { arguments, source };
@@ -12199,8 +11949,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluateGlobal) {
return maybe_check_result;
}
}
- CONVERT_ARG_CHECKED(String, source, 1);
- CONVERT_BOOLEAN_CHECKED(disable_break, args[2]);
+ CONVERT_ARG_HANDLE_CHECKED(String, source, 1);
+ CONVERT_BOOLEAN_ARG_CHECKED(disable_break, 2);
Handle<Object> additional_context(args[3]);
// Handle the processing of break.
@@ -12223,15 +11973,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluateGlobal) {
bool is_global = true;
if (additional_context->IsJSObject()) {
- // Create a function context first, than put 'with' context on top of it.
- Handle<JSFunction> go_between = isolate->factory()->NewFunction(
- isolate->factory()->empty_string(),
- isolate->factory()->undefined_value());
- go_between->set_context(*context);
- context =
- isolate->factory()->NewFunctionContext(
- Context::MIN_CONTEXT_SLOTS, go_between);
- context->set_extension(JSObject::cast(*additional_context));
+ // Create a new with context with the additional context information between
+ // the context of the debugged function and the eval code to be executed.
+ context = isolate->factory()->NewWithContext(
+ Handle<JSFunction>(context->closure()),
+ context,
+ Handle<JSObject>::cast(additional_context));
is_global = false;
}
@@ -12239,7 +11986,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugEvaluateGlobal) {
// Currently, the eval code will be executed in non-strict mode,
// even in the strict code context.
Handle<SharedFunctionInfo> shared =
- Compiler::CompileEval(source, context, is_global, kNonStrictMode, false);
+ Compiler::CompileEval(source,
+ context,
+ is_global,
+ CLASSIC_MODE,
+ RelocInfo::kNoPosition);
if (shared.is_null()) return Failure::Exception();
Handle<JSFunction> compiled_function =
Handle<JSFunction>(
@@ -12273,7 +12024,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetLoadedScripts) {
// because using
// instances->set(i, *GetScriptWrapper(script))
// is unsafe as GetScriptWrapper might call GC and the C++ compiler might
- // already have deferenced the instances handle.
+ // already have dereferenced the instances handle.
Handle<JSValue> wrapper = GetScriptWrapper(script);
instances->set(i, *wrapper);
}
@@ -12364,14 +12115,15 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugReferencedBy) {
ASSERT(args.length() == 3);
// First perform a full GC in order to avoid references from dead objects.
- isolate->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ isolate->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+ "%DebugReferencedBy");
// The heap iterator reserves the right to do a GC to make the heap iterable.
// Due to the GC above we know it won't need to do that, but it seems cleaner
// to get the heap iterator constructed before we start having unprotected
// Object* locals that are not protected by handles.
// Check parameters.
- CONVERT_CHECKED(JSObject, target, args[0]);
+ CONVERT_ARG_CHECKED(JSObject, target, 0);
Object* instance_filter = args[1];
RUNTIME_ASSERT(instance_filter->IsUndefined() ||
instance_filter->IsJSObject());
@@ -12455,10 +12207,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugConstructedBy) {
ASSERT(args.length() == 2);
// First perform a full GC in order to avoid dead objects.
- isolate->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ isolate->heap()->CollectAllGarbage(Heap::kMakeHeapIterableMask,
+ "%DebugConstructedBy");
// Check parameters.
- CONVERT_CHECKED(JSFunction, constructor, args[0]);
+ CONVERT_ARG_CHECKED(JSFunction, constructor, 0);
CONVERT_NUMBER_CHECKED(int32_t, max_references, Int32, args[1]);
RUNTIME_ASSERT(max_references >= 0);
@@ -12502,13 +12255,32 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugConstructedBy) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugGetPrototype) {
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSObject, obj, args[0]);
+ CONVERT_ARG_CHECKED(JSObject, obj, 0);
// Use the __proto__ accessor.
return Accessors::ObjectPrototype.getter(obj, NULL);
}
+// Patches script source (should be called upon BeforeCompile event).
+RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugSetScriptSource) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 2);
+
+ CONVERT_ARG_HANDLE_CHECKED(JSValue, script_wrapper, 0);
+ Handle<String> source(String::cast(args[1]));
+
+ RUNTIME_ASSERT(script_wrapper->value()->IsScript());
+ Handle<Script> script(Script::cast(script_wrapper->value()));
+
+ int compilation_state = Smi::cast(script->compilation_state())->value();
+ RUNTIME_ASSERT(compilation_state == Script::COMPILATION_STATE_INITIAL);
+ script->set_source(*source);
+
+ return isolate->heap()->undefined_value();
+}
+
+
RUNTIME_FUNCTION(MaybeObject*, Runtime_SystemBreak) {
ASSERT(args.length() == 0);
CPU::DebugBreak();
@@ -12521,7 +12293,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugDisassembleFunction) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
// Get the function and make sure it is compiled.
- CONVERT_ARG_CHECKED(JSFunction, func, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, func, 0);
Handle<SharedFunctionInfo> shared(func->shared());
if (!SharedFunctionInfo::EnsureCompiled(shared, KEEP_EXCEPTION)) {
return Failure::Exception();
@@ -12537,7 +12309,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugDisassembleConstructor) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
// Get the function and make sure it is compiled.
- CONVERT_ARG_CHECKED(JSFunction, func, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, func, 0);
Handle<SharedFunctionInfo> shared(func->shared());
if (!SharedFunctionInfo::EnsureCompiled(shared, KEEP_EXCEPTION)) {
return Failure::Exception();
@@ -12552,7 +12324,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionGetInferredName) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
- CONVERT_CHECKED(JSFunction, f, args[0]);
+ CONVERT_ARG_CHECKED(JSFunction, f, 0);
return f->shared()->inferred_name();
}
@@ -12589,7 +12361,7 @@ RUNTIME_FUNCTION(MaybeObject*,
Runtime_LiveEditFindSharedFunctionInfosForScript) {
ASSERT(args.length() == 1);
HandleScope scope(isolate);
- CONVERT_CHECKED(JSValue, script_value, args[0]);
+ CONVERT_ARG_CHECKED(JSValue, script_value, 0);
Handle<Script> script = Handle<Script>(Script::cast(script_value->value()));
@@ -12635,8 +12407,8 @@ RUNTIME_FUNCTION(MaybeObject*,
RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditGatherCompileInfo) {
ASSERT(args.length() == 2);
HandleScope scope(isolate);
- CONVERT_CHECKED(JSValue, script, args[0]);
- CONVERT_ARG_CHECKED(String, source, 1);
+ CONVERT_ARG_CHECKED(JSValue, script, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, source, 1);
Handle<Script> script_handle = Handle<Script>(Script::cast(script->value()));
JSArray* result = LiveEdit::GatherCompileInfo(script_handle, source);
@@ -12654,13 +12426,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditGatherCompileInfo) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditReplaceScript) {
ASSERT(args.length() == 3);
HandleScope scope(isolate);
- CONVERT_CHECKED(JSValue, original_script_value, args[0]);
- CONVERT_ARG_CHECKED(String, new_source, 1);
+ CONVERT_ARG_CHECKED(JSValue, original_script_value, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, new_source, 1);
Handle<Object> old_script_name(args[2], isolate);
- CONVERT_CHECKED(Script, original_script_pointer,
- original_script_value->value());
- Handle<Script> original_script(original_script_pointer);
+ RUNTIME_ASSERT(original_script_value->value()->IsScript());
+ Handle<Script> original_script(Script::cast(original_script_value->value()));
Object* old_script = LiveEdit::ChangeScriptSource(original_script,
new_source,
@@ -12678,7 +12449,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditReplaceScript) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditFunctionSourceUpdated) {
ASSERT(args.length() == 1);
HandleScope scope(isolate);
- CONVERT_ARG_CHECKED(JSArray, shared_info, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSArray, shared_info, 0);
return LiveEdit::FunctionSourceUpdated(shared_info);
}
@@ -12687,8 +12458,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditFunctionSourceUpdated) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditReplaceFunctionCode) {
ASSERT(args.length() == 2);
HandleScope scope(isolate);
- CONVERT_ARG_CHECKED(JSArray, new_compile_info, 0);
- CONVERT_ARG_CHECKED(JSArray, shared_info, 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSArray, new_compile_info, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSArray, shared_info, 1);
return LiveEdit::ReplaceFunctionCode(new_compile_info, shared_info);
}
@@ -12703,7 +12474,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditFunctionSetScript) {
if (function_object->IsJSValue()) {
Handle<JSValue> function_wrapper = Handle<JSValue>::cast(function_object);
if (script_object->IsJSValue()) {
- CONVERT_CHECKED(Script, script, JSValue::cast(*script_object)->value());
+ RUNTIME_ASSERT(JSValue::cast(*script_object)->value()->IsScript());
+ Script* script = Script::cast(JSValue::cast(*script_object)->value());
script_object = Handle<Object>(script, isolate);
}
@@ -12723,9 +12495,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditReplaceRefToNestedFunction) {
ASSERT(args.length() == 3);
HandleScope scope(isolate);
- CONVERT_ARG_CHECKED(JSValue, parent_wrapper, 0);
- CONVERT_ARG_CHECKED(JSValue, orig_wrapper, 1);
- CONVERT_ARG_CHECKED(JSValue, subst_wrapper, 2);
+ CONVERT_ARG_HANDLE_CHECKED(JSValue, parent_wrapper, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSValue, orig_wrapper, 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSValue, subst_wrapper, 2);
LiveEdit::ReplaceRefToNestedFunction(parent_wrapper, orig_wrapper,
subst_wrapper);
@@ -12742,8 +12514,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditReplaceRefToNestedFunction) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditPatchFunctionPositions) {
ASSERT(args.length() == 2);
HandleScope scope(isolate);
- CONVERT_ARG_CHECKED(JSArray, shared_array, 0);
- CONVERT_ARG_CHECKED(JSArray, position_change_array, 1);
+ CONVERT_ARG_HANDLE_CHECKED(JSArray, shared_array, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSArray, position_change_array, 1);
return LiveEdit::PatchFunctionPositions(shared_array, position_change_array);
}
@@ -12756,8 +12528,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditPatchFunctionPositions) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditCheckAndDropActivations) {
ASSERT(args.length() == 2);
HandleScope scope(isolate);
- CONVERT_ARG_CHECKED(JSArray, shared_array, 0);
- CONVERT_BOOLEAN_CHECKED(do_drop, args[1]);
+ CONVERT_ARG_HANDLE_CHECKED(JSArray, shared_array, 0);
+ CONVERT_BOOLEAN_ARG_CHECKED(do_drop, 1);
return *LiveEdit::CheckAndDropActivations(shared_array, do_drop);
}
@@ -12768,8 +12540,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditCheckAndDropActivations) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditCompareStrings) {
ASSERT(args.length() == 2);
HandleScope scope(isolate);
- CONVERT_ARG_CHECKED(String, s1, 0);
- CONVERT_ARG_CHECKED(String, s2, 1);
+ CONVERT_ARG_HANDLE_CHECKED(String, s1, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, s2, 1);
return *LiveEdit::CompareStrings(s1, s2);
}
@@ -12780,7 +12552,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_LiveEditCompareStrings) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFunctionCodePositionFromSource) {
ASSERT(args.length() == 2);
HandleScope scope(isolate);
- CONVERT_ARG_CHECKED(JSFunction, function, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
CONVERT_NUMBER_CHECKED(int32_t, source_position, Int32, args[1]);
Handle<Code> code(function->code(), isolate);
@@ -12817,8 +12589,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFunctionCodePositionFromSource) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_ExecuteInDebugContext) {
ASSERT(args.length() == 2);
HandleScope scope(isolate);
- CONVERT_ARG_CHECKED(JSFunction, function, 0);
- CONVERT_BOOLEAN_CHECKED(without_debugger, args[1]);
+ CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
+ CONVERT_BOOLEAN_ARG_CHECKED(without_debugger, 1);
Handle<Object> result;
bool pending_exception;
@@ -12842,7 +12614,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ExecuteInDebugContext) {
// Sets a v8 flag.
RUNTIME_FUNCTION(MaybeObject*, Runtime_SetFlags) {
- CONVERT_CHECKED(String, arg, args[0]);
+ CONVERT_ARG_CHECKED(String, arg, 0);
SmartArrayPointer<char> flags =
arg->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
FlagList::SetFlagsFromString(*flags, StrLength(*flags));
@@ -12853,7 +12625,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetFlags) {
// Performs a GC.
// Presently, it only does a full GC.
RUNTIME_FUNCTION(MaybeObject*, Runtime_CollectGarbage) {
- isolate->heap()->CollectAllGarbage(true);
+ isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags, "%CollectGarbage");
return isolate->heap()->undefined_value();
}
@@ -12912,7 +12684,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DumpLOL) {
CONVERT_SMI_ARG_CHECKED(id2, 1);
CONVERT_SMI_ARG_CHECKED(start, 2);
CONVERT_SMI_ARG_CHECKED(count, 3);
- CONVERT_ARG_CHECKED(JSObject, filter_obj, 4);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, filter_obj, 4);
EnterDebugger enter_debugger;
return LiveObjectList::Dump(id1, id2, start, count, filter_obj);
#else
@@ -12939,7 +12711,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLOLObj) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLOLObjId) {
#ifdef LIVE_OBJECT_LIST
HandleScope scope;
- CONVERT_ARG_CHECKED(String, address, 0);
+ CONVERT_ARG_HANDLE_CHECKED(String, address, 0);
Object* result = LiveObjectList::GetObjId(address);
return result;
#else
@@ -12957,7 +12729,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLOLObjRetainers) {
RUNTIME_ASSERT(args[2]->IsUndefined() || args[2]->IsBoolean());
RUNTIME_ASSERT(args[3]->IsUndefined() || args[3]->IsSmi());
RUNTIME_ASSERT(args[4]->IsUndefined() || args[4]->IsSmi());
- CONVERT_ARG_CHECKED(JSObject, filter_obj, 5);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, filter_obj, 5);
Handle<JSObject> instance_filter;
if (args[1]->IsJSObject()) {
@@ -13058,7 +12830,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SummarizeLOL) {
HandleScope scope;
CONVERT_SMI_ARG_CHECKED(id1, 0);
CONVERT_SMI_ARG_CHECKED(id2, 1);
- CONVERT_ARG_CHECKED(JSObject, filter_obj, 2);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, filter_obj, 2);
EnterDebugger enter_debugger;
return LiveObjectList::Summarize(id1, id2, filter_obj);
@@ -13126,7 +12898,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetScript) {
ASSERT(args.length() == 1);
- CONVERT_CHECKED(String, script_name, args[0]);
+ CONVERT_ARG_CHECKED(String, script_name, 0);
// Find the requested script.
Handle<Object> result =
@@ -13180,9 +12952,10 @@ static bool ShowFrameInStackTrace(StackFrame* raw_frame,
// element segments each containing a receiver, function, code and
// native code offset.
RUNTIME_FUNCTION(MaybeObject*, Runtime_CollectStackTrace) {
- ASSERT_EQ(args.length(), 2);
- Handle<Object> caller = args.at<Object>(0);
- CONVERT_NUMBER_CHECKED(int32_t, limit, Int32, args[1]);
+ ASSERT_EQ(args.length(), 3);
+ CONVERT_ARG_HANDLE_CHECKED(JSObject, error_object, 0);
+ Handle<Object> caller = args.at<Object>(1);
+ CONVERT_NUMBER_CHECKED(int32_t, limit, Int32, args[2]);
HandleScope scope(isolate);
Factory* factory = isolate->factory();
@@ -13232,6 +13005,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CollectStackTrace) {
iter.Advance();
}
Handle<JSArray> result = factory->NewJSArrayWithElements(elements);
+ // Capture and attach a more detailed stack trace if necessary.
+ isolate->CaptureAndSetCurrentStackTraceFor(error_object);
result->set_length(Smi::FromInt(cursor));
return *result;
}
@@ -13263,7 +13038,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Abort) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFromCache) {
// This is only called from codegen, so checks might be more lax.
- CONVERT_CHECKED(JSFunctionResultCache, cache, args[0]);
+ CONVERT_ARG_CHECKED(JSFunctionResultCache, cache, 0);
Object* key = args[1];
int finger_index = cache->finger_index();
@@ -13359,8 +13134,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetFromCache) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_NewMessageObject) {
HandleScope scope(isolate);
- CONVERT_ARG_CHECKED(String, type, 0);
- CONVERT_ARG_CHECKED(JSArray, arguments, 1);
+ CONVERT_ARG_HANDLE_CHECKED(String, type, 0);
+ CONVERT_ARG_HANDLE_CHECKED(JSArray, arguments, 1);
return *isolate->factory()->NewJSMessageObject(
type,
arguments,
@@ -13373,25 +13148,25 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NewMessageObject) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_MessageGetType) {
- CONVERT_CHECKED(JSMessageObject, message, args[0]);
+ CONVERT_ARG_CHECKED(JSMessageObject, message, 0);
return message->type();
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_MessageGetArguments) {
- CONVERT_CHECKED(JSMessageObject, message, args[0]);
+ CONVERT_ARG_CHECKED(JSMessageObject, message, 0);
return message->arguments();
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_MessageGetStartPosition) {
- CONVERT_CHECKED(JSMessageObject, message, args[0]);
+ CONVERT_ARG_CHECKED(JSMessageObject, message, 0);
return Smi::FromInt(message->start_position());
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_MessageGetScript) {
- CONVERT_CHECKED(JSMessageObject, message, args[0]);
+ CONVERT_ARG_CHECKED(JSMessageObject, message, 0);
return message->script();
}
@@ -13445,8 +13220,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ListNatives) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_Log) {
ASSERT(args.length() == 2);
- CONVERT_CHECKED(String, format, args[0]);
- CONVERT_CHECKED(JSArray, elms, args[1]);
+ CONVERT_ARG_CHECKED(String, format, 0);
+ CONVERT_ARG_CHECKED(JSArray, elms, 1);
String::FlatContent format_content = format->GetFlatContent();
RUNTIME_ASSERT(format_content.IsAscii());
Vector<const char> chars = format_content.ToAsciiVector();
@@ -13463,7 +13238,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_IS_VAR) {
#define ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(Name) \
RUNTIME_FUNCTION(MaybeObject*, Runtime_Has##Name) { \
- CONVERT_CHECKED(JSObject, obj, args[0]); \
+ CONVERT_ARG_CHECKED(JSObject, obj, 0); \
return isolate->heap()->ToBoolean(obj->Has##Name()); \
}
@@ -13487,8 +13262,8 @@ ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(ExternalDoubleElements)
RUNTIME_FUNCTION(MaybeObject*, Runtime_HaveSameMap) {
ASSERT(args.length() == 2);
- CONVERT_CHECKED(JSObject, obj1, args[0]);
- CONVERT_CHECKED(JSObject, obj2, args[1]);
+ CONVERT_ARG_CHECKED(JSObject, obj1, 0);
+ CONVERT_ARG_CHECKED(JSObject, obj2, 1);
return isolate->heap()->ToBoolean(obj1->map() == obj2->map());
}
@@ -13562,14 +13337,17 @@ void Runtime::PerformGC(Object* result) {
if (isolate->heap()->new_space()->AddFreshPage()) {
return;
}
+
// Try to do a garbage collection; ignore it if it fails. The C
// entry stub will throw an out-of-memory exception in that case.
- isolate->heap()->CollectGarbage(failure->allocation_space());
+ isolate->heap()->CollectGarbage(failure->allocation_space(),
+ "Runtime::PerformGC");
} else {
// Handle last resort GC and make sure to allow future allocations
// to grow the heap without causing GCs (if possible).
isolate->counters()->gc_last_resort_from_js()->Increment();
- isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags);
+ isolate->heap()->CollectAllGarbage(Heap::kNoGCFlags,
+ "Runtime::PerformGC");
}
}
diff --git a/src/3rdparty/v8/src/runtime.h b/src/3rdparty/v8/src/runtime.h
index c92c457..fe9cfd9 100644
--- a/src/3rdparty/v8/src/runtime.h
+++ b/src/3rdparty/v8/src/runtime.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -69,9 +69,6 @@ namespace internal {
\
F(GetPrototype, 1, 1) \
F(IsInPrototypeChain, 2, 1) \
- F(SetHiddenPrototype, 2, 1) \
- \
- F(IsConstructCall, 0, 1) \
\
F(GetOwnProperty, 2, 1) \
\
@@ -80,6 +77,7 @@ namespace internal {
\
/* Utilities */ \
F(CheckIsBootstrapping, 0, 1) \
+ F(Call, -1 /* >= 2 */, 1) \
F(Apply, 5, 1) \
F(GetFunctionDelegate, 1, 1) \
F(GetConstructorDelegate, 1, 1) \
@@ -91,7 +89,7 @@ namespace internal {
F(NotifyOSR, 0, 1) \
F(DeoptimizeFunction, 1, 1) \
F(RunningInSimulator, 0, 1) \
- F(OptimizeFunctionOnNextCall, 1, 1) \
+ F(OptimizeFunctionOnNextCall, -1, 1) \
F(GetOptimizationStatus, 1, 1) \
F(GetOptimizationCount, 1, 1) \
F(CompileForOnStackReplacement, 1, 1) \
@@ -143,7 +141,7 @@ namespace internal {
F(StringAdd, 2, 1) \
F(StringBuilderConcat, 3, 1) \
F(StringBuilderJoin, 3, 1) \
- F(SparseJoinWithSeparator, 3, 1) \
+ F(SparseJoinWithSeparator, 3, 1) \
\
/* Bit operations */ \
F(NumberOr, 2, 1) \
@@ -158,7 +156,6 @@ namespace internal {
/* Comparisons */ \
F(NumberEquals, 2, 1) \
F(StringEquals, 2, 1) \
- F(UserObjectEquals, 2, 1) \
\
F(NumberCompare, 3, 1) \
F(SmiLexicographicCompare, 2, 1) \
@@ -198,6 +195,7 @@ namespace internal {
F(StringLocaleCompare, 2, 1) \
F(SubString, 3, 1) \
F(StringReplaceRegExpWithString, 4, 1) \
+ F(StringReplaceOneCharWithString, 3, 1) \
F(StringMatch, 3, 1) \
F(StringTrim, 3, 1) \
F(StringToArray, 2, 1) \
@@ -229,7 +227,7 @@ namespace internal {
F(FunctionIsAPIFunction, 1, 1) \
F(FunctionIsBuiltin, 1, 1) \
F(GetScript, 1, 1) \
- F(CollectStackTrace, 2, 1) \
+ F(CollectStackTrace, 3, 1) \
F(GetV8Version, 0, 1) \
\
F(ClassOf, 1, 1) \
@@ -246,10 +244,9 @@ namespace internal {
F(DateCurrentTime, 0, 1) \
F(DateParseString, 2, 1) \
F(DateLocalTimezone, 1, 1) \
- F(DateLocalTimeOffset, 0, 1) \
- F(DateDaylightSavingsOffset, 1, 1) \
+ F(DateToUTC, 1, 1) \
F(DateMakeDay, 2, 1) \
- F(DateYMDFromTime, 2, 1) \
+ F(DateSetValue, 3, 1) \
\
/* Numbers */ \
\
@@ -274,14 +271,10 @@ namespace internal {
F(SwapElements, 3, 1) \
\
/* Getters and Setters */ \
- F(DefineAccessor, -1 /* 4 or 5 */, 1) \
F(LookupAccessor, 3, 1) \
\
/* Literals */ \
F(MaterializeRegExpLiteral, 4, 1)\
- F(CreateArrayLiteralBoilerplate, 4, 1) \
- F(CloneLiteralBoilerplate, 1, 1) \
- F(CloneShallowLiteralBoilerplate, 1, 1) \
F(CreateObjectLiteral, 4, 1) \
F(CreateObjectLiteralShallow, 4, 1) \
F(CreateArrayLiteral, 3, 1) \
@@ -322,6 +315,7 @@ namespace internal {
F(ReThrow, 1, 1) \
F(ThrowReferenceError, 1, 1) \
F(StackGuard, 0, 1) \
+ F(Interrupt, 0, 1) \
F(PromoteScheduledException, 0, 1) \
\
/* Contexts */ \
@@ -337,15 +331,14 @@ namespace internal {
/* Declarations and initialization */ \
F(DeclareGlobals, 3, 1) \
F(DeclareContextSlot, 4, 1) \
- F(InitializeVarGlobal, -1 /* 3 or 4 */, 1) \
- F(InitializeConstGlobal, 3, 1) \
+ F(InitializeVarGlobal, -1 /* 2 or 3 */, 1) \
+ F(InitializeConstGlobal, 2, 1) \
F(InitializeConstContextSlot, 3, 1) \
F(OptimizeObjectForAddingMultipleProperties, 2, 1) \
\
/* Debugging */ \
F(DebugPrint, 1, 1) \
F(DebugTrace, 0, 1) \
- F(TraceElementsKindTransition, 5, 1) \
F(TraceEnter, 0, 1) \
F(TraceExit, 1, 1) \
F(Abort, 2, 1) \
@@ -412,7 +405,6 @@ namespace internal {
F(GetThreadDetails, 2, 1) \
F(SetDisableBreak, 1, 1) \
F(GetBreakLocations, 1, 1) \
- F(AllowBreakPointRelocation, 0, 1) \
F(SetFunctionBreakPoint, 3, 1) \
F(SetScriptBreakPoint, 3, 1) \
F(ClearBreakPoint, 1, 1) \
@@ -426,6 +418,7 @@ namespace internal {
F(DebugReferencedBy, 3, 1) \
F(DebugConstructedBy, 2, 1) \
F(DebugGetPrototype, 1, 1) \
+ F(DebugSetScriptSource, 2, 1) \
F(SystemBreak, 0, 1) \
F(DebugDisassembleFunction, 1, 1) \
F(DebugDisassembleConstructor, 1, 1) \
@@ -493,11 +486,13 @@ namespace internal {
F(IsNonNegativeSmi, 1, 1) \
F(IsArray, 1, 1) \
F(IsRegExp, 1, 1) \
+ F(IsConstructCall, 0, 1) \
F(CallFunction, -1 /* receiver + n args + function */, 1) \
F(ArgumentsLength, 0, 1) \
F(Arguments, 1, 1) \
F(ValueOf, 1, 1) \
F(SetValueOf, 2, 1) \
+ F(DateField, 2 /* date object, field index */, 1) \
F(StringCharFromCode, 1, 1) \
F(StringCharAt, 2, 1) \
F(ObjectEquals, 2, 1) \
@@ -510,6 +505,7 @@ namespace internal {
F(MathPow, 2, 1) \
F(MathSin, 1, 1) \
F(MathCos, 1, 1) \
+ F(MathTan, 1, 1) \
F(MathSqrt, 1, 1) \
F(MathLog, 1, 1) \
F(IsRegExpEquivalent, 2, 1) \
@@ -524,7 +520,6 @@ namespace internal {
// a corresponding runtime function, that is called for slow cases.
// Entries have the form F(name, number of arguments, number of return values).
#define INLINE_RUNTIME_FUNCTION_LIST(F) \
- F(IsConstructCall, 0, 1) \
F(ClassOf, 1, 1) \
F(StringCharCodeAt, 2, 1) \
F(Log, 3, 1) \
@@ -634,6 +629,13 @@ class Runtime : public AllStatic {
// Get the intrinsic function with the given FunctionId.
static const Function* FunctionForId(FunctionId id);
+ static Handle<String> StringReplaceOneCharWithString(Isolate* isolate,
+ Handle<String> subject,
+ Handle<String> search,
+ Handle<String> replace,
+ bool* found,
+ int recursion_limit);
+
// General-purpose helper functions for runtime system.
static int StringMatch(Isolate* isolate,
Handle<String> sub,
@@ -683,15 +685,21 @@ class Runtime : public AllStatic {
// Helper functions used stubs.
static void PerformGC(Object* result);
+
+ // Used in runtime.cc and hydrogen's VisitArrayLiteral.
+ static Handle<Object> CreateArrayLiteralBoilerplate(
+ Isolate* isolate,
+ Handle<FixedArray> literals,
+ Handle<FixedArray> elements);
};
//---------------------------------------------------------------------------
// Constants used by interface to runtime functions.
-class DeclareGlobalsEvalFlag: public BitField<bool, 0, 1> {};
-class DeclareGlobalsStrictModeFlag: public BitField<StrictModeFlag, 1, 1> {};
-class DeclareGlobalsNativeFlag: public BitField<bool, 2, 1> {};
+class DeclareGlobalsEvalFlag: public BitField<bool, 0, 1> {};
+class DeclareGlobalsNativeFlag: public BitField<bool, 1, 1> {};
+class DeclareGlobalsLanguageMode: public BitField<LanguageMode, 2, 2> {};
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/runtime.js b/src/3rdparty/v8/src/runtime.js
index 108b928..53d9a39 100644
--- a/src/3rdparty/v8/src/runtime.js
+++ b/src/3rdparty/v8/src/runtime.js
@@ -39,16 +39,16 @@
-----------------------------------
*/
-// The following const declarations are shared with other native JS files.
-// They are all declared at this one spot to avoid const redeclaration errors.
-const $Object = global.Object;
-const $Array = global.Array;
-const $String = global.String;
-const $Number = global.Number;
-const $Function = global.Function;
-const $Boolean = global.Boolean;
-const $NaN = 0/0;
-const builtins = this;
+// The following declarations are shared with other native JS files.
+// They are all declared at this one spot to avoid redeclaration errors.
+var $Object = global.Object;
+var $Array = global.Array;
+var $String = global.String;
+var $Number = global.Number;
+var $Function = global.Function;
+var $Boolean = global.Boolean;
+var $NaN = 0/0;
+var builtins = this;
// ECMA-262 Section 11.9.3.
function EQUALS(y) {
@@ -458,7 +458,8 @@ function APPLY_PREPARE(args) {
}
if (!IS_SPEC_FUNCTION(this)) {
- throw %MakeTypeError('apply_non_function', [ %ToString(this), typeof this ]);
+ throw %MakeTypeError('apply_non_function',
+ [ %ToString(this), typeof this ]);
}
// Make sure the arguments list has the right type.
diff --git a/src/3rdparty/v8/src/safepoint-table.cc b/src/3rdparty/v8/src/safepoint-table.cc
index bcd0a1d..89ad8af 100644
--- a/src/3rdparty/v8/src/safepoint-table.cc
+++ b/src/3rdparty/v8/src/safepoint-table.cc
@@ -122,17 +122,20 @@ void Safepoint::DefinePointerRegister(Register reg) {
Safepoint SafepointTableBuilder::DefineSafepoint(
- Assembler* assembler, Safepoint::Kind kind, int arguments,
- int deoptimization_index) {
- ASSERT(deoptimization_index != -1);
+ Assembler* assembler,
+ Safepoint::Kind kind,
+ int arguments,
+ Safepoint::DeoptMode deopt_mode) {
ASSERT(arguments >= 0);
- DeoptimizationInfo pc_and_deoptimization_index;
- pc_and_deoptimization_index.pc = assembler->pc_offset();
- pc_and_deoptimization_index.deoptimization_index = deoptimization_index;
- pc_and_deoptimization_index.pc_after_gap = assembler->pc_offset();
- pc_and_deoptimization_index.arguments = arguments;
- pc_and_deoptimization_index.has_doubles = (kind & Safepoint::kWithDoubles);
- deoptimization_info_.Add(pc_and_deoptimization_index);
+ DeoptimizationInfo info;
+ info.pc = assembler->pc_offset();
+ info.arguments = arguments;
+ info.has_doubles = (kind & Safepoint::kWithDoubles);
+ deoptimization_info_.Add(info);
+ deopt_index_list_.Add(Safepoint::kNoDeoptimizationIndex);
+ if (deopt_mode == Safepoint::kNoLazyDeopt) {
+ last_lazy_safepoint_ = deopt_index_list_.length();
+ }
indexes_.Add(new ZoneList<int>(8));
registers_.Add((kind & Safepoint::kWithRegisters)
? new ZoneList<int>(4)
@@ -141,6 +144,12 @@ Safepoint SafepointTableBuilder::DefineSafepoint(
}
+void SafepointTableBuilder::RecordLazyDeoptimizationIndex(int index) {
+ while (last_lazy_safepoint_ < deopt_index_list_.length()) {
+ deopt_index_list_[last_lazy_safepoint_++] = index;
+ }
+}
+
unsigned SafepointTableBuilder::GetCodeOffset() const {
ASSERT(emitted_);
return offset_;
@@ -173,11 +182,11 @@ void SafepointTableBuilder::Emit(Assembler* assembler, int bits_per_entry) {
assembler->dd(length);
assembler->dd(bytes_per_entry);
- // Emit sorted table of pc offsets together with deoptimization indexes and
- // pc after gap information.
+ // Emit sorted table of pc offsets together with deoptimization indexes.
for (int i = 0; i < length; i++) {
assembler->dd(deoptimization_info_[i].pc);
- assembler->dd(EncodeExceptPC(deoptimization_info_[i]));
+ assembler->dd(EncodeExceptPC(deoptimization_info_[i],
+ deopt_index_list_[i]));
}
// Emit table of bitmaps.
@@ -222,35 +231,14 @@ void SafepointTableBuilder::Emit(Assembler* assembler, int bits_per_entry) {
}
-uint32_t SafepointTableBuilder::EncodeExceptPC(const DeoptimizationInfo& info) {
- unsigned index = info.deoptimization_index;
- unsigned gap_size = info.pc_after_gap - info.pc;
+uint32_t SafepointTableBuilder::EncodeExceptPC(const DeoptimizationInfo& info,
+ unsigned index) {
uint32_t encoding = SafepointEntry::DeoptimizationIndexField::encode(index);
- encoding |= SafepointEntry::GapCodeSizeField::encode(gap_size);
encoding |= SafepointEntry::ArgumentsField::encode(info.arguments);
encoding |= SafepointEntry::SaveDoublesField::encode(info.has_doubles);
return encoding;
}
-int SafepointTableBuilder::CountShortDeoptimizationIntervals(unsigned limit) {
- int result = 0;
- if (!deoptimization_info_.is_empty()) {
- unsigned previous_gap_end = deoptimization_info_[0].pc_after_gap;
- for (int i = 1, n = deoptimization_info_.length(); i < n; i++) {
- DeoptimizationInfo info = deoptimization_info_[i];
- if (static_cast<int>(info.deoptimization_index) !=
- Safepoint::kNoDeoptimizationIndex) {
- if (previous_gap_end + limit > info.pc) {
- result++;
- }
- previous_gap_end = info.pc_after_gap;
- }
- }
- }
- return result;
-}
-
-
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/safepoint-table.h b/src/3rdparty/v8/src/safepoint-table.h
index de537f9..57fceec 100644
--- a/src/3rdparty/v8/src/safepoint-table.h
+++ b/src/3rdparty/v8/src/safepoint-table.h
@@ -62,10 +62,20 @@ class SafepointEntry BASE_EMBEDDED {
return DeoptimizationIndexField::decode(info_);
}
- int gap_code_size() const {
- ASSERT(is_valid());
- return GapCodeSizeField::decode(info_);
- }
+ static const int kArgumentsFieldBits = 3;
+ static const int kSaveDoublesFieldBits = 1;
+ static const int kDeoptIndexBits =
+ 32 - kArgumentsFieldBits - kSaveDoublesFieldBits;
+ class DeoptimizationIndexField:
+ public BitField<int, 0, kDeoptIndexBits> {}; // NOLINT
+ class ArgumentsField:
+ public BitField<unsigned,
+ kDeoptIndexBits,
+ kArgumentsFieldBits> {}; // NOLINT
+ class SaveDoublesField:
+ public BitField<bool,
+ kDeoptIndexBits + kArgumentsFieldBits,
+ kSaveDoublesFieldBits> { }; // NOLINT
int argument_count() const {
ASSERT(is_valid());
@@ -85,27 +95,6 @@ class SafepointEntry BASE_EMBEDDED {
bool HasRegisters() const;
bool HasRegisterAt(int reg_index) const;
- // Reserve 13 bits for the gap code size. On ARM a constant pool can be
- // emitted when generating the gap code. The size of the const pool is less
- // than what can be represented in 12 bits, so 13 bits gives room for having
- // instructions before potentially emitting a constant pool.
- static const int kGapCodeSizeBits = 13;
- static const int kArgumentsFieldBits = 3;
- static const int kSaveDoublesFieldBits = 1;
- static const int kDeoptIndexBits =
- 32 - kGapCodeSizeBits - kArgumentsFieldBits - kSaveDoublesFieldBits;
- class GapCodeSizeField: public BitField<unsigned, 0, kGapCodeSizeBits> {};
- class DeoptimizationIndexField: public BitField<int,
- kGapCodeSizeBits,
- kDeoptIndexBits> {}; // NOLINT
- class ArgumentsField: public BitField<unsigned,
- kGapCodeSizeBits + kDeoptIndexBits,
- kArgumentsFieldBits> {}; // NOLINT
- class SaveDoublesField: public BitField<bool,
- kGapCodeSizeBits + kDeoptIndexBits +
- kArgumentsFieldBits,
- kSaveDoublesFieldBits> { }; // NOLINT
-
private:
unsigned info_;
uint8_t* bits_;
@@ -186,6 +175,11 @@ class Safepoint BASE_EMBEDDED {
kWithRegistersAndDoubles = kWithRegisters | kWithDoubles
} Kind;
+ enum DeoptMode {
+ kNoLazyDeopt,
+ kLazyDeopt
+ };
+
static const int kNoDeoptimizationIndex =
(1 << (SafepointEntry::kDeoptIndexBits)) - 1;
@@ -206,9 +200,11 @@ class SafepointTableBuilder BASE_EMBEDDED {
public:
SafepointTableBuilder()
: deoptimization_info_(32),
+ deopt_index_list_(32),
indexes_(32),
registers_(32),
- emitted_(false) { }
+ emitted_(false),
+ last_lazy_safepoint_(0) { }
// Get the offset of the emitted safepoint table in the code.
unsigned GetCodeOffset() const;
@@ -217,50 +213,34 @@ class SafepointTableBuilder BASE_EMBEDDED {
Safepoint DefineSafepoint(Assembler* assembler,
Safepoint::Kind kind,
int arguments,
- int deoptimization_index);
-
- // Update the last safepoint with the size of the code generated until the
- // end of the gap following it.
- void SetPcAfterGap(int pc) {
- ASSERT(!deoptimization_info_.is_empty());
- int index = deoptimization_info_.length() - 1;
- deoptimization_info_[index].pc_after_gap = pc;
- }
+ Safepoint::DeoptMode mode);
- // Get the end pc offset of the last safepoint, including the code generated
- // until the end of the gap following it.
- unsigned GetPcAfterGap() {
- int index = deoptimization_info_.length();
- if (index == 0) return 0;
- return deoptimization_info_[index - 1].pc_after_gap;
- }
+ // Record deoptimization index for lazy deoptimization for the last
+ // outstanding safepoints.
+ void RecordLazyDeoptimizationIndex(int index);
// Emit the safepoint table after the body. The number of bits per
// entry must be enough to hold all the pointer indexes.
void Emit(Assembler* assembler, int bits_per_entry);
- // Count the number of deoptimization points where the next
- // following deoptimization point comes less than limit bytes
- // after the end of this point's gap.
- int CountShortDeoptimizationIntervals(unsigned limit);
private:
struct DeoptimizationInfo {
unsigned pc;
- unsigned deoptimization_index;
- unsigned pc_after_gap;
unsigned arguments;
bool has_doubles;
};
- uint32_t EncodeExceptPC(const DeoptimizationInfo& info);
+ uint32_t EncodeExceptPC(const DeoptimizationInfo& info, unsigned index);
ZoneList<DeoptimizationInfo> deoptimization_info_;
+ ZoneList<unsigned> deopt_index_list_;
ZoneList<ZoneList<int>*> indexes_;
ZoneList<ZoneList<int>*> registers_;
unsigned offset_;
bool emitted_;
+ int last_lazy_safepoint_;
DISALLOW_COPY_AND_ASSIGN(SafepointTableBuilder);
};
diff --git a/src/3rdparty/v8/src/scanner-character-streams.cc b/src/3rdparty/v8/src/scanner-character-streams.cc
index ee10703..56b9f03 100644
--- a/src/3rdparty/v8/src/scanner-character-streams.cc
+++ b/src/3rdparty/v8/src/scanner-character-streams.cc
@@ -36,19 +36,19 @@ namespace v8 {
namespace internal {
// ----------------------------------------------------------------------------
-// BufferedUC16CharacterStreams
+// BufferedUtf16CharacterStreams
-BufferedUC16CharacterStream::BufferedUC16CharacterStream()
- : UC16CharacterStream(),
+BufferedUtf16CharacterStream::BufferedUtf16CharacterStream()
+ : Utf16CharacterStream(),
pushback_limit_(NULL) {
// Initialize buffer as being empty. First read will fill the buffer.
buffer_cursor_ = buffer_;
buffer_end_ = buffer_;
}
-BufferedUC16CharacterStream::~BufferedUC16CharacterStream() { }
+BufferedUtf16CharacterStream::~BufferedUtf16CharacterStream() { }
-void BufferedUC16CharacterStream::PushBack(uc32 character) {
+void BufferedUtf16CharacterStream::PushBack(uc32 character) {
if (character == kEndOfInput) {
pos_--;
return;
@@ -63,7 +63,7 @@ void BufferedUC16CharacterStream::PushBack(uc32 character) {
}
-void BufferedUC16CharacterStream::SlowPushBack(uc16 character) {
+void BufferedUtf16CharacterStream::SlowPushBack(uc16 character) {
// In pushback mode, the end of the buffer contains pushback,
// and the start of the buffer (from buffer start to pushback_limit_)
// contains valid data that comes just after the pushback.
@@ -89,7 +89,7 @@ void BufferedUC16CharacterStream::SlowPushBack(uc16 character) {
}
-bool BufferedUC16CharacterStream::ReadBlock() {
+bool BufferedUtf16CharacterStream::ReadBlock() {
buffer_cursor_ = buffer_;
if (pushback_limit_ != NULL) {
// Leave pushback mode.
@@ -106,7 +106,7 @@ bool BufferedUC16CharacterStream::ReadBlock() {
}
-unsigned BufferedUC16CharacterStream::SlowSeekForward(unsigned delta) {
+unsigned BufferedUtf16CharacterStream::SlowSeekForward(unsigned delta) {
// Leave pushback mode (i.e., ignore that there might be valid data
// in the buffer before the pushback_limit_ point).
pushback_limit_ = NULL;
@@ -114,10 +114,10 @@ unsigned BufferedUC16CharacterStream::SlowSeekForward(unsigned delta) {
}
// ----------------------------------------------------------------------------
-// GenericStringUC16CharacterStream
+// GenericStringUtf16CharacterStream
-GenericStringUC16CharacterStream::GenericStringUC16CharacterStream(
+GenericStringUtf16CharacterStream::GenericStringUtf16CharacterStream(
Handle<String> data,
unsigned start_position,
unsigned end_position)
@@ -130,10 +130,10 @@ GenericStringUC16CharacterStream::GenericStringUC16CharacterStream(
}
-GenericStringUC16CharacterStream::~GenericStringUC16CharacterStream() { }
+GenericStringUtf16CharacterStream::~GenericStringUtf16CharacterStream() { }
-unsigned GenericStringUC16CharacterStream::BufferSeekForward(unsigned delta) {
+unsigned GenericStringUtf16CharacterStream::BufferSeekForward(unsigned delta) {
unsigned old_pos = pos_;
pos_ = Min(pos_ + delta, length_);
ReadBlock();
@@ -141,7 +141,7 @@ unsigned GenericStringUC16CharacterStream::BufferSeekForward(unsigned delta) {
}
-unsigned GenericStringUC16CharacterStream::FillBuffer(unsigned from_pos,
+unsigned GenericStringUtf16CharacterStream::FillBuffer(unsigned from_pos,
unsigned length) {
if (from_pos >= length_) return 0;
if (from_pos + length > length_) {
@@ -153,10 +153,10 @@ unsigned GenericStringUC16CharacterStream::FillBuffer(unsigned from_pos,
// ----------------------------------------------------------------------------
-// Utf8ToUC16CharacterStream
-Utf8ToUC16CharacterStream::Utf8ToUC16CharacterStream(const byte* data,
- unsigned length)
- : BufferedUC16CharacterStream(),
+// Utf8ToUtf16CharacterStream
+Utf8ToUtf16CharacterStream::Utf8ToUtf16CharacterStream(const byte* data,
+ unsigned length)
+ : BufferedUtf16CharacterStream(),
raw_data_(data),
raw_data_length_(length),
raw_data_pos_(0),
@@ -165,10 +165,10 @@ Utf8ToUC16CharacterStream::Utf8ToUC16CharacterStream(const byte* data,
}
-Utf8ToUC16CharacterStream::~Utf8ToUC16CharacterStream() { }
+Utf8ToUtf16CharacterStream::~Utf8ToUtf16CharacterStream() { }
-unsigned Utf8ToUC16CharacterStream::BufferSeekForward(unsigned delta) {
+unsigned Utf8ToUtf16CharacterStream::BufferSeekForward(unsigned delta) {
unsigned old_pos = pos_;
unsigned target_pos = pos_ + delta;
SetRawPosition(target_pos);
@@ -178,9 +178,9 @@ unsigned Utf8ToUC16CharacterStream::BufferSeekForward(unsigned delta) {
}
-unsigned Utf8ToUC16CharacterStream::FillBuffer(unsigned char_position,
- unsigned length) {
- static const unibrow::uchar kMaxUC16Character = 0xffff;
+unsigned Utf8ToUtf16CharacterStream::FillBuffer(unsigned char_position,
+ unsigned length) {
+ static const unibrow::uchar kMaxUtf16Character = 0xffff;
SetRawPosition(char_position);
if (raw_character_position_ != char_position) {
// char_position was not a valid position in the stream (hit the end
@@ -188,7 +188,7 @@ unsigned Utf8ToUC16CharacterStream::FillBuffer(unsigned char_position,
return 0u;
}
unsigned i = 0;
- while (i < length) {
+ while (i < length - 1) {
if (raw_data_pos_ == raw_data_length_) break;
unibrow::uchar c = raw_data_[raw_data_pos_];
if (c <= unibrow::Utf8::kMaxOneByteChar) {
@@ -197,12 +197,13 @@ unsigned Utf8ToUC16CharacterStream::FillBuffer(unsigned char_position,
c = unibrow::Utf8::CalculateValue(raw_data_ + raw_data_pos_,
raw_data_length_ - raw_data_pos_,
&raw_data_pos_);
- // Don't allow characters outside of the BMP.
- if (c > kMaxUC16Character) {
- c = unibrow::Utf8::kBadChar;
- }
}
- buffer_[i++] = static_cast<uc16>(c);
+ if (c > kMaxUtf16Character) {
+ buffer_[i++] = unibrow::Utf16::LeadSurrogate(c);
+ buffer_[i++] = unibrow::Utf16::TrailSurrogate(c);
+ } else {
+ buffer_[i++] = static_cast<uc16>(c);
+ }
}
raw_character_position_ = char_position + i;
return i;
@@ -266,37 +267,52 @@ static inline void Utf8CharacterForward(const byte* buffer, unsigned* cursor) {
}
-void Utf8ToUC16CharacterStream::SetRawPosition(unsigned target_position) {
+// This can't set a raw position between two surrogate pairs, since there
+// is no position in the UTF8 stream that corresponds to that. This assumes
+// that the surrogate pair is correctly coded as a 4 byte UTF-8 sequence. If
+// it is illegally coded as two 3 byte sequences then there is no problem here.
+void Utf8ToUtf16CharacterStream::SetRawPosition(unsigned target_position) {
if (raw_character_position_ > target_position) {
// Spool backwards in utf8 buffer.
do {
+ int old_pos = raw_data_pos_;
Utf8CharacterBack(raw_data_, &raw_data_pos_);
raw_character_position_--;
+ ASSERT(old_pos - raw_data_pos_ <= 4);
+ // Step back over both code units for surrogate pairs.
+ if (old_pos - raw_data_pos_ == 4) raw_character_position_--;
} while (raw_character_position_ > target_position);
+ // No surrogate pair splitting.
+ ASSERT(raw_character_position_ == target_position);
return;
}
// Spool forwards in the utf8 buffer.
while (raw_character_position_ < target_position) {
if (raw_data_pos_ == raw_data_length_) return;
+ int old_pos = raw_data_pos_;
Utf8CharacterForward(raw_data_, &raw_data_pos_);
raw_character_position_++;
+ ASSERT(raw_data_pos_ - old_pos <= 4);
+ if (raw_data_pos_ - old_pos == 4) raw_character_position_++;
}
+ // No surrogate pair splitting.
+ ASSERT(raw_character_position_ == target_position);
}
// ----------------------------------------------------------------------------
-// ExternalTwoByteStringUC16CharacterStream
+// ExternalTwoByteStringUtf16CharacterStream
-ExternalTwoByteStringUC16CharacterStream::
- ~ExternalTwoByteStringUC16CharacterStream() { }
+ExternalTwoByteStringUtf16CharacterStream::
+ ~ExternalTwoByteStringUtf16CharacterStream() { }
-ExternalTwoByteStringUC16CharacterStream
- ::ExternalTwoByteStringUC16CharacterStream(
+ExternalTwoByteStringUtf16CharacterStream
+ ::ExternalTwoByteStringUtf16CharacterStream(
Handle<ExternalTwoByteString> data,
int start_position,
int end_position)
- : UC16CharacterStream(),
+ : Utf16CharacterStream(),
source_(data),
raw_data_(data->GetTwoByteData(start_position)) {
buffer_cursor_ = raw_data_,
diff --git a/src/3rdparty/v8/src/scanner-character-streams.h b/src/3rdparty/v8/src/scanner-character-streams.h
index 5c4ea2c..319ee8f 100644
--- a/src/3rdparty/v8/src/scanner-character-streams.h
+++ b/src/3rdparty/v8/src/scanner-character-streams.h
@@ -36,10 +36,10 @@ namespace internal {
// A buffered character stream based on a random access character
// source (ReadBlock can be called with pos_ pointing to any position,
// even positions before the current).
-class BufferedUC16CharacterStream: public UC16CharacterStream {
+class BufferedUtf16CharacterStream: public Utf16CharacterStream {
public:
- BufferedUC16CharacterStream();
- virtual ~BufferedUC16CharacterStream();
+ BufferedUtf16CharacterStream();
+ virtual ~BufferedUtf16CharacterStream();
virtual void PushBack(uc32 character);
@@ -60,12 +60,12 @@ class BufferedUC16CharacterStream: public UC16CharacterStream {
// Generic string stream.
-class GenericStringUC16CharacterStream: public BufferedUC16CharacterStream {
+class GenericStringUtf16CharacterStream: public BufferedUtf16CharacterStream {
public:
- GenericStringUC16CharacterStream(Handle<String> data,
- unsigned start_position,
- unsigned end_position);
- virtual ~GenericStringUC16CharacterStream();
+ GenericStringUtf16CharacterStream(Handle<String> data,
+ unsigned start_position,
+ unsigned end_position);
+ virtual ~GenericStringUtf16CharacterStream();
protected:
virtual unsigned BufferSeekForward(unsigned delta);
@@ -77,11 +77,11 @@ class GenericStringUC16CharacterStream: public BufferedUC16CharacterStream {
};
-// UC16 stream based on a literal UTF-8 string.
-class Utf8ToUC16CharacterStream: public BufferedUC16CharacterStream {
+// Utf16 stream based on a literal UTF-8 string.
+class Utf8ToUtf16CharacterStream: public BufferedUtf16CharacterStream {
public:
- Utf8ToUC16CharacterStream(const byte* data, unsigned length);
- virtual ~Utf8ToUC16CharacterStream();
+ Utf8ToUtf16CharacterStream(const byte* data, unsigned length);
+ virtual ~Utf8ToUtf16CharacterStream();
protected:
virtual unsigned BufferSeekForward(unsigned delta);
@@ -98,12 +98,12 @@ class Utf8ToUC16CharacterStream: public BufferedUC16CharacterStream {
// UTF16 buffer to read characters from an external string.
-class ExternalTwoByteStringUC16CharacterStream: public UC16CharacterStream {
+class ExternalTwoByteStringUtf16CharacterStream: public Utf16CharacterStream {
public:
- ExternalTwoByteStringUC16CharacterStream(Handle<ExternalTwoByteString> data,
- int start_position,
- int end_position);
- virtual ~ExternalTwoByteStringUC16CharacterStream();
+ ExternalTwoByteStringUtf16CharacterStream(Handle<ExternalTwoByteString> data,
+ int start_position,
+ int end_position);
+ virtual ~ExternalTwoByteStringUtf16CharacterStream();
virtual void PushBack(uc32 character) {
ASSERT(buffer_cursor_ > raw_data_);
diff --git a/src/3rdparty/v8/src/scanner.cc b/src/3rdparty/v8/src/scanner.cc
index 01fe81c..7901b5d 100755
--- a/src/3rdparty/v8/src/scanner.cc
+++ b/src/3rdparty/v8/src/scanner.cc
@@ -41,10 +41,11 @@ namespace internal {
Scanner::Scanner(UnicodeCache* unicode_cache)
: unicode_cache_(unicode_cache),
octal_pos_(Location::invalid()),
- harmony_scoping_(false) { }
+ harmony_scoping_(false),
+ harmony_modules_(false) { }
-void Scanner::Initialize(UC16CharacterStream* source) {
+void Scanner::Initialize(Utf16CharacterStream* source) {
source_ = source;
// Need to capture identifiers in order to recognize "get" and "set"
// in object literals.
@@ -830,7 +831,8 @@ uc32 Scanner::ScanIdentifierUnicodeEscape() {
KEYWORD_GROUP('e') \
KEYWORD("else", Token::ELSE) \
KEYWORD("enum", Token::FUTURE_RESERVED_WORD) \
- KEYWORD("export", Token::FUTURE_RESERVED_WORD) \
+ KEYWORD("export", harmony_modules \
+ ? Token::EXPORT : Token::FUTURE_RESERVED_WORD) \
KEYWORD("extends", Token::FUTURE_RESERVED_WORD) \
KEYWORD_GROUP('f') \
KEYWORD("false", Token::FALSE_LITERAL) \
@@ -840,7 +842,8 @@ uc32 Scanner::ScanIdentifierUnicodeEscape() {
KEYWORD_GROUP('i') \
KEYWORD("if", Token::IF) \
KEYWORD("implements", Token::FUTURE_STRICT_RESERVED_WORD) \
- KEYWORD("import", Token::FUTURE_RESERVED_WORD) \
+ KEYWORD("import", harmony_modules \
+ ? Token::IMPORT : Token::FUTURE_RESERVED_WORD) \
KEYWORD("in", Token::IN) \
KEYWORD("instanceof", Token::INSTANCEOF) \
KEYWORD("interface", Token::FUTURE_STRICT_RESERVED_WORD) \
@@ -879,7 +882,8 @@ uc32 Scanner::ScanIdentifierUnicodeEscape() {
static Token::Value KeywordOrIdentifierToken(const char* input,
int input_length,
- bool harmony_scoping) {
+ bool harmony_scoping,
+ bool harmony_modules) {
ASSERT(input_length >= 1);
const int kMinLength = 2;
const int kMaxLength = 10;
@@ -955,7 +959,8 @@ Token::Value Scanner::ScanIdentifierOrKeyword() {
Vector<const char> chars = next_.literal_chars->ascii_literal();
return KeywordOrIdentifierToken(chars.start(),
chars.length(),
- harmony_scoping_);
+ harmony_scoping_,
+ harmony_modules_);
}
return Token::IDENTIFIER;
diff --git a/src/3rdparty/v8/src/scanner.h b/src/3rdparty/v8/src/scanner.h
index 88e3bce..045e7d2 100644
--- a/src/3rdparty/v8/src/scanner.h
+++ b/src/3rdparty/v8/src/scanner.h
@@ -42,15 +42,24 @@ namespace v8 {
namespace internal {
-// General collection of bit-flags that can be passed to scanners and
+// General collection of (multi-)bit-flags that can be passed to scanners and
// parsers to signify their (initial) mode of operation.
enum ParsingFlags {
kNoParsingFlags = 0,
- kAllowLazy = 1,
- kAllowNativesSyntax = 2,
- kHarmonyScoping = 4
+ // Embed LanguageMode values in parsing flags, i.e., equivalent to:
+ // CLASSIC_MODE = 0,
+ // STRICT_MODE,
+ // EXTENDED_MODE,
+ kLanguageModeMask = 0x03,
+ kAllowLazy = 0x04,
+ kAllowNativesSyntax = 0x08,
+ kAllowModules = 0x10
};
+STATIC_ASSERT((kLanguageModeMask & CLASSIC_MODE) == CLASSIC_MODE);
+STATIC_ASSERT((kLanguageModeMask & STRICT_MODE) == STRICT_MODE);
+STATIC_ASSERT((kLanguageModeMask & EXTENDED_MODE) == EXTENDED_MODE);
+
// Returns the value (0 .. 15) of a hexadecimal character c.
// If c is not a legal hexadecimal character, returns a value < 0.
@@ -64,15 +73,17 @@ inline int HexValue(uc32 c) {
// ---------------------------------------------------------------------
-// Buffered stream of characters, using an internal UC16 buffer.
+// Buffered stream of UTF-16 code units, using an internal UTF-16 buffer.
+// A code unit is a 16 bit value representing either a 16 bit code point
+// or one part of a surrogate pair that make a single 21 bit code point.
-class UC16CharacterStream {
+class Utf16CharacterStream {
public:
- UC16CharacterStream() : pos_(0) { }
- virtual ~UC16CharacterStream() { }
+ Utf16CharacterStream() : pos_(0) { }
+ virtual ~Utf16CharacterStream() { }
- // Returns and advances past the next UC16 character in the input
- // stream. If there are no more characters, it returns a negative
+ // Returns and advances past the next UTF-16 code unit in the input
+ // stream. If there are no more code units, it returns a negative
// value.
inline uc32 Advance() {
if (buffer_cursor_ < buffer_end_ || ReadBlock()) {
@@ -81,47 +92,47 @@ class UC16CharacterStream {
}
// Note: currently the following increment is necessary to avoid a
// parser problem! The scanner treats the final kEndOfInput as
- // a character with a position, and does math relative to that
+ // a code unit with a position, and does math relative to that
// position.
pos_++;
return kEndOfInput;
}
- // Return the current position in the character stream.
+ // Return the current position in the code unit stream.
// Starts at zero.
inline unsigned pos() const { return pos_; }
- // Skips forward past the next character_count UC16 characters
+ // Skips forward past the next code_unit_count UTF-16 code units
// in the input, or until the end of input if that comes sooner.
- // Returns the number of characters actually skipped. If less
- // than character_count,
- inline unsigned SeekForward(unsigned character_count) {
+ // Returns the number of code units actually skipped. If less
+ // than code_unit_count,
+ inline unsigned SeekForward(unsigned code_unit_count) {
unsigned buffered_chars =
static_cast<unsigned>(buffer_end_ - buffer_cursor_);
- if (character_count <= buffered_chars) {
- buffer_cursor_ += character_count;
- pos_ += character_count;
- return character_count;
+ if (code_unit_count <= buffered_chars) {
+ buffer_cursor_ += code_unit_count;
+ pos_ += code_unit_count;
+ return code_unit_count;
}
- return SlowSeekForward(character_count);
+ return SlowSeekForward(code_unit_count);
}
- // Pushes back the most recently read UC16 character (or negative
+ // Pushes back the most recently read UTF-16 code unit (or negative
// value if at end of input), i.e., the value returned by the most recent
// call to Advance.
// Must not be used right after calling SeekForward.
- virtual void PushBack(int32_t character) = 0;
+ virtual void PushBack(int32_t code_unit) = 0;
protected:
static const uc32 kEndOfInput = -1;
- // Ensures that the buffer_cursor_ points to the character at
+ // Ensures that the buffer_cursor_ points to the code_unit at
// position pos_ of the input, if possible. If the position
// is at or after the end of the input, return false. If there
- // are more characters available, return true.
+ // are more code_units available, return true.
virtual bool ReadBlock() = 0;
- virtual unsigned SlowSeekForward(unsigned character_count) = 0;
+ virtual unsigned SlowSeekForward(unsigned code_unit_count) = 0;
const uc16* buffer_cursor_;
const uc16* buffer_end_;
@@ -169,23 +180,24 @@ class LiteralBuffer {
}
}
- INLINE(void AddChar(uc16 character)) {
+ INLINE(void AddChar(uint32_t code_unit)) {
if (position_ >= backing_store_.length()) ExpandBuffer();
if (is_ascii_) {
- if (character < kMaxAsciiCharCodeU) {
- backing_store_[position_] = static_cast<byte>(character);
+ if (code_unit < kMaxAsciiCharCodeU) {
+ backing_store_[position_] = static_cast<byte>(code_unit);
position_ += kASCIISize;
return;
}
- ConvertToUC16();
+ ConvertToUtf16();
}
- *reinterpret_cast<uc16*>(&backing_store_[position_]) = character;
+ ASSERT(code_unit < 0x10000u);
+ *reinterpret_cast<uc16*>(&backing_store_[position_]) = code_unit;
position_ += kUC16Size;
}
bool is_ascii() { return is_ascii_; }
- Vector<const uc16> uc16_literal() {
+ Vector<const uc16> utf16_literal() {
ASSERT(!is_ascii_);
ASSERT((position_ & 0x1) == 0);
return Vector<const uc16>(
@@ -227,13 +239,13 @@ class LiteralBuffer {
backing_store_ = new_store;
}
- void ConvertToUC16() {
+ void ConvertToUtf16() {
ASSERT(is_ascii_);
Vector<byte> new_store;
int new_content_size = position_ * kUC16Size;
if (new_content_size >= backing_store_.length()) {
- // Ensure room for all currently read characters as UC16 as well
- // as the character about to be stored.
+ // Ensure room for all currently read code units as UC16 as well
+ // as the code unit about to be stored.
new_store = Vector<byte>::New(NewCapacity(new_content_size));
} else {
new_store = backing_store_;
@@ -307,7 +319,7 @@ class Scanner {
explicit Scanner(UnicodeCache* scanner_contants);
- void Initialize(UC16CharacterStream* source);
+ void Initialize(Utf16CharacterStream* source);
// Returns the next token and advances input.
Token::Value Next();
@@ -326,9 +338,9 @@ class Scanner {
ASSERT_NOT_NULL(current_.literal_chars);
return current_.literal_chars->ascii_literal();
}
- Vector<const uc16> literal_uc16_string() {
+ Vector<const uc16> literal_utf16_string() {
ASSERT_NOT_NULL(current_.literal_chars);
- return current_.literal_chars->uc16_literal();
+ return current_.literal_chars->utf16_literal();
}
bool is_literal_ascii() {
ASSERT_NOT_NULL(current_.literal_chars);
@@ -362,9 +374,9 @@ class Scanner {
ASSERT_NOT_NULL(next_.literal_chars);
return next_.literal_chars->ascii_literal();
}
- Vector<const uc16> next_literal_uc16_string() {
+ Vector<const uc16> next_literal_utf16_string() {
ASSERT_NOT_NULL(next_.literal_chars);
- return next_.literal_chars->uc16_literal();
+ return next_.literal_chars->utf16_literal();
}
bool is_next_literal_ascii() {
ASSERT_NOT_NULL(next_.literal_chars);
@@ -395,8 +407,14 @@ class Scanner {
bool HarmonyScoping() const {
return harmony_scoping_;
}
- void SetHarmonyScoping(bool block_scoping) {
- harmony_scoping_ = block_scoping;
+ void SetHarmonyScoping(bool scoping) {
+ harmony_scoping_ = scoping;
+ }
+ bool HarmonyModules() const {
+ return harmony_modules_;
+ }
+ void SetHarmonyModules(bool modules) {
+ harmony_modules_ = modules;
}
@@ -527,8 +545,8 @@ class Scanner {
TokenDesc current_; // desc for current token (as returned by Next())
TokenDesc next_; // desc for next token (one token look-ahead)
- // Input stream. Must be initialized to an UC16CharacterStream.
- UC16CharacterStream* source_;
+ // Input stream. Must be initialized to an Utf16CharacterStream.
+ Utf16CharacterStream* source_;
// Start position of the octal literal last scanned.
@@ -544,9 +562,10 @@ class Scanner {
// Whether there is a multi-line comment that contains a
// line-terminator after the current token, and before the next.
bool has_multiline_comment_before_next_;
- // Whether we scan 'let' as a keyword for harmony block scoped
- // let bindings.
+ // Whether we scan 'let' as a keyword for harmony block-scoped let bindings.
bool harmony_scoping_;
+ // Whether we scan 'module', 'import', 'export' as keywords.
+ bool harmony_modules_;
};
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/scopeinfo.cc b/src/3rdparty/v8/src/scopeinfo.cc
index a61b787..0f36234 100644
--- a/src/3rdparty/v8/src/scopeinfo.cc
+++ b/src/3rdparty/v8/src/scopeinfo.cc
@@ -38,499 +38,297 @@ namespace v8 {
namespace internal {
-static int CompareLocal(Variable* const* v, Variable* const* w) {
- int x = (*v)->index();
- int y = (*w)->index();
- // Consider sorting them according to type as well?
- return x - y;
-}
-
-
-template<class Allocator>
-ScopeInfo<Allocator>::ScopeInfo(Scope* scope)
- : function_name_(FACTORY->empty_symbol()),
- calls_eval_(scope->calls_eval()),
- is_strict_mode_(scope->is_strict_mode()),
- is_qml_mode_(scope->is_qml_mode()),
- type_(scope->type()),
- parameters_(scope->num_parameters()),
- stack_slots_(scope->num_stack_slots()),
- context_slots_(scope->num_heap_slots()),
- context_modes_(scope->num_heap_slots()) {
- // Add parameters.
- for (int i = 0; i < scope->num_parameters(); i++) {
- ASSERT(parameters_.length() == i);
- parameters_.Add(scope->parameter(i)->name());
- }
-
- // Add stack locals and collect heap locals.
- // We are assuming that the locals' slots are allocated in
- // increasing order, so we can simply add them to the
- // ScopeInfo lists. However, due to usage analysis, this is
- // not true for context-allocated locals: Some of them
- // may be parameters which are allocated before the
- // non-parameter locals. When the non-parameter locals are
- // sorted according to usage, the allocated slot indices may
- // not be in increasing order with the variable list anymore.
- // Thus, we first collect the context-allocated locals, and then
- // sort them by context slot index before adding them to the
- // ScopeInfo list.
- List<Variable*, Allocator> locals(32); // 32 is a wild guess
- ASSERT(locals.is_empty());
- scope->CollectUsedVariables(&locals);
- locals.Sort(&CompareLocal);
-
- List<Variable*, Allocator> heap_locals(locals.length());
- for (int i = 0; i < locals.length(); i++) {
- Variable* var = locals[i];
- if (var->is_used()) {
- switch (var->location()) {
- case Variable::UNALLOCATED:
- case Variable::PARAMETER:
- break;
-
- case Variable::LOCAL:
- ASSERT(stack_slots_.length() == var->index());
- stack_slots_.Add(var->name());
- break;
-
- case Variable::CONTEXT:
- heap_locals.Add(var);
- break;
-
- case Variable::LOOKUP:
- // We don't expect lookup variables in the locals list.
- UNREACHABLE();
- break;
- }
- }
- }
-
- // Add heap locals.
- if (scope->num_heap_slots() > 0) {
- // Add user-defined slots.
- for (int i = 0; i < heap_locals.length(); i++) {
- ASSERT(heap_locals[i]->index() - Context::MIN_CONTEXT_SLOTS ==
- context_slots_.length());
- ASSERT(heap_locals[i]->index() - Context::MIN_CONTEXT_SLOTS ==
- context_modes_.length());
- context_slots_.Add(heap_locals[i]->name());
- context_modes_.Add(heap_locals[i]->mode());
+Handle<ScopeInfo> ScopeInfo::Create(Scope* scope) {
+ // Collect stack and context locals.
+ ZoneList<Variable*> stack_locals(scope->StackLocalCount());
+ ZoneList<Variable*> context_locals(scope->ContextLocalCount());
+ scope->CollectStackAndContextLocals(&stack_locals, &context_locals);
+ const int stack_local_count = stack_locals.length();
+ const int context_local_count = context_locals.length();
+ // Make sure we allocate the correct amount.
+ ASSERT(scope->StackLocalCount() == stack_local_count);
+ ASSERT(scope->ContextLocalCount() == context_local_count);
+
+ // Determine use and location of the function variable if it is present.
+ FunctionVariableInfo function_name_info;
+ VariableMode function_variable_mode;
+ if (scope->is_function_scope() && scope->function() != NULL) {
+ Variable* var = scope->function()->var();
+ if (!var->is_used()) {
+ function_name_info = UNUSED;
+ } else if (var->IsContextSlot()) {
+ function_name_info = CONTEXT;
+ } else {
+ ASSERT(var->IsStackLocal());
+ function_name_info = STACK;
}
-
+ function_variable_mode = var->mode();
} else {
- ASSERT(heap_locals.length() == 0);
+ function_name_info = NONE;
+ function_variable_mode = VAR;
}
- // Add the function context slot, if present.
- // For now, this must happen at the very end because of the
- // ordering of the scope info slots and the respective slot indices.
- if (scope->is_function_scope()) {
- VariableProxy* proxy = scope->function();
- if (proxy != NULL &&
- proxy->var()->is_used() &&
- proxy->var()->IsContextSlot()) {
- function_name_ = proxy->name();
- // Note that we must not find the function name in the context slot
- // list - instead it must be handled separately in the
- // Contexts::Lookup() function. Thus record an empty symbol here so we
- // get the correct number of context slots.
- ASSERT(proxy->var()->index() - Context::MIN_CONTEXT_SLOTS ==
- context_slots_.length());
- ASSERT(proxy->var()->index() - Context::MIN_CONTEXT_SLOTS ==
- context_modes_.length());
- context_slots_.Add(FACTORY->empty_symbol());
- context_modes_.Add(proxy->var()->mode());
- }
+ const bool has_function_name = function_name_info != NONE;
+ const int parameter_count = scope->num_parameters();
+ const int length = kVariablePartIndex
+ + parameter_count + stack_local_count + 2 * context_local_count
+ + (has_function_name ? 2 : 0);
+
+ Handle<ScopeInfo> scope_info = FACTORY->NewScopeInfo(length);
+
+ // Encode the flags.
+ int flags = TypeField::encode(scope->type()) |
+ CallsEvalField::encode(scope->calls_eval()) |
+ LanguageModeField::encode(scope->language_mode()) |
+ FunctionVariableField::encode(function_name_info) |
+ FunctionVariableMode::encode(function_variable_mode);
+ scope_info->SetFlags(flags);
+ scope_info->SetParameterCount(parameter_count);
+ scope_info->SetStackLocalCount(stack_local_count);
+ scope_info->SetContextLocalCount(context_local_count);
+
+ int index = kVariablePartIndex;
+ // Add parameters.
+ ASSERT(index == scope_info->ParameterEntriesIndex());
+ for (int i = 0; i < parameter_count; ++i) {
+ scope_info->set(index++, *scope->parameter(i)->name());
}
-}
+ // Add stack locals' names. We are assuming that the stack locals'
+ // slots are allocated in increasing order, so we can simply add
+ // them to the ScopeInfo object.
+ ASSERT(index == scope_info->StackLocalEntriesIndex());
+ for (int i = 0; i < stack_local_count; ++i) {
+ ASSERT(stack_locals[i]->index() == i);
+ scope_info->set(index++, *stack_locals[i]->name());
+ }
-// Encoding format in a FixedArray object:
-//
-// - function name
-//
-// - calls eval boolean flag
-//
-// - is strict mode scope
-//
-// - is qml mode scope
-//
-// - scope type
-//
-// - number of variables in the context object (smi) (= function context
-// slot index + 1)
-// - list of pairs (name, Var mode) of context-allocated variables (starting
-// with context slot 0)
-//
-// - number of parameters (smi)
-// - list of parameter names (starting with parameter 0 first)
-//
-// - number of variables on the stack (smi)
-// - list of names of stack-allocated variables (starting with stack slot 0)
+ // Due to usage analysis, context-allocated locals are not necessarily in
+ // increasing order: Some of them may be parameters which are allocated before
+ // the non-parameter locals. When the non-parameter locals are sorted
+ // according to usage, the allocated slot indices may not be in increasing
+ // order with the variable list anymore. Thus, we first need to sort them by
+ // context slot index before adding them to the ScopeInfo object.
+ context_locals.Sort(&Variable::CompareIndex);
+
+ // Add context locals' names.
+ ASSERT(index == scope_info->ContextLocalNameEntriesIndex());
+ for (int i = 0; i < context_local_count; ++i) {
+ scope_info->set(index++, *context_locals[i]->name());
+ }
-// The ScopeInfo representation could be simplified and the ScopeInfo
-// re-implemented (with almost the same interface). Here is a
-// suggestion for the new format:
-//
-// - have a single list with all variable names (parameters, stack locals,
-// context locals), followed by a list of non-Object* values containing
-// the variables information (what kind, index, attributes)
-// - searching the linear list of names is fast and yields an index into the
-// list if the variable name is found
-// - that list index is then used to find the variable information in the
-// subsequent list
-// - the list entries don't have to be in any particular order, so all the
-// current sorting business can go away
-// - the ScopeInfo lookup routines can be reduced to perhaps a single lookup
-// which returns all information at once
-// - when gathering the information from a Scope, we only need to iterate
-// through the local variables (parameters and context info is already
-// present)
+ // Add context locals' info.
+ ASSERT(index == scope_info->ContextLocalInfoEntriesIndex());
+ for (int i = 0; i < context_local_count; ++i) {
+ Variable* var = context_locals[i];
+ uint32_t value = ContextLocalMode::encode(var->mode()) |
+ ContextLocalInitFlag::encode(var->initialization_flag());
+ scope_info->set(index++, Smi::FromInt(value));
+ }
+ // If present, add the function variable name and its index.
+ ASSERT(index == scope_info->FunctionNameEntryIndex());
+ if (has_function_name) {
+ int var_index = scope->function()->var()->index();
+ scope_info->set(index++, *scope->function()->name());
+ scope_info->set(index++, Smi::FromInt(var_index));
+ ASSERT(function_name_info != STACK ||
+ (var_index == scope_info->StackLocalCount() &&
+ var_index == scope_info->StackSlotCount() - 1));
+ ASSERT(function_name_info != CONTEXT ||
+ var_index == scope_info->ContextLength() - 1);
+ }
-template <class T>
-static inline Object** ReadInt(Object** p, T* x) {
- *x = static_cast<T>((reinterpret_cast<Smi*>(*p++))->value());
- return p;
+ ASSERT(index == scope_info->length());
+ ASSERT(scope->num_parameters() == scope_info->ParameterCount());
+ ASSERT(scope->num_stack_slots() == scope_info->StackSlotCount());
+ ASSERT(scope->num_heap_slots() == scope_info->ContextLength());
+ return scope_info;
}
-static inline Object** ReadBool(Object** p, bool* x) {
- *x = (reinterpret_cast<Smi*>(*p++))->value() != 0;
- return p;
+ScopeInfo* ScopeInfo::Empty() {
+ return reinterpret_cast<ScopeInfo*>(HEAP->empty_fixed_array());
}
-template <class T>
-static inline Object** ReadObject(Object** p, Handle<T>* s) {
- *s = Handle<T>::cast(Handle<Object>(*p++));
- return p;
+ScopeType ScopeInfo::Type() {
+ ASSERT(length() > 0);
+ return TypeField::decode(Flags());
}
-template <class Allocator, class T>
-static Object** ReadList(Object** p, List<Handle<T>, Allocator >* list) {
- ASSERT(list->is_empty());
- int n;
- p = ReadInt(p, &n);
- while (n-- > 0) {
- Handle<T> s;
- p = ReadObject(p, &s);
- list->Add(s);
- }
- return p;
-}
-
-
-template <class Allocator>
-static Object** ReadList(Object** p,
- List<Handle<String>, Allocator>* list,
- List<VariableMode, Allocator>* modes) {
- ASSERT(list->is_empty());
- int n;
- p = ReadInt(p, &n);
- while (n-- > 0) {
- Handle<String> s;
- int m;
- p = ReadObject(p, &s);
- p = ReadInt(p, &m);
- list->Add(s);
- modes->Add(static_cast<VariableMode>(m));
- }
- return p;
-}
-
-
-template<class Allocator>
-ScopeInfo<Allocator>::ScopeInfo(SerializedScopeInfo* data)
- : function_name_(FACTORY->empty_symbol()),
- parameters_(4),
- stack_slots_(8),
- context_slots_(8),
- context_modes_(8) {
- if (data->length() > 0) {
- Object** p0 = data->data_start();
- Object** p = p0;
- p = ReadObject(p, &function_name_);
- p = ReadBool(p, &calls_eval_);
- p = ReadBool(p, &is_strict_mode_);
- p = ReadBool(p, &is_qml_mode_);
- p = ReadInt(p, &type_);
- p = ReadList<Allocator>(p, &context_slots_, &context_modes_);
- p = ReadList<Allocator>(p, &parameters_);
- p = ReadList<Allocator>(p, &stack_slots_);
- ASSERT((p - p0) == FixedArray::cast(data)->length());
- }
+bool ScopeInfo::CallsEval() {
+ return length() > 0 && CallsEvalField::decode(Flags());
}
-static inline Object** WriteInt(Object** p, int x) {
- *p++ = Smi::FromInt(x);
- return p;
+LanguageMode ScopeInfo::language_mode() {
+ return length() > 0 ? LanguageModeField::decode(Flags()) : CLASSIC_MODE;
}
-static inline Object** WriteBool(Object** p, bool b) {
- *p++ = Smi::FromInt(b ? 1 : 0);
- return p;
+int ScopeInfo::LocalCount() {
+ return StackLocalCount() + ContextLocalCount();
}
-template <class T>
-static inline Object** WriteObject(Object** p, Handle<T> s) {
- *p++ = *s;
- return p;
+int ScopeInfo::StackSlotCount() {
+ if (length() > 0) {
+ bool function_name_stack_slot =
+ FunctionVariableField::decode(Flags()) == STACK;
+ return StackLocalCount() + (function_name_stack_slot ? 1 : 0);
+ }
+ return 0;
}
-template <class Allocator, class T>
-static Object** WriteList(Object** p, List<Handle<T>, Allocator >* list) {
- const int n = list->length();
- p = WriteInt(p, n);
- for (int i = 0; i < n; i++) {
- p = WriteObject(p, list->at(i));
+int ScopeInfo::ContextLength() {
+ if (length() > 0) {
+ int context_locals = ContextLocalCount();
+ bool function_name_context_slot =
+ FunctionVariableField::decode(Flags()) == CONTEXT;
+ bool has_context = context_locals > 0 ||
+ function_name_context_slot ||
+ Type() == WITH_SCOPE ||
+ (Type() == FUNCTION_SCOPE && CallsEval());
+ if (has_context) {
+ return Context::MIN_CONTEXT_SLOTS + context_locals +
+ (function_name_context_slot ? 1 : 0);
+ }
}
- return p;
+ return 0;
}
-template <class Allocator>
-static Object** WriteList(Object** p,
- List<Handle<String>, Allocator>* list,
- List<VariableMode, Allocator>* modes) {
- const int n = list->length();
- p = WriteInt(p, n);
- for (int i = 0; i < n; i++) {
- p = WriteObject(p, list->at(i));
- p = WriteInt(p, modes->at(i));
- }
- return p;
-}
-
-
-template<class Allocator>
-Handle<SerializedScopeInfo> ScopeInfo<Allocator>::Serialize() {
- // function name, calls eval, is_strict_mode, is_qml_mode, scope type,
- // length for 3 tables:
- const int extra_slots = 1 + 1 + 1 + 1 + 1 + 3;
- int length = extra_slots +
- context_slots_.length() * 2 +
- parameters_.length() +
- stack_slots_.length();
-
- Handle<SerializedScopeInfo> data(
- SerializedScopeInfo::cast(*FACTORY->NewSerializedScopeInfo(length)));
- AssertNoAllocation nogc;
-
- Object** p0 = data->data_start();
- Object** p = p0;
- p = WriteObject(p, function_name_);
- p = WriteBool(p, calls_eval_);
- p = WriteBool(p, is_strict_mode_);
- p = WriteBool(p, is_qml_mode_);
- p = WriteInt(p, type_);
- p = WriteList(p, &context_slots_, &context_modes_);
- p = WriteList(p, &parameters_);
- p = WriteList(p, &stack_slots_);
- ASSERT((p - p0) == length);
-
- return data;
-}
-
-
-template<class Allocator>
-Handle<String> ScopeInfo<Allocator>::LocalName(int i) const {
- // A local variable can be allocated either on the stack or in the context.
- // For variables allocated in the context they are always preceded by
- // Context::MIN_CONTEXT_SLOTS of fixed allocated slots in the context.
- if (i < number_of_stack_slots()) {
- return stack_slot_name(i);
+bool ScopeInfo::HasFunctionName() {
+ if (length() > 0) {
+ return NONE != FunctionVariableField::decode(Flags());
} else {
- return context_slot_name(i - number_of_stack_slots() +
- Context::MIN_CONTEXT_SLOTS);
+ return false;
}
}
-template<class Allocator>
-int ScopeInfo<Allocator>::NumberOfLocals() const {
- int number_of_locals = number_of_stack_slots();
- if (number_of_context_slots() > 0) {
- ASSERT(number_of_context_slots() >= Context::MIN_CONTEXT_SLOTS);
- number_of_locals += number_of_context_slots() - Context::MIN_CONTEXT_SLOTS;
+bool ScopeInfo::HasHeapAllocatedLocals() {
+ if (length() > 0) {
+ return ContextLocalCount() > 0;
+ } else {
+ return false;
}
- return number_of_locals;
-}
-
-
-Handle<SerializedScopeInfo> SerializedScopeInfo::Create(Scope* scope) {
- ScopeInfo<ZoneListAllocationPolicy> sinfo(scope);
- return sinfo.Serialize();
-}
-
-
-SerializedScopeInfo* SerializedScopeInfo::Empty() {
- return reinterpret_cast<SerializedScopeInfo*>(HEAP->empty_fixed_array());
-}
-
-
-Object** SerializedScopeInfo::ContextEntriesAddr() {
- ASSERT(length() > 0);
- // +5 for function name, calls eval, strict mode, qml mode, scope type.
- return data_start() + 5;
}
-Object** SerializedScopeInfo::ParameterEntriesAddr() {
- ASSERT(length() > 0);
- Object** p = ContextEntriesAddr();
- int number_of_context_slots;
- p = ReadInt(p, &number_of_context_slots);
- return p + number_of_context_slots*2; // *2 for pairs
-}
-
-
-Object** SerializedScopeInfo::StackSlotEntriesAddr() {
- ASSERT(length() > 0);
- Object** p = ParameterEntriesAddr();
- int number_of_parameter_slots;
- p = ReadInt(p, &number_of_parameter_slots);
- return p + number_of_parameter_slots;
-}
-
-
-bool SerializedScopeInfo::CallsEval() {
+bool ScopeInfo::HasContext() {
if (length() > 0) {
- Object** p = data_start() + 1; // +1 for function name.
- bool calls_eval;
- p = ReadBool(p, &calls_eval);
- return calls_eval;
+ return ContextLength() > 0;
+ } else {
+ return false;
}
- return false;
}
-bool SerializedScopeInfo::IsStrictMode() {
- if (length() > 0) {
- Object** p = data_start() + 2; // +2 for function name, calls eval.
- bool strict_mode;
- p = ReadBool(p, &strict_mode);
- return strict_mode;
- }
- return false;
+String* ScopeInfo::FunctionName() {
+ ASSERT(HasFunctionName());
+ return String::cast(get(FunctionNameEntryIndex()));
}
-bool SerializedScopeInfo::IsQmlMode() {
- if (length() > 0) {
- Object** p = data_start() + 3; // +3 for function name, calls eval, strict mode.
- bool qml_mode;
- p = ReadBool(p, &qml_mode);
- return qml_mode;
- }
- return false;
+String* ScopeInfo::ParameterName(int var) {
+ ASSERT(0 <= var && var < ParameterCount());
+ int info_index = ParameterEntriesIndex() + var;
+ return String::cast(get(info_index));
}
-ScopeType SerializedScopeInfo::Type() {
- ASSERT(length() > 0);
- // +4 for function name, calls eval, strict mode, qml mode.
- Object** p = data_start() + 4;
- ScopeType type;
- p = ReadInt(p, &type);
- return type;
+String* ScopeInfo::LocalName(int var) {
+ ASSERT(0 <= var && var < LocalCount());
+ ASSERT(StackLocalEntriesIndex() + StackLocalCount() ==
+ ContextLocalNameEntriesIndex());
+ int info_index = StackLocalEntriesIndex() + var;
+ return String::cast(get(info_index));
}
-int SerializedScopeInfo::NumberOfStackSlots() {
- if (length() > 0) {
- Object** p = StackSlotEntriesAddr();
- int number_of_stack_slots;
- ReadInt(p, &number_of_stack_slots);
- return number_of_stack_slots;
- }
- return 0;
+String* ScopeInfo::StackLocalName(int var) {
+ ASSERT(0 <= var && var < StackLocalCount());
+ int info_index = StackLocalEntriesIndex() + var;
+ return String::cast(get(info_index));
}
-int SerializedScopeInfo::NumberOfContextSlots() {
- if (length() > 0) {
- Object** p = ContextEntriesAddr();
- int number_of_context_slots;
- ReadInt(p, &number_of_context_slots);
- return number_of_context_slots + Context::MIN_CONTEXT_SLOTS;
- }
- return 0;
+String* ScopeInfo::ContextLocalName(int var) {
+ ASSERT(0 <= var && var < ContextLocalCount());
+ int info_index = ContextLocalNameEntriesIndex() + var;
+ return String::cast(get(info_index));
}
-bool SerializedScopeInfo::HasHeapAllocatedLocals() {
- if (length() > 0) {
- Object** p = ContextEntriesAddr();
- int number_of_context_slots;
- ReadInt(p, &number_of_context_slots);
- return number_of_context_slots > 0;
- }
- return false;
+VariableMode ScopeInfo::ContextLocalMode(int var) {
+ ASSERT(0 <= var && var < ContextLocalCount());
+ int info_index = ContextLocalInfoEntriesIndex() + var;
+ int value = Smi::cast(get(info_index))->value();
+ return ContextLocalMode::decode(value);
}
-bool SerializedScopeInfo::HasContext() {
- return HasHeapAllocatedLocals() ||
- Type() == WITH_SCOPE;
+InitializationFlag ScopeInfo::ContextLocalInitFlag(int var) {
+ ASSERT(0 <= var && var < ContextLocalCount());
+ int info_index = ContextLocalInfoEntriesIndex() + var;
+ int value = Smi::cast(get(info_index))->value();
+ return ContextLocalInitFlag::decode(value);
}
-int SerializedScopeInfo::StackSlotIndex(String* name) {
+int ScopeInfo::StackSlotIndex(String* name) {
ASSERT(name->IsSymbol());
if (length() > 0) {
- // Slots start after length entry.
- Object** p0 = StackSlotEntriesAddr();
- int number_of_stack_slots;
- p0 = ReadInt(p0, &number_of_stack_slots);
- Object** p = p0;
- Object** end = p0 + number_of_stack_slots;
- while (p != end) {
- if (*p == name) return static_cast<int>(p - p0);
- p++;
+ int start = StackLocalEntriesIndex();
+ int end = StackLocalEntriesIndex() + StackLocalCount();
+ for (int i = start; i < end; ++i) {
+ if (name == get(i)) {
+ return i - start;
+ }
}
}
return -1;
}
-int SerializedScopeInfo::ContextSlotIndex(String* name, VariableMode* mode) {
+
+int ScopeInfo::ContextSlotIndex(String* name,
+ VariableMode* mode,
+ InitializationFlag* init_flag) {
ASSERT(name->IsSymbol());
- Isolate* isolate = GetIsolate();
- int result = isolate->context_slot_cache()->Lookup(this, name, mode);
- if (result != ContextSlotCache::kNotFound) return result;
+ ASSERT(mode != NULL);
+ ASSERT(init_flag != NULL);
if (length() > 0) {
- // Slots start after length entry.
- Object** p0 = ContextEntriesAddr();
- int number_of_context_slots;
- p0 = ReadInt(p0, &number_of_context_slots);
- Object** p = p0;
- Object** end = p0 + number_of_context_slots * 2;
- while (p != end) {
- if (*p == name) {
- ASSERT(((p - p0) & 1) == 0);
- int v;
- ReadInt(p + 1, &v);
- VariableMode mode_value = static_cast<VariableMode>(v);
- if (mode != NULL) *mode = mode_value;
- result = static_cast<int>((p - p0) >> 1) + Context::MIN_CONTEXT_SLOTS;
- isolate->context_slot_cache()->Update(this, name, mode_value, result);
+ ContextSlotCache* context_slot_cache = GetIsolate()->context_slot_cache();
+ int result = context_slot_cache->Lookup(this, name, mode, init_flag);
+ if (result != ContextSlotCache::kNotFound) {
+ ASSERT(result < ContextLength());
+ return result;
+ }
+
+ int start = ContextLocalNameEntriesIndex();
+ int end = ContextLocalNameEntriesIndex() + ContextLocalCount();
+ for (int i = start; i < end; ++i) {
+ if (name == get(i)) {
+ int var = i - start;
+ *mode = ContextLocalMode(var);
+ *init_flag = ContextLocalInitFlag(var);
+ result = Context::MIN_CONTEXT_SLOTS + var;
+ context_slot_cache->Update(this, name, *mode, *init_flag, result);
+ ASSERT(result < ContextLength());
return result;
}
- p += 2;
}
+ context_slot_cache->Update(this, name, INTERNAL, kNeedsInitialization, -1);
}
- isolate->context_slot_cache()->Update(this, name, INTERNAL, -1);
return -1;
}
-int SerializedScopeInfo::ParameterIndex(String* name) {
+int ScopeInfo::ParameterIndex(String* name) {
ASSERT(name->IsSymbol());
if (length() > 0) {
// We must read parameters from the end since for
@@ -538,49 +336,58 @@ int SerializedScopeInfo::ParameterIndex(String* name) {
// last declaration of that parameter is used
// inside a function (and thus we need to look
// at the last index). Was bug# 1110337.
- //
- // Eventually, we should only register such parameters
- // once, with corresponding index. This requires a new
- // implementation of the ScopeInfo code. See also other
- // comments in this file regarding this.
- Object** p = ParameterEntriesAddr();
- int number_of_parameter_slots;
- Object** p0 = ReadInt(p, &number_of_parameter_slots);
- p = p0 + number_of_parameter_slots;
- while (p > p0) {
- p--;
- if (*p == name) return static_cast<int>(p - p0);
+ int start = ParameterEntriesIndex();
+ int end = ParameterEntriesIndex() + ParameterCount();
+ for (int i = end - 1; i >= start; --i) {
+ if (name == get(i)) {
+ return i - start;
+ }
}
}
return -1;
}
-int SerializedScopeInfo::FunctionContextSlotIndex(String* name,
- VariableMode* mode) {
+int ScopeInfo::FunctionContextSlotIndex(String* name, VariableMode* mode) {
ASSERT(name->IsSymbol());
+ ASSERT(mode != NULL);
if (length() > 0) {
- Object** p = data_start();
- if (*p == name) {
- p = ContextEntriesAddr();
- int number_of_context_slots;
- p = ReadInt(p, &number_of_context_slots);
- ASSERT(number_of_context_slots != 0);
- // The function context slot is the last entry.
- if (mode != NULL) {
- // Seek to context slot entry.
- p += (number_of_context_slots - 1) * 2;
- // Seek to mode.
- ++p;
- ReadInt(p, mode);
- }
- return number_of_context_slots + Context::MIN_CONTEXT_SLOTS - 1;
+ if (FunctionVariableField::decode(Flags()) == CONTEXT &&
+ FunctionName() == name) {
+ *mode = FunctionVariableMode::decode(Flags());
+ return Smi::cast(get(FunctionNameEntryIndex() + 1))->value();
}
}
return -1;
}
+int ScopeInfo::ParameterEntriesIndex() {
+ ASSERT(length() > 0);
+ return kVariablePartIndex;
+}
+
+
+int ScopeInfo::StackLocalEntriesIndex() {
+ return ParameterEntriesIndex() + ParameterCount();
+}
+
+
+int ScopeInfo::ContextLocalNameEntriesIndex() {
+ return StackLocalEntriesIndex() + StackLocalCount();
+}
+
+
+int ScopeInfo::ContextLocalInfoEntriesIndex() {
+ return ContextLocalNameEntriesIndex() + ContextLocalCount();
+}
+
+
+int ScopeInfo::FunctionNameEntryIndex() {
+ return ContextLocalInfoEntriesIndex() + ContextLocalCount();
+}
+
+
int ContextSlotCache::Hash(Object* data, String* name) {
// Uses only lower 32 bits if pointers are larger.
uintptr_t addr_hash =
@@ -591,12 +398,14 @@ int ContextSlotCache::Hash(Object* data, String* name) {
int ContextSlotCache::Lookup(Object* data,
String* name,
- VariableMode* mode) {
+ VariableMode* mode,
+ InitializationFlag* init_flag) {
int index = Hash(data, name);
Key& key = keys_[index];
if ((key.data == data) && key.name->Equals(name)) {
Value result(values_[index]);
if (mode != NULL) *mode = result.mode();
+ if (init_flag != NULL) *init_flag = result.initialization_flag();
return result.index() + kNotFound;
}
return kNotFound;
@@ -606,6 +415,7 @@ int ContextSlotCache::Lookup(Object* data,
void ContextSlotCache::Update(Object* data,
String* name,
VariableMode mode,
+ InitializationFlag init_flag,
int slot_index) {
String* symbol;
ASSERT(slot_index > kNotFound);
@@ -615,9 +425,9 @@ void ContextSlotCache::Update(Object* data,
key.data = data;
key.name = symbol;
// Please note value only takes a uint as index.
- values_[index] = Value(mode, slot_index - kNotFound).raw();
+ values_[index] = Value(mode, init_flag, slot_index - kNotFound).raw();
#ifdef DEBUG
- ValidateEntry(data, name, mode, slot_index);
+ ValidateEntry(data, name, mode, init_flag, slot_index);
#endif
}
}
@@ -633,6 +443,7 @@ void ContextSlotCache::Clear() {
void ContextSlotCache::ValidateEntry(Object* data,
String* name,
VariableMode mode,
+ InitializationFlag init_flag,
int slot_index) {
String* symbol;
if (HEAP->LookupSymbolIfExists(name, &symbol)) {
@@ -642,51 +453,56 @@ void ContextSlotCache::ValidateEntry(Object* data,
ASSERT(key.name->Equals(name));
Value result(values_[index]);
ASSERT(result.mode() == mode);
+ ASSERT(result.initialization_flag() == init_flag);
ASSERT(result.index() + kNotFound == slot_index);
}
}
-template <class Allocator>
static void PrintList(const char* list_name,
int nof_internal_slots,
- List<Handle<String>, Allocator>& list) {
- if (list.length() > 0) {
+ int start,
+ int end,
+ ScopeInfo* scope_info) {
+ if (start < end) {
PrintF("\n // %s\n", list_name);
if (nof_internal_slots > 0) {
PrintF(" %2d - %2d [internal slots]\n", 0 , nof_internal_slots - 1);
}
- for (int i = 0; i < list.length(); i++) {
- PrintF(" %2d ", i + nof_internal_slots);
- list[i]->ShortPrint();
+ for (int i = nof_internal_slots; start < end; ++i, ++start) {
+ PrintF(" %2d ", i);
+ String::cast(scope_info->get(start))->ShortPrint();
PrintF("\n");
}
}
}
-template<class Allocator>
-void ScopeInfo<Allocator>::Print() {
+void ScopeInfo::Print() {
PrintF("ScopeInfo ");
- if (function_name_->length() > 0)
- function_name_->ShortPrint();
- else
+ if (HasFunctionName()) {
+ FunctionName()->ShortPrint();
+ } else {
PrintF("/* no function name */");
+ }
PrintF("{");
- PrintList<Allocator>("parameters", 0, parameters_);
- PrintList<Allocator>("stack slots", 0, stack_slots_);
- PrintList<Allocator>("context slots", Context::MIN_CONTEXT_SLOTS,
- context_slots_);
+ PrintList("parameters", 0,
+ ParameterEntriesIndex(),
+ ParameterEntriesIndex() + ParameterCount(),
+ this);
+ PrintList("stack slots", 0,
+ StackLocalEntriesIndex(),
+ StackLocalEntriesIndex() + StackLocalCount(),
+ this);
+ PrintList("context slots",
+ Context::MIN_CONTEXT_SLOTS,
+ ContextLocalNameEntriesIndex(),
+ ContextLocalNameEntriesIndex() + ContextLocalCount(),
+ this);
PrintF("}\n");
}
#endif // DEBUG
-
-// Make sure the classes get instantiated by the template system.
-template class ScopeInfo<FreeStoreAllocationPolicy>;
-template class ScopeInfo<PreallocatedStorage>;
-template class ScopeInfo<ZoneListAllocationPolicy>;
-
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/scopeinfo.h b/src/3rdparty/v8/src/scopeinfo.h
index 2ca4503..93734f5 100644
--- a/src/3rdparty/v8/src/scopeinfo.h
+++ b/src/3rdparty/v8/src/scopeinfo.h
@@ -35,68 +35,6 @@
namespace v8 {
namespace internal {
-// ScopeInfo represents information about different scopes of a source
-// program and the allocation of the scope's variables. Scope information
-// is stored in a compressed form in SerializedScopeInfo objects and is used
-// at runtime (stack dumps, deoptimization, etc.).
-
-// Forward defined as
-// template <class Allocator = FreeStoreAllocationPolicy> class ScopeInfo;
-template<class Allocator>
-class ScopeInfo BASE_EMBEDDED {
- public:
- // Create a ScopeInfo instance from a scope.
- explicit ScopeInfo(Scope* scope);
-
- // Create a ScopeInfo instance from SerializedScopeInfo.
- explicit ScopeInfo(SerializedScopeInfo* data);
-
- // Creates a SerializedScopeInfo holding the serialized scope info.
- Handle<SerializedScopeInfo> Serialize();
-
- // --------------------------------------------------------------------------
- // Lookup
-
- Handle<String> function_name() const { return function_name_; }
-
- Handle<String> parameter_name(int i) const { return parameters_[i]; }
- int number_of_parameters() const { return parameters_.length(); }
-
- Handle<String> stack_slot_name(int i) const { return stack_slots_[i]; }
- int number_of_stack_slots() const { return stack_slots_.length(); }
-
- Handle<String> context_slot_name(int i) const {
- return context_slots_[i - Context::MIN_CONTEXT_SLOTS];
- }
- int number_of_context_slots() const {
- int l = context_slots_.length();
- return l == 0 ? 0 : l + Context::MIN_CONTEXT_SLOTS;
- }
-
- Handle<String> LocalName(int i) const;
- int NumberOfLocals() const;
-
- ScopeType type() const { return type_; }
- // --------------------------------------------------------------------------
- // Debugging support
-
-#ifdef DEBUG
- void Print();
-#endif
-
- private:
- Handle<String> function_name_;
- bool calls_eval_;
- bool is_strict_mode_;
- bool is_qml_mode_;
- ScopeType type_;
- List<Handle<String>, Allocator > parameters_;
- List<Handle<String>, Allocator > stack_slots_;
- List<Handle<String>, Allocator > context_slots_;
- List<VariableMode, Allocator > context_modes_;
-};
-
-
// Cache for mapping (data, property name) into context slot index.
// The cache contains both positive and negative results.
// Slot index equals -1 means the property is absent.
@@ -107,12 +45,14 @@ class ContextSlotCache {
// If absent, kNotFound is returned.
int Lookup(Object* data,
String* name,
- VariableMode* mode);
+ VariableMode* mode,
+ InitializationFlag* init_flag);
// Update an element in the cache.
void Update(Object* data,
String* name,
VariableMode mode,
+ InitializationFlag init_flag,
int slot_index);
// Clear the cache.
@@ -135,6 +75,7 @@ class ContextSlotCache {
void ValidateEntry(Object* data,
String* name,
VariableMode mode,
+ InitializationFlag init_flag,
int slot_index);
#endif
@@ -145,11 +86,17 @@ class ContextSlotCache {
};
struct Value {
- Value(VariableMode mode, int index) {
+ Value(VariableMode mode,
+ InitializationFlag init_flag,
+ int index) {
ASSERT(ModeField::is_valid(mode));
+ ASSERT(InitField::is_valid(init_flag));
ASSERT(IndexField::is_valid(index));
- value_ = ModeField::encode(mode) | IndexField::encode(index);
+ value_ = ModeField::encode(mode) |
+ IndexField::encode(index) |
+ InitField::encode(init_flag);
ASSERT(mode == this->mode());
+ ASSERT(init_flag == this->initialization_flag());
ASSERT(index == this->index());
}
@@ -159,12 +106,18 @@ class ContextSlotCache {
VariableMode mode() { return ModeField::decode(value_); }
+ InitializationFlag initialization_flag() {
+ return InitField::decode(value_);
+ }
+
int index() { return IndexField::decode(value_); }
// Bit fields in value_ (type, shift, size). Must be public so the
// constants can be embedded in generated code.
- class ModeField: public BitField<VariableMode, 0, 3> {};
- class IndexField: public BitField<int, 3, 32-3> {};
+ class ModeField: public BitField<VariableMode, 0, 3> {};
+ class InitField: public BitField<InitializationFlag, 3, 1> {};
+ class IndexField: public BitField<int, 4, 32-4> {};
+
private:
uint32_t value_;
};
diff --git a/src/3rdparty/v8/src/scopes.cc b/src/3rdparty/v8/src/scopes.cc
index 25057e1..859cbd1 100644
--- a/src/3rdparty/v8/src/scopes.cc
+++ b/src/3rdparty/v8/src/scopes.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -31,36 +31,15 @@
#include "bootstrapper.h"
#include "compiler.h"
+#include "messages.h"
#include "scopeinfo.h"
#include "allocation-inl.h"
-#include "debug.h"
-
namespace v8 {
namespace internal {
// ----------------------------------------------------------------------------
-// A Zone allocator for use with LocalsMap.
-
-// TODO(isolates): It is probably worth it to change the Allocator class to
-// take a pointer to an isolate.
-class ZoneAllocator: public Allocator {
- public:
- /* nothing to do */
- virtual ~ZoneAllocator() {}
-
- virtual void* New(size_t size) { return ZONE->New(static_cast<int>(size)); }
-
- /* ignored - Zone is freed in one fell swoop */
- virtual void Delete(void* p) {}
-};
-
-
-static ZoneAllocator LocalsMapAllocator;
-
-
-// ----------------------------------------------------------------------------
// Implementation of LocalsMap
//
// Note: We are storing the handle locations as key values in the hash map.
@@ -78,27 +57,36 @@ static bool Match(void* key1, void* key2) {
}
-VariableMap::VariableMap() : HashMap(Match, &LocalsMapAllocator, 8) {}
+VariableMap::VariableMap() : ZoneHashMap(Match, 8) {}
VariableMap::~VariableMap() {}
-Variable* VariableMap::Declare(Scope* scope,
- Handle<String> name,
- VariableMode mode,
- bool is_valid_lhs,
- Variable::Kind kind) {
- HashMap::Entry* p = HashMap::Lookup(name.location(), name->Hash(), true);
+Variable* VariableMap::Declare(
+ Scope* scope,
+ Handle<String> name,
+ VariableMode mode,
+ bool is_valid_lhs,
+ Variable::Kind kind,
+ InitializationFlag initialization_flag,
+ Interface* interface) {
+ Entry* p = ZoneHashMap::Lookup(name.location(), name->Hash(), true);
if (p->value == NULL) {
// The variable has not been declared yet -> insert it.
ASSERT(p->key == name.location());
- p->value = new Variable(scope, name, mode, is_valid_lhs, kind);
+ p->value = new Variable(scope,
+ name,
+ mode,
+ is_valid_lhs,
+ kind,
+ initialization_flag,
+ interface);
}
return reinterpret_cast<Variable*>(p->value);
}
Variable* VariableMap::Lookup(Handle<String> name) {
- HashMap::Entry* p = HashMap::Lookup(name.location(), name->Hash(), false);
+ Entry* p = ZoneHashMap::Lookup(name.location(), name->Hash(), false);
if (p != NULL) {
ASSERT(*reinterpret_cast<String**>(p->key) == *name);
ASSERT(p->value != NULL);
@@ -119,19 +107,22 @@ Scope::Scope(Scope* outer_scope, ScopeType type)
params_(4),
unresolved_(16),
decls_(4),
+ interface_(FLAG_harmony_modules &&
+ (type == MODULE_SCOPE || type == GLOBAL_SCOPE)
+ ? Interface::NewModule() : NULL),
already_resolved_(false) {
- SetDefaults(type, outer_scope, Handle<SerializedScopeInfo>::null());
+ SetDefaults(type, outer_scope, Handle<ScopeInfo>::null());
// At some point we might want to provide outer scopes to
// eval scopes (by walking the stack and reading the scope info).
// In that case, the ASSERT below needs to be adjusted.
- ASSERT((type == GLOBAL_SCOPE || type == EVAL_SCOPE) == (outer_scope == NULL));
+ ASSERT_EQ(type == GLOBAL_SCOPE, outer_scope == NULL);
ASSERT(!HasIllegalRedeclaration());
}
Scope::Scope(Scope* inner_scope,
ScopeType type,
- Handle<SerializedScopeInfo> scope_info)
+ Handle<ScopeInfo> scope_info)
: isolate_(Isolate::Current()),
inner_scopes_(4),
variables_(),
@@ -139,11 +130,15 @@ Scope::Scope(Scope* inner_scope,
params_(4),
unresolved_(16),
decls_(4),
+ interface_(NULL),
already_resolved_(true) {
SetDefaults(type, NULL, scope_info);
- if (!scope_info.is_null() && scope_info->HasHeapAllocatedLocals()) {
- num_heap_slots_ = scope_info_->NumberOfContextSlots();
+ if (!scope_info.is_null()) {
+ num_heap_slots_ = scope_info_->ContextLength();
}
+ // Ensure at least MIN_CONTEXT_SLOTS to indicate a materialized context.
+ num_heap_slots_ = Max(num_heap_slots_,
+ static_cast<int>(Context::MIN_CONTEXT_SLOTS));
AddInnerScope(inner_scope);
}
@@ -156,22 +151,25 @@ Scope::Scope(Scope* inner_scope, Handle<String> catch_variable_name)
params_(0),
unresolved_(0),
decls_(0),
+ interface_(NULL),
already_resolved_(true) {
- SetDefaults(CATCH_SCOPE, NULL, Handle<SerializedScopeInfo>::null());
+ SetDefaults(CATCH_SCOPE, NULL, Handle<ScopeInfo>::null());
AddInnerScope(inner_scope);
++num_var_or_const_;
+ num_heap_slots_ = Context::MIN_CONTEXT_SLOTS;
Variable* variable = variables_.Declare(this,
catch_variable_name,
VAR,
true, // Valid left-hand side.
- Variable::NORMAL);
+ Variable::NORMAL,
+ kCreatedInitialized);
AllocateHeapSlot(variable);
}
void Scope::SetDefaults(ScopeType type,
Scope* outer_scope,
- Handle<SerializedScopeInfo> scope_info) {
+ Handle<ScopeInfo> scope_info) {
outer_scope_ = outer_scope;
type_ = type;
scope_name_ = isolate_->factory()->empty_symbol();
@@ -184,9 +182,8 @@ void Scope::SetDefaults(ScopeType type,
scope_contains_with_ = false;
scope_calls_eval_ = false;
// Inherit the strict mode from the parent scope.
- strict_mode_flag_ = (outer_scope != NULL)
- ? outer_scope->strict_mode_flag_ : kNonStrictMode;
- qml_mode_ = (outer_scope != NULL) && outer_scope->qml_mode_;
+ language_mode_ = (outer_scope != NULL)
+ ? outer_scope->language_mode_ : CLASSIC_MODE;
outer_scope_calls_non_strict_eval_ = false;
inner_scope_calls_eval_ = false;
force_eager_compilation_ = false;
@@ -196,21 +193,23 @@ void Scope::SetDefaults(ScopeType type,
scope_info_ = scope_info;
start_position_ = RelocInfo::kNoPosition;
end_position_ = RelocInfo::kNoPosition;
+ if (!scope_info.is_null()) {
+ scope_calls_eval_ = scope_info->CallsEval();
+ language_mode_ = scope_info->language_mode();
+ }
}
-Scope* Scope::DeserializeScopeChain(CompilationInfo* info,
- Scope* global_scope) {
+Scope* Scope::DeserializeScopeChain(Context* context, Scope* global_scope) {
// Reconstruct the outer scope chain from a closure's context chain.
- ASSERT(!info->closure().is_null());
- Context* context = info->closure()->context();
Scope* current_scope = NULL;
Scope* innermost_scope = NULL;
bool contains_with = false;
while (!context->IsGlobalContext()) {
if (context->IsWithContext()) {
- Scope* with_scope = new Scope(current_scope, WITH_SCOPE,
- Handle<SerializedScopeInfo>::null());
+ Scope* with_scope = new Scope(current_scope,
+ WITH_SCOPE,
+ Handle<ScopeInfo>::null());
current_scope = with_scope;
// All the inner scopes are inside a with.
contains_with = true;
@@ -218,15 +217,15 @@ Scope* Scope::DeserializeScopeChain(CompilationInfo* info,
s->scope_inside_with_ = true;
}
} else if (context->IsFunctionContext()) {
- SerializedScopeInfo* scope_info =
- context->closure()->shared()->scope_info();
- current_scope = new Scope(current_scope, FUNCTION_SCOPE,
- Handle<SerializedScopeInfo>(scope_info));
+ ScopeInfo* scope_info = context->closure()->shared()->scope_info();
+ current_scope = new Scope(current_scope,
+ FUNCTION_SCOPE,
+ Handle<ScopeInfo>(scope_info));
} else if (context->IsBlockContext()) {
- SerializedScopeInfo* scope_info =
- SerializedScopeInfo::cast(context->extension());
- current_scope = new Scope(current_scope, BLOCK_SCOPE,
- Handle<SerializedScopeInfo>(scope_info));
+ ScopeInfo* scope_info = ScopeInfo::cast(context->extension());
+ current_scope = new Scope(current_scope,
+ BLOCK_SCOPE,
+ Handle<ScopeInfo>(scope_info));
} else {
ASSERT(context->IsCatchContext());
String* name = String::cast(context->extension());
@@ -243,27 +242,61 @@ Scope* Scope::DeserializeScopeChain(CompilationInfo* info,
}
global_scope->AddInnerScope(current_scope);
+ global_scope->PropagateScopeInfo(false);
return (innermost_scope == NULL) ? global_scope : innermost_scope;
}
bool Scope::Analyze(CompilationInfo* info) {
ASSERT(info->function() != NULL);
- Scope* top = info->function()->scope();
+ Scope* scope = info->function()->scope();
+ Scope* top = scope;
+
+ // Traverse the scope tree up to the first unresolved scope or the global
+ // scope and start scope resolution and variable allocation from that scope.
+ while (!top->is_global_scope() &&
+ !top->outer_scope()->already_resolved()) {
+ top = top->outer_scope();
+ }
- while (top->outer_scope() != NULL) top = top->outer_scope();
- top->AllocateVariables(info->calling_context());
+ // Allocate the variables.
+ {
+ AstNodeFactory<AstNullVisitor> ast_node_factory(info->isolate());
+ if (!top->AllocateVariables(info, &ast_node_factory)) return false;
+ }
#ifdef DEBUG
if (info->isolate()->bootstrapper()->IsActive()
? FLAG_print_builtin_scopes
: FLAG_print_scopes) {
- info->function()->scope()->Print();
+ scope->Print();
+ }
+
+ if (FLAG_harmony_modules && FLAG_print_interfaces && top->is_global_scope()) {
+ PrintF("global : ");
+ top->interface()->Print();
}
#endif
- info->SetScope(info->function()->scope());
- return true; // Can not fail.
+ if (FLAG_harmony_scoping) {
+ VariableProxy* proxy = scope->CheckAssignmentToConst();
+ if (proxy != NULL) {
+ // Found an assignment to const. Throw a syntax error.
+ MessageLocation location(info->script(),
+ proxy->position(),
+ proxy->position());
+ Isolate* isolate = info->isolate();
+ Factory* factory = isolate->factory();
+ Handle<JSArray> array = factory->NewJSArray(0);
+ Handle<Object> result =
+ factory->NewSyntaxError("harmony_const_assign", array);
+ isolate->Throw(*result, &location);
+ return false;
+ }
+ }
+
+ info->SetScope(scope);
+ return true;
}
@@ -292,7 +325,8 @@ void Scope::Initialize() {
isolate_->factory()->this_symbol(),
VAR,
false,
- Variable::THIS);
+ Variable::THIS,
+ kCreatedInitialized);
var->AllocateTo(Variable::PARAMETER, -1);
receiver_ = var;
} else {
@@ -308,7 +342,8 @@ void Scope::Initialize() {
isolate_->factory()->arguments_symbol(),
VAR,
true,
- Variable::ARGUMENTS);
+ Variable::ARGUMENTS,
+ kCreatedInitialized);
}
}
@@ -348,34 +383,51 @@ Variable* Scope::LocalLookup(Handle<String> name) {
return result;
}
// If we have a serialized scope info, we might find the variable there.
- //
- // We should never lookup 'arguments' in this scope as it is implicitly
- // present in every scope.
- ASSERT(*name != *isolate_->factory()->arguments_symbol());
// There should be no local slot with the given name.
ASSERT(scope_info_->StackSlotIndex(*name) < 0);
// Check context slot lookup.
VariableMode mode;
- int index = scope_info_->ContextSlotIndex(*name, &mode);
+ InitializationFlag init_flag;
+ int index = scope_info_->ContextSlotIndex(*name, &mode, &init_flag);
if (index < 0) {
// Check parameters.
mode = VAR;
+ init_flag = kCreatedInitialized;
index = scope_info_->ParameterIndex(*name);
- if (index < 0) {
- // Check the function name.
- index = scope_info_->FunctionContextSlotIndex(*name, NULL);
- if (index < 0) return NULL;
- }
+ if (index < 0) return NULL;
}
Variable* var =
- variables_.Declare(this, name, mode, true, Variable::NORMAL);
+ variables_.Declare(this,
+ name,
+ mode,
+ true,
+ Variable::NORMAL,
+ init_flag);
var->AllocateTo(Variable::CONTEXT, index);
return var;
}
+Variable* Scope::LookupFunctionVar(Handle<String> name,
+ AstNodeFactory<AstNullVisitor>* factory) {
+ if (function_ != NULL && function_->name().is_identical_to(name)) {
+ return function_->var();
+ } else if (!scope_info_.is_null()) {
+ // If we are backed by a scope info, try to lookup the variable there.
+ VariableMode mode;
+ int index = scope_info_->FunctionContextSlotIndex(*name, &mode);
+ if (index < 0) return NULL;
+ Variable* var = DeclareFunctionVar(name, mode, factory);
+ var->AllocateTo(Variable::CONTEXT, index);
+ return var;
+ } else {
+ return NULL;
+ }
+}
+
+
Variable* Scope::Lookup(Handle<String> name) {
for (Scope* scope = this;
scope != NULL;
@@ -387,25 +439,19 @@ Variable* Scope::Lookup(Handle<String> name) {
}
-Variable* Scope::DeclareFunctionVar(Handle<String> name, VariableMode mode) {
- ASSERT(is_function_scope() && function_ == NULL);
- Variable* function_var =
- new Variable(this, name, mode, true, Variable::NORMAL);
- function_ = new(isolate_->zone()) VariableProxy(isolate_, function_var);
- return function_var;
-}
-
-
void Scope::DeclareParameter(Handle<String> name, VariableMode mode) {
ASSERT(!already_resolved());
ASSERT(is_function_scope());
- Variable* var =
- variables_.Declare(this, name, mode, true, Variable::NORMAL);
+ Variable* var = variables_.Declare(
+ this, name, mode, true, Variable::NORMAL, kCreatedInitialized);
params_.Add(var);
}
-Variable* Scope::DeclareLocal(Handle<String> name, VariableMode mode) {
+Variable* Scope::DeclareLocal(Handle<String> name,
+ VariableMode mode,
+ InitializationFlag init_flag,
+ Interface* interface) {
ASSERT(!already_resolved());
// This function handles VAR and CONST modes. DYNAMIC variables are
// introduces during variable allocation, INTERNAL variables are allocated
@@ -415,27 +461,19 @@ Variable* Scope::DeclareLocal(Handle<String> name, VariableMode mode) {
mode == CONST_HARMONY ||
mode == LET);
++num_var_or_const_;
- return variables_.Declare(this, name, mode, true, Variable::NORMAL);
+ return variables_.Declare(
+ this, name, mode, true, Variable::NORMAL, init_flag, interface);
}
Variable* Scope::DeclareGlobal(Handle<String> name) {
ASSERT(is_global_scope());
- return variables_.Declare(this, name, DYNAMIC_GLOBAL,
+ return variables_.Declare(this,
+ name,
+ DYNAMIC_GLOBAL,
true,
- Variable::NORMAL);
-}
-
-
-VariableProxy* Scope::NewUnresolved(Handle<String> name, int position) {
- // Note that we must not share the unresolved variables with
- // the same name because they may be removed selectively via
- // RemoveUnresolved().
- ASSERT(!already_resolved());
- VariableProxy* proxy = new(isolate_->zone()) VariableProxy(
- isolate_, name, false, position);
- unresolved_.Add(proxy);
- return proxy;
+ Variable::NORMAL,
+ kCreatedInitialized);
}
@@ -457,7 +495,8 @@ Variable* Scope::NewTemporary(Handle<String> name) {
name,
TEMPORARY,
true,
- Variable::NORMAL);
+ Variable::NORMAL,
+ kCreatedInitialized);
temps_.Add(var);
return var;
}
@@ -507,61 +546,77 @@ Declaration* Scope::CheckConflictingVarDeclarations() {
}
-template<class Allocator>
-void Scope::CollectUsedVariables(List<Variable*, Allocator>* locals) {
- // Collect variables in this scope.
- // Note that the function_ variable - if present - is not
- // collected here but handled separately in ScopeInfo
- // which is the current user of this function).
+VariableProxy* Scope::CheckAssignmentToConst() {
+ // Check this scope.
+ if (is_extended_mode()) {
+ for (int i = 0; i < unresolved_.length(); i++) {
+ ASSERT(unresolved_[i]->var() != NULL);
+ if (unresolved_[i]->var()->is_const_mode() &&
+ unresolved_[i]->IsLValue()) {
+ return unresolved_[i];
+ }
+ }
+ }
+
+ // Check inner scopes.
+ for (int i = 0; i < inner_scopes_.length(); i++) {
+ VariableProxy* proxy = inner_scopes_[i]->CheckAssignmentToConst();
+ if (proxy != NULL) return proxy;
+ }
+
+ // No assignments to const found.
+ return NULL;
+}
+
+
+void Scope::CollectStackAndContextLocals(ZoneList<Variable*>* stack_locals,
+ ZoneList<Variable*>* context_locals) {
+ ASSERT(stack_locals != NULL);
+ ASSERT(context_locals != NULL);
+
+ // Collect temporaries which are always allocated on the stack.
for (int i = 0; i < temps_.length(); i++) {
Variable* var = temps_[i];
if (var->is_used()) {
- locals->Add(var);
+ ASSERT(var->IsStackLocal());
+ stack_locals->Add(var);
}
}
+
+ // Collect declared local variables.
for (VariableMap::Entry* p = variables_.Start();
p != NULL;
p = variables_.Next(p)) {
Variable* var = reinterpret_cast<Variable*>(p->value);
if (var->is_used()) {
- locals->Add(var);
+ if (var->IsStackLocal()) {
+ stack_locals->Add(var);
+ } else if (var->IsContextSlot()) {
+ context_locals->Add(var);
+ }
}
}
}
-// Make sure the method gets instantiated by the template system.
-template void Scope::CollectUsedVariables(
- List<Variable*, FreeStoreAllocationPolicy>* locals);
-template void Scope::CollectUsedVariables(
- List<Variable*, PreallocatedStorage>* locals);
-template void Scope::CollectUsedVariables(
- List<Variable*, ZoneListAllocationPolicy>* locals);
-
-
-void Scope::AllocateVariables(Handle<Context> context) {
- ASSERT(outer_scope_ == NULL); // eval or global scopes only
-
+bool Scope::AllocateVariables(CompilationInfo* info,
+ AstNodeFactory<AstNullVisitor>* factory) {
// 1) Propagate scope information.
- // If we are in an eval scope, we may have other outer scopes about
- // which we don't know anything at this point. Thus we must be conservative
- // and assume they may invoke eval themselves. Eventually we could capture
- // this information in the ScopeInfo and then use it here (by traversing
- // the call chain stack, at compile time).
-
bool outer_scope_calls_non_strict_eval = false;
- if (!is_global_scope()) {
- context->ComputeEvalScopeInfo(&outer_scope_calls_non_strict_eval);
+ if (outer_scope_ != NULL) {
+ outer_scope_calls_non_strict_eval =
+ outer_scope_->outer_scope_calls_non_strict_eval() |
+ outer_scope_->calls_non_strict_eval();
}
PropagateScopeInfo(outer_scope_calls_non_strict_eval);
// 2) Resolve variables.
- Scope* global_scope = NULL;
- if (is_global_scope()) global_scope = this;
- ResolveVariablesRecursively(global_scope, context);
+ if (!ResolveVariablesRecursively(info, factory)) return false;
// 3) Allocate variables.
AllocateVariablesRecursively();
+
+ return true;
}
@@ -613,25 +668,25 @@ Scope* Scope::DeclarationScope() {
}
-Handle<SerializedScopeInfo> Scope::GetSerializedScopeInfo() {
+Handle<ScopeInfo> Scope::GetScopeInfo() {
if (scope_info_.is_null()) {
- scope_info_ = SerializedScopeInfo::Create(this);
+ scope_info_ = ScopeInfo::Create(this);
}
return scope_info_;
}
void Scope::GetNestedScopeChain(
- List<Handle<SerializedScopeInfo> >* chain,
+ List<Handle<ScopeInfo> >* chain,
int position) {
- chain->Add(Handle<SerializedScopeInfo>(GetSerializedScopeInfo()));
+ if (!is_eval_scope()) chain->Add(Handle<ScopeInfo>(GetScopeInfo()));
for (int i = 0; i < inner_scopes_.length(); i++) {
Scope* scope = inner_scopes_[i];
int beg_pos = scope->start_position();
int end_pos = scope->end_position();
ASSERT(beg_pos >= 0 && end_pos >= 0);
- if (beg_pos <= position && position <= end_pos) {
+ if (beg_pos <= position && position < end_pos) {
scope->GetNestedScopeChain(chain, position);
return;
}
@@ -644,6 +699,7 @@ static const char* Header(ScopeType type) {
switch (type) {
case EVAL_SCOPE: return "eval";
case FUNCTION_SCOPE: return "function";
+ case MODULE_SCOPE: return "module";
case GLOBAL_SCOPE: return "global";
case CATCH_SCOPE: return "catch";
case BLOCK_SCOPE: return "block";
@@ -692,9 +748,9 @@ static void PrintVar(int indent, Variable* var) {
PrintName(var->name());
PrintF("; // ");
PrintLocation(var);
- if (var->is_accessed_from_inner_scope()) {
+ if (var->has_forced_context_allocation()) {
if (!var->IsUnallocated()) PrintF(", ");
- PrintF("inner scope access");
+ PrintF("forced context allocation");
}
PrintF("\n");
}
@@ -730,7 +786,7 @@ void Scope::Print(int n) {
PrintF(")");
}
- PrintF(" {\n");
+ PrintF(" { // (%d, %d)\n", start_position(), end_position());
// Function name, if any (named function literals, only).
if (function_ != NULL) {
@@ -743,7 +799,16 @@ void Scope::Print(int n) {
if (HasTrivialOuterContext()) {
Indent(n1, "// scope has trivial outer context\n");
}
- if (is_strict_mode()) Indent(n1, "// strict mode scope\n");
+ switch (language_mode()) {
+ case CLASSIC_MODE:
+ break;
+ case STRICT_MODE:
+ Indent(n1, "// strict mode scope\n");
+ break;
+ case EXTENDED_MODE:
+ Indent(n1, "// extended mode scope\n");
+ break;
+ }
if (scope_inside_with_) Indent(n1, "// scope inside 'with'\n");
if (scope_contains_with_) Indent(n1, "// scope contains 'with'\n");
if (scope_calls_eval_) Indent(n1, "// scope calls 'eval'\n");
@@ -796,7 +861,14 @@ Variable* Scope::NonLocal(Handle<String> name, VariableMode mode) {
Variable* var = map->Lookup(name);
if (var == NULL) {
// Declare a new non-local.
- var = map->Declare(NULL, name, mode, true, Variable::NORMAL);
+ InitializationFlag init_flag = (mode == VAR)
+ ? kCreatedInitialized : kNeedsInitialization;
+ var = map->Declare(NULL,
+ name,
+ mode,
+ true,
+ Variable::NORMAL,
+ init_flag);
// Allocate it by giving it a dynamic lookup.
var->AllocateTo(Variable::LOOKUP, -1);
}
@@ -805,8 +877,8 @@ Variable* Scope::NonLocal(Handle<String> name, VariableMode mode) {
Variable* Scope::LookupRecursive(Handle<String> name,
- Handle<Context> context,
- BindingKind* binding_kind) {
+ BindingKind* binding_kind,
+ AstNodeFactory<AstNullVisitor>* factory) {
ASSERT(binding_kind != NULL);
// Try to find the variable in this scope.
Variable* var = LocalLookup(name);
@@ -822,18 +894,17 @@ Variable* Scope::LookupRecursive(Handle<String> name,
// We did not find a variable locally. Check against the function variable,
// if any. We can do this for all scopes, since the function variable is
// only present - if at all - for function scopes.
- //
- // This lookup corresponds to a lookup in the "intermediate" scope sitting
- // between this scope and the outer scope. (ECMA-262, 3rd., requires that
- // the name of named function literal is kept in an intermediate scope
- // in between this scope and the next outer scope.)
*binding_kind = UNBOUND;
- if (function_ != NULL && function_->name().is_identical_to(name)) {
- var = function_->var();
+ var = LookupFunctionVar(name, factory);
+ if (var != NULL) {
*binding_kind = BOUND;
} else if (outer_scope_ != NULL) {
- var = outer_scope_->LookupRecursive(name, context, binding_kind);
- if (*binding_kind == BOUND) var->MarkAsAccessedFromInnerScope();
+ var = outer_scope_->LookupRecursive(name, binding_kind, factory);
+ if (*binding_kind == BOUND && (is_function_scope() || is_with_scope())) {
+ var->ForceContextAllocation();
+ }
+ } else {
+ ASSERT(is_global_scope());
}
if (is_with_scope()) {
@@ -845,16 +916,6 @@ Variable* Scope::LookupRecursive(Handle<String> name,
// object).
*binding_kind = DYNAMIC_LOOKUP;
return NULL;
- } else if (is_eval_scope()) {
- // No local binding was found, no 'with' statements have been encountered
- // and the code is executed as part of a call to 'eval'. The calling context
- // contains scope information that we can use to determine if the variable
- // is global, i.e. the calling context chain does not contain a binding and
- // no 'with' contexts.
- ASSERT(*binding_kind == UNBOUND);
- *binding_kind = context->GlobalIfNotShadowedByEval(name)
- ? UNBOUND_EVAL_SHADOWED : DYNAMIC_LOOKUP;
- return NULL;
} else if (calls_non_strict_eval()) {
// A variable binding may have been found in an outer scope, but the current
// scope makes a non-strict 'eval' call, so the found variable may not be
@@ -870,18 +931,18 @@ Variable* Scope::LookupRecursive(Handle<String> name,
}
-void Scope::ResolveVariable(Scope* global_scope,
- Handle<Context> context,
- VariableProxy* proxy) {
- ASSERT(global_scope == NULL || global_scope->is_global_scope());
+bool Scope::ResolveVariable(CompilationInfo* info,
+ VariableProxy* proxy,
+ AstNodeFactory<AstNullVisitor>* factory) {
+ ASSERT(info->global_scope()->is_global_scope());
// If the proxy is already resolved there's nothing to do
// (functions and consts may be resolved by the parser).
- if (proxy->var() != NULL) return;
+ if (proxy->var() != NULL) return true;
// Otherwise, try to resolve the variable.
BindingKind binding_kind;
- Variable* var = LookupRecursive(proxy->name(), context, &binding_kind);
+ Variable* var = LookupRecursive(proxy->name(), &binding_kind, factory);
switch (binding_kind) {
case BOUND:
// We found a variable binding.
@@ -892,26 +953,6 @@ void Scope::ResolveVariable(Scope* global_scope,
// by 'eval' introduced variable bindings.
if (var->is_global()) {
var = NonLocal(proxy->name(), DYNAMIC_GLOBAL);
-
- if (qml_mode_) {
- Handle<GlobalObject> global = isolate_->global();
-
-#ifdef ENABLE_DEBUGGER_SUPPORT
- if (isolate_->debug()->IsLoaded() && isolate_->debug()->InDebugger()) {
- //Get the context before the debugger was entered.
- SaveContext *save = isolate_->save_context();
- while (save != NULL && *save->context() == *isolate_->debug()->debug_context())
- save = save->prev();
-
- global = Handle<GlobalObject>(save->context()->global());
- }
-#endif
-
- if (qml_mode_ && !global->HasProperty(*(proxy->name()))) {
- var->set_is_qml_global(true);
- }
- }
-
} else {
Variable* invalidated = var;
var = NonLocal(proxy->name(), DYNAMIC_LOCAL);
@@ -921,54 +962,13 @@ void Scope::ResolveVariable(Scope* global_scope,
case UNBOUND:
// No binding has been found. Declare a variable in global scope.
- ASSERT(global_scope != NULL);
- var = global_scope->DeclareGlobal(proxy->name());
-
- if (qml_mode_) {
- Handle<GlobalObject> global = isolate_->global();
-
-#ifdef ENABLE_DEBUGGER_SUPPORT
- if (isolate_->debug()->IsLoaded() && isolate_->debug()->InDebugger()) {
- //Get the context before the debugger was entered.
- SaveContext *save = isolate_->save_context();
- while (save != NULL && *save->context() == *isolate_->debug()->debug_context())
- save = save->prev();
-
- global = Handle<GlobalObject>(save->context()->global());
- }
-#endif
-
- if (!global->HasProperty(*(proxy->name()))) {
- var->set_is_qml_global(true);
- }
- }
-
+ var = info->global_scope()->DeclareGlobal(proxy->name());
break;
case UNBOUND_EVAL_SHADOWED:
// No binding has been found. But some scope makes a
// non-strict 'eval' call.
var = NonLocal(proxy->name(), DYNAMIC_GLOBAL);
-
- if (qml_mode_) {
- Handle<GlobalObject> global = isolate_->global();
-
-#ifdef ENABLE_DEBUGGER_SUPPORT
- if (isolate_->debug()->IsLoaded() && isolate_->debug()->InDebugger()) {
- //Get the context before the debugger was entered.
- SaveContext *save = isolate_->save_context();
- while (save != NULL && *save->context() == *isolate_->debug()->debug_context())
- save = save->prev();
-
- global = Handle<GlobalObject>(save->context()->global());
- }
-#endif
-
- if (qml_mode_ && !global->HasProperty(*(proxy->name()))) {
- var->set_is_qml_global(true);
- }
- }
-
break;
case DYNAMIC_LOOKUP:
@@ -979,22 +979,62 @@ void Scope::ResolveVariable(Scope* global_scope,
ASSERT(var != NULL);
proxy->BindTo(var);
+
+ if (FLAG_harmony_modules) {
+ bool ok;
+#ifdef DEBUG
+ if (FLAG_print_interface_details)
+ PrintF("# Resolve %s:\n", var->name()->ToAsciiArray());
+#endif
+ proxy->interface()->Unify(var->interface(), &ok);
+ if (!ok) {
+#ifdef DEBUG
+ if (FLAG_print_interfaces) {
+ PrintF("SCOPES TYPE ERROR\n");
+ PrintF("proxy: ");
+ proxy->interface()->Print();
+ PrintF("var: ");
+ var->interface()->Print();
+ }
+#endif
+
+ // Inconsistent use of module. Throw a syntax error.
+ // TODO(rossberg): generate more helpful error message.
+ MessageLocation location(info->script(),
+ proxy->position(),
+ proxy->position());
+ Isolate* isolate = Isolate::Current();
+ Factory* factory = isolate->factory();
+ Handle<JSArray> array = factory->NewJSArray(1);
+ USE(JSObject::SetElement(array, 0, var->name(), NONE, kStrictMode));
+ Handle<Object> result =
+ factory->NewSyntaxError("module_type_error", array);
+ isolate->Throw(*result, &location);
+ return false;
+ }
+ }
+
+ return true;
}
-void Scope::ResolveVariablesRecursively(Scope* global_scope,
- Handle<Context> context) {
- ASSERT(global_scope == NULL || global_scope->is_global_scope());
+bool Scope::ResolveVariablesRecursively(
+ CompilationInfo* info,
+ AstNodeFactory<AstNullVisitor>* factory) {
+ ASSERT(info->global_scope()->is_global_scope());
// Resolve unresolved variables for this scope.
for (int i = 0; i < unresolved_.length(); i++) {
- ResolveVariable(global_scope, context, unresolved_[i]);
+ if (!ResolveVariable(info, unresolved_[i], factory)) return false;
}
// Resolve unresolved variables for inner scopes.
for (int i = 0; i < inner_scopes_.length(); i++) {
- inner_scopes_[i]->ResolveVariablesRecursively(global_scope, context);
+ if (!inner_scopes_[i]->ResolveVariablesRecursively(info, factory))
+ return false;
}
+
+ return true;
}
@@ -1004,8 +1044,7 @@ bool Scope::PropagateScopeInfo(bool outer_scope_calls_non_strict_eval ) {
}
bool calls_non_strict_eval =
- (scope_calls_eval_ && !is_strict_mode()) ||
- outer_scope_calls_non_strict_eval_;
+ this->calls_non_strict_eval() || outer_scope_calls_non_strict_eval_;
for (int i = 0; i < inner_scopes_.length(); i++) {
Scope* inner_scope = inner_scopes_[i];
if (inner_scope->PropagateScopeInfo(calls_non_strict_eval)) {
@@ -1025,7 +1064,7 @@ bool Scope::MustAllocate(Variable* var) {
// via an eval() call. This is only possible if the variable has a
// visible name.
if ((var->is_this() || var->name()->length() > 0) &&
- (var->is_accessed_from_inner_scope() ||
+ (var->has_forced_context_allocation() ||
scope_calls_eval_ ||
inner_scope_calls_eval_ ||
scope_contains_with_ ||
@@ -1048,7 +1087,7 @@ bool Scope::MustAllocateInContext(Variable* var) {
// catch-bound variables are always allocated in a context.
if (var->mode() == TEMPORARY) return false;
if (is_catch_scope() || is_block_scope()) return true;
- return var->is_accessed_from_inner_scope() ||
+ return var->has_forced_context_allocation() ||
scope_calls_eval_ ||
inner_scope_calls_eval_ ||
scope_contains_with_ ||
@@ -1101,7 +1140,7 @@ void Scope::AllocateParameterLocals() {
// In strict mode 'arguments' does not alias formal parameters.
// Therefore in strict mode we allocate parameters as if 'arguments'
// were not used.
- uses_nonstrict_arguments = !is_strict_mode();
+ uses_nonstrict_arguments = is_classic_mode();
}
// The same parameter may occur multiple times in the parameters_ list.
@@ -1112,9 +1151,8 @@ void Scope::AllocateParameterLocals() {
Variable* var = params_[i];
ASSERT(var->scope() == this);
if (uses_nonstrict_arguments) {
- // Give the parameter a use from an inner scope, to force allocation
- // to the context.
- var->MarkAsAccessedFromInnerScope();
+ // Force context allocation of the parameter.
+ var->ForceContextAllocation();
}
if (MustAllocate(var)) {
@@ -1189,21 +1227,15 @@ void Scope::AllocateVariablesRecursively() {
if (is_function_scope()) AllocateParameterLocals();
AllocateNonParameterLocals();
- // Allocate context if necessary.
- bool must_have_local_context = false;
- if (scope_calls_eval_ || scope_contains_with_) {
- // The context for the eval() call or 'with' statement in this scope.
- // Unless we are in the global or an eval scope, we need a local
- // context even if we didn't statically allocate any locals in it,
- // and the compiler will access the context variable. If we are
- // not in an inner scope, the scope is provided from the outside.
- must_have_local_context = is_function_scope();
- }
+ // Force allocation of a context for this scope if necessary. For a 'with'
+ // scope and for a function scope that makes an 'eval' call we need a context,
+ // even if no local variables were statically allocated in the scope.
+ bool must_have_context = is_with_scope() ||
+ (is_function_scope() && calls_eval());
// If we didn't allocate any locals in the local context, then we only
- // need the minimal number of slots if we must have a local context.
- if (num_heap_slots_ == Context::MIN_CONTEXT_SLOTS &&
- !must_have_local_context) {
+ // need the minimal number of slots if we must have a context.
+ if (num_heap_slots_ == Context::MIN_CONTEXT_SLOTS && !must_have_context) {
num_heap_slots_ = 0;
}
@@ -1211,4 +1243,17 @@ void Scope::AllocateVariablesRecursively() {
ASSERT(num_heap_slots_ == 0 || num_heap_slots_ >= Context::MIN_CONTEXT_SLOTS);
}
+
+int Scope::StackLocalCount() const {
+ return num_stack_slots() -
+ (function_ != NULL && function_->var()->IsStackLocal() ? 1 : 0);
+}
+
+
+int Scope::ContextLocalCount() const {
+ if (num_heap_slots() == 0) return 0;
+ return num_heap_slots() - Context::MIN_CONTEXT_SLOTS -
+ (function_ != NULL && function_->var()->IsContextSlot() ? 1 : 0);
+}
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/scopes.h b/src/3rdparty/v8/src/scopes.h
index b04332c..d315b7e 100644
--- a/src/3rdparty/v8/src/scopes.h
+++ b/src/3rdparty/v8/src/scopes.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -29,7 +29,7 @@
#define V8_SCOPES_H_
#include "ast.h"
-#include "hashmap.h"
+#include "zone.h"
namespace v8 {
namespace internal {
@@ -38,7 +38,7 @@ class CompilationInfo;
// A hash map to support fast variable declaration and lookup.
-class VariableMap: public HashMap {
+class VariableMap: public ZoneHashMap {
public:
VariableMap();
@@ -48,7 +48,9 @@ class VariableMap: public HashMap {
Handle<String> name,
VariableMode mode,
bool is_valid_lhs,
- Variable::Kind kind);
+ Variable::Kind kind,
+ InitializationFlag initialization_flag,
+ Interface* interface = Interface::NewValue());
Variable* Lookup(Handle<String> name);
};
@@ -92,8 +94,7 @@ class Scope: public ZoneObject {
// doesn't re-allocate variables repeatedly.
static bool Analyze(CompilationInfo* info);
- static Scope* DeserializeScopeChain(CompilationInfo* info,
- Scope* innermost_scope);
+ static Scope* DeserializeScopeChain(Context* context, Scope* global_scope);
// The scope name is only used for printing/debugging.
void SetScopeName(Handle<String> scope_name) { scope_name_ = scope_name; }
@@ -111,6 +112,13 @@ class Scope: public ZoneObject {
// Lookup a variable in this scope. Returns the variable or NULL if not found.
Variable* LocalLookup(Handle<String> name);
+ // This lookup corresponds to a lookup in the "intermediate" scope sitting
+ // between this scope and the outer scope. (ECMA-262, 3rd., requires that
+ // the name of named function literal is kept in an intermediate scope
+ // in between this scope and the next outer scope.)
+ Variable* LookupFunctionVar(Handle<String> name,
+ AstNodeFactory<AstNullVisitor>* factory);
+
// Lookup a variable in this scope or outer scopes.
// Returns the variable or NULL if not found.
Variable* Lookup(Handle<String> name);
@@ -118,7 +126,16 @@ class Scope: public ZoneObject {
// Declare the function variable for a function literal. This variable
// is in an intermediate scope between this function scope and the the
// outer scope. Only possible for function scopes; at most one variable.
- Variable* DeclareFunctionVar(Handle<String> name, VariableMode mode);
+ template<class Visitor>
+ Variable* DeclareFunctionVar(Handle<String> name,
+ VariableMode mode,
+ AstNodeFactory<Visitor>* factory) {
+ ASSERT(is_function_scope() && function_ == NULL);
+ Variable* function_var = new Variable(
+ this, name, mode, true, Variable::NORMAL, kCreatedInitialized);
+ function_ = factory->NewVariableProxy(function_var);
+ return function_var;
+ }
// Declare a parameter in this scope. When there are duplicated
// parameters the rightmost one 'wins'. However, the implementation
@@ -127,7 +144,10 @@ class Scope: public ZoneObject {
// Declare a local variable in this scope. If the variable has been
// declared before, the previously declared variable is returned.
- Variable* DeclareLocal(Handle<String> name, VariableMode mode);
+ Variable* DeclareLocal(Handle<String> name,
+ VariableMode mode,
+ InitializationFlag init_flag,
+ Interface* interface = Interface::NewValue());
// Declare an implicit global variable in this scope which must be a
// global scope. The variable was introduced (possibly from an inner
@@ -136,8 +156,20 @@ class Scope: public ZoneObject {
Variable* DeclareGlobal(Handle<String> name);
// Create a new unresolved variable.
- VariableProxy* NewUnresolved(Handle<String> name,
- int position = RelocInfo::kNoPosition);
+ template<class Visitor>
+ VariableProxy* NewUnresolved(AstNodeFactory<Visitor>* factory,
+ Handle<String> name,
+ int position = RelocInfo::kNoPosition,
+ Interface* interface = Interface::NewValue()) {
+ // Note that we must not share the unresolved variables with
+ // the same name because they may be removed selectively via
+ // RemoveUnresolved().
+ ASSERT(!already_resolved());
+ VariableProxy* proxy =
+ factory->NewVariableProxy(name, false, position, interface);
+ unresolved_.Add(proxy);
+ return proxy;
+ }
// Remove a unresolved variable. During parsing, an unresolved variable
// may have been added optimistically, but then only the variable name
@@ -179,6 +211,11 @@ class Scope: public ZoneObject {
// scope over a let binding of the same name.
Declaration* CheckConflictingVarDeclarations();
+ // For harmony block scoping mode: Check if the scope has variable proxies
+ // that are used as lvalues and point to const variables. Assumes that scopes
+ // have been analyzed and variables been resolved.
+ VariableProxy* CheckAssignmentToConst();
+
// ---------------------------------------------------------------------------
// Scope-specific info.
@@ -189,8 +226,8 @@ class Scope: public ZoneObject {
void RecordEvalCall() { if (!is_global_scope()) scope_calls_eval_ = true; }
// Set the strict mode flag (unless disabled by a global flag).
- void SetStrictModeFlag(StrictModeFlag strict_mode_flag) {
- strict_mode_flag_ = FLAG_strict_mode ? strict_mode_flag : kNonStrictMode;
+ void SetLanguageMode(LanguageMode language_mode) {
+ language_mode_ = language_mode;
}
// Position in the source where this scope begins and ends.
@@ -224,17 +261,13 @@ class Scope: public ZoneObject {
end_position_ = statement_pos;
}
- // Enable qml mode for this scope
- void EnableQmlMode() {
- qml_mode_ = true;
- }
-
// ---------------------------------------------------------------------------
// Predicates.
// Specific scope types.
bool is_eval_scope() const { return type_ == EVAL_SCOPE; }
bool is_function_scope() const { return type_ == FUNCTION_SCOPE; }
+ bool is_module_scope() const { return type_ == MODULE_SCOPE; }
bool is_global_scope() const { return type_ == GLOBAL_SCOPE; }
bool is_catch_scope() const { return type_ == CATCH_SCOPE; }
bool is_block_scope() const { return type_ == BLOCK_SCOPE; }
@@ -242,16 +275,20 @@ class Scope: public ZoneObject {
bool is_declaration_scope() const {
return is_eval_scope() || is_function_scope() || is_global_scope();
}
- bool is_strict_mode() const { return strict_mode_flag() == kStrictMode; }
- bool is_qml_mode() const { return qml_mode_; }
- bool is_strict_mode_eval_scope() const {
- return is_eval_scope() && is_strict_mode();
+ bool is_classic_mode() const {
+ return language_mode() == CLASSIC_MODE;
+ }
+ bool is_extended_mode() const {
+ return language_mode() == EXTENDED_MODE;
+ }
+ bool is_strict_or_extended_eval_scope() const {
+ return is_eval_scope() && !is_classic_mode();
}
// Information about which scopes calls eval.
bool calls_eval() const { return scope_calls_eval_; }
bool calls_non_strict_eval() {
- return scope_calls_eval_ && !is_strict_mode();
+ return scope_calls_eval_ && is_classic_mode();
}
bool outer_scope_calls_non_strict_eval() const {
return outer_scope_calls_non_strict_eval_;
@@ -262,17 +299,14 @@ class Scope: public ZoneObject {
// Does this scope contain a with statement.
bool contains_with() const { return scope_contains_with_; }
- // The scope immediately surrounding this scope, or NULL.
- Scope* outer_scope() const { return outer_scope_; }
-
// ---------------------------------------------------------------------------
// Accessors.
// The type of this scope.
ScopeType type() const { return type_; }
- // The strict mode of this scope.
- StrictModeFlag strict_mode_flag() const { return strict_mode_flag_; }
+ // The language mode of this scope.
+ LanguageMode language_mode() const { return language_mode_; }
// The variable corresponding the 'this' value.
Variable* receiver() { return receiver_; }
@@ -303,22 +337,20 @@ class Scope: public ZoneObject {
// Inner scope list.
ZoneList<Scope*>* inner_scopes() { return &inner_scopes_; }
+ // The scope immediately surrounding this scope, or NULL.
+ Scope* outer_scope() const { return outer_scope_; }
+
+ // The interface as inferred so far; only for module scopes.
+ Interface* interface() const { return interface_; }
+
// ---------------------------------------------------------------------------
// Variable allocation.
- // Collect all used locals in this scope.
- template<class Allocator>
- void CollectUsedVariables(List<Variable*, Allocator>* locals);
-
- // Resolve and fill in the allocation information for all variables
- // in this scopes. Must be called *after* all scopes have been
- // processed (parsed) to ensure that unresolved variables can be
- // resolved properly.
- //
- // In the case of code compiled and run using 'eval', the context
- // parameter is the context in which eval was called. In all other
- // cases the context parameter is an empty handle.
- void AllocateVariables(Handle<Context> context);
+ // Collect stack and context allocated local variables in this scope. Note
+ // that the function variable - if present - is not collected and should be
+ // handled separately.
+ void CollectStackAndContextLocals(ZoneList<Variable*>* stack_locals,
+ ZoneList<Variable*>* context_locals);
// Current number of var or const locals.
int num_var_or_const() { return num_var_or_const_; }
@@ -327,12 +359,20 @@ class Scope: public ZoneObject {
int num_stack_slots() const { return num_stack_slots_; }
int num_heap_slots() const { return num_heap_slots_; }
+ int StackLocalCount() const;
+ int ContextLocalCount() const;
+
// Make sure this scope and all outer scopes are eagerly compiled.
void ForceEagerCompilation() { force_eager_compilation_ = true; }
// Determine if we can use lazy compilation for this scope.
bool AllowsLazyCompilation() const;
+ // True if we can lazily recompile functions with this scope.
+ bool allows_lazy_recompilation() const {
+ return !force_eager_compilation_;
+ }
+
// True if the outer context of this scope is always the global context.
bool HasTrivialOuterContext() const;
@@ -343,13 +383,13 @@ class Scope: public ZoneObject {
// where var declarations will be hoisted to in the implementation.
Scope* DeclarationScope();
- Handle<SerializedScopeInfo> GetSerializedScopeInfo();
+ Handle<ScopeInfo> GetScopeInfo();
// Get the chain of nested scopes within this scope for the source statement
// position. The scopes will be added to the list from the outermost scope to
// the innermost scope. Only nested block, catch or with scopes are tracked
// and will be returned, but no inner function scopes.
- void GetNestedScopeChain(List<Handle<SerializedScopeInfo> >* chain,
+ void GetNestedScopeChain(List<Handle<ScopeInfo> >* chain,
int statement_position);
// ---------------------------------------------------------------------------
@@ -409,6 +449,8 @@ class Scope: public ZoneObject {
VariableProxy* function_;
// Convenience variable; function scopes only.
Variable* arguments_;
+ // Interface; module scopes only.
+ Interface* interface_;
// Illegal redeclaration.
Expression* illegal_redecl_;
@@ -422,13 +464,11 @@ class Scope: public ZoneObject {
// This scope or a nested catch scope or with scope contain an 'eval' call. At
// the 'eval' call site this scope is the declaration scope.
bool scope_calls_eval_;
- // This scope is a strict mode scope.
- StrictModeFlag strict_mode_flag_;
+ // The language mode of this scope.
+ LanguageMode language_mode_;
// Source positions.
int start_position_;
int end_position_;
- // This scope is a qml mode scope.
- bool qml_mode_;
// Computed via PropagateScopeInfo.
bool outer_scope_calls_non_strict_eval_;
@@ -446,8 +486,8 @@ class Scope: public ZoneObject {
int num_stack_slots_;
int num_heap_slots_;
- // Serialized scopes support.
- Handle<SerializedScopeInfo> scope_info_;
+ // Serialized scope info support.
+ Handle<ScopeInfo> scope_info_;
bool already_resolved() { return already_resolved_; }
// Create a non-local variable with a given name.
@@ -504,13 +544,15 @@ class Scope: public ZoneObject {
// scope. If the code is executed because of a call to 'eval', the context
// parameter should be set to the calling context of 'eval'.
Variable* LookupRecursive(Handle<String> name,
- Handle<Context> context,
- BindingKind* binding_kind);
- void ResolveVariable(Scope* global_scope,
- Handle<Context> context,
- VariableProxy* proxy);
- void ResolveVariablesRecursively(Scope* global_scope,
- Handle<Context> context);
+ BindingKind* binding_kind,
+ AstNodeFactory<AstNullVisitor>* factory);
+ MUST_USE_RESULT
+ bool ResolveVariable(CompilationInfo* info,
+ VariableProxy* proxy,
+ AstNodeFactory<AstNullVisitor>* factory);
+ MUST_USE_RESULT
+ bool ResolveVariablesRecursively(CompilationInfo* info,
+ AstNodeFactory<AstNullVisitor>* factory);
// Scope analysis.
bool PropagateScopeInfo(bool outer_scope_calls_non_strict_eval);
@@ -529,11 +571,21 @@ class Scope: public ZoneObject {
void AllocateNonParameterLocals();
void AllocateVariablesRecursively();
+ // Resolve and fill in the allocation information for all variables
+ // in this scopes. Must be called *after* all scopes have been
+ // processed (parsed) to ensure that unresolved variables can be
+ // resolved properly.
+ //
+ // In the case of code compiled and run using 'eval', the context
+ // parameter is the context in which eval was called. In all other
+ // cases the context parameter is an empty handle.
+ MUST_USE_RESULT
+ bool AllocateVariables(CompilationInfo* info,
+ AstNodeFactory<AstNullVisitor>* factory);
+
private:
// Construct a scope based on the scope info.
- Scope(Scope* inner_scope,
- ScopeType type,
- Handle<SerializedScopeInfo> scope_info);
+ Scope(Scope* inner_scope, ScopeType type, Handle<ScopeInfo> scope_info);
// Construct a catch scope with a binding for the name.
Scope(Scope* inner_scope, Handle<String> catch_variable_name);
@@ -547,7 +599,7 @@ class Scope: public ZoneObject {
void SetDefaults(ScopeType type,
Scope* outer_scope,
- Handle<SerializedScopeInfo> scope_info);
+ Handle<ScopeInfo> scope_info);
};
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/serialize.cc b/src/3rdparty/v8/src/serialize.cc
index ba7b2a5..01d5f1c 100644
--- a/src/3rdparty/v8/src/serialize.cc
+++ b/src/3rdparty/v8/src/serialize.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -273,14 +273,22 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
STUB_CACHE_TABLE,
2,
"StubCache::primary_->value");
- Add(stub_cache->key_reference(StubCache::kSecondary).address(),
+ Add(stub_cache->map_reference(StubCache::kPrimary).address(),
STUB_CACHE_TABLE,
3,
+ "StubCache::primary_->map");
+ Add(stub_cache->key_reference(StubCache::kSecondary).address(),
+ STUB_CACHE_TABLE,
+ 4,
"StubCache::secondary_->key");
Add(stub_cache->value_reference(StubCache::kSecondary).address(),
STUB_CACHE_TABLE,
- 4,
+ 5,
"StubCache::secondary_->value");
+ Add(stub_cache->map_reference(StubCache::kSecondary).address(),
+ STUB_CACHE_TABLE,
+ 6,
+ "StubCache::secondary_->map");
// Runtime entries
Add(ExternalReference::perform_gc_function(isolate).address(),
@@ -494,6 +502,14 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
UNCLASSIFIED,
45,
"the_hole_nan");
+ Add(ExternalReference::get_date_field_function(isolate).address(),
+ UNCLASSIFIED,
+ 46,
+ "JSDate::GetField");
+ Add(ExternalReference::date_cache_stamp(isolate).address(),
+ UNCLASSIFIED,
+ 47,
+ "date_cache_stamp");
}
@@ -669,6 +685,14 @@ void Deserializer::Deserialize() {
isolate_->heap()->set_global_contexts_list(
isolate_->heap()->undefined_value());
+
+ // Update data pointers to the external strings containing natives sources.
+ for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
+ Object* source = isolate_->heap()->natives_source_cache()->get(i);
+ if (!source->IsUndefined()) {
+ ExternalAsciiString::cast(source)->update_data_cache();
+ }
+ }
}
@@ -825,13 +849,12 @@ void Deserializer::ReadChunk(Object** current,
if (how == kFromCode) { \
Address location_of_branch_data = \
reinterpret_cast<Address>(current); \
- Assembler::set_target_at(location_of_branch_data, \
- reinterpret_cast<Address>(new_object)); \
- if (within == kFirstInstruction) { \
- location_of_branch_data += Assembler::kCallTargetSize; \
- current = reinterpret_cast<Object**>(location_of_branch_data); \
- current_was_incremented = true; \
- } \
+ Assembler::deserialization_set_special_target_at( \
+ location_of_branch_data, \
+ reinterpret_cast<Address>(new_object)); \
+ location_of_branch_data += Assembler::kSpecialTargetSize; \
+ current = reinterpret_cast<Object**>(location_of_branch_data); \
+ current_was_incremented = true; \
} else { \
*current = new_object; \
} \
@@ -967,6 +990,21 @@ void Deserializer::ReadChunk(Object** current,
// Find a recently deserialized object using its offset from the current
// allocation point and write a pointer to it to the current object.
ALL_SPACES(kBackref, kPlain, kStartOfObject)
+#if V8_TARGET_ARCH_MIPS
+ // Deserialize a new object from pointer found in code and write
+ // a pointer to it to the current object. Required only for MIPS, and
+ // omitted on the other architectures because it is fully unrolled and
+ // would cause bloat.
+ ONE_PER_SPACE(kNewObject, kFromCode, kStartOfObject)
+ // Find a recently deserialized code object using its offset from the
+ // current allocation point and write a pointer to it to the current
+ // object. Required only for MIPS.
+ ALL_SPACES(kBackref, kFromCode, kStartOfObject)
+ // Find an already deserialized code object using its offset from
+ // the start and write a pointer to it to the current object.
+ // Required only for MIPS.
+ ALL_SPACES(kFromStart, kFromCode, kStartOfObject)
+#endif
// Find a recently deserialized code object using its offset from the
// current allocation point and write a pointer to its first instruction
// to the current code object or the instruction pointer in a function
@@ -1073,36 +1111,6 @@ void SnapshotByteSink::PutInt(uintptr_t integer, const char* description) {
PutSection(static_cast<int>(integer & 0x7f), "IntLastPart");
}
-#ifdef DEBUG
-
-void Deserializer::Synchronize(const char* tag) {
- int data = source_->Get();
- // If this assert fails then that indicates that you have a mismatch between
- // the number of GC roots when serializing and deserializing.
- ASSERT_EQ(kSynchronize, data);
- do {
- int character = source_->Get();
- if (character == 0) break;
- if (FLAG_debug_serialization) {
- PrintF("%c", character);
- }
- } while (true);
- if (FLAG_debug_serialization) {
- PrintF("\n");
- }
-}
-
-
-void Serializer::Synchronize(const char* tag) {
- sink_->Put(kSynchronize, tag);
- int character;
- do {
- character = *tag++;
- sink_->PutSection(character, "TagCharacter");
- } while (character != 0);
-}
-
-#endif
Serializer::Serializer(SnapshotByteSink* sink)
: sink_(sink),
@@ -1110,9 +1118,10 @@ Serializer::Serializer(SnapshotByteSink* sink)
external_reference_encoder_(new ExternalReferenceEncoder),
large_object_total_(0),
root_index_wave_front_(0) {
+ isolate_ = Isolate::Current();
// The serializer is meant to be used only to generate initial heap images
// from a context in which there is only one isolate.
- ASSERT(Isolate::Current()->IsDefaultIsolate());
+ ASSERT(isolate_->IsDefaultIsolate());
for (int i = 0; i <= LAST_SPACE; i++) {
fullness_[i] = 0;
}
@@ -1132,11 +1141,8 @@ void StartupSerializer::SerializeStrongReferences() {
CHECK(isolate->handle_scope_implementer()->blocks()->is_empty());
CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles());
// We don't support serializing installed extensions.
- for (RegisteredExtension* ext = v8::RegisteredExtension::first_extension();
- ext != NULL;
- ext = ext->next()) {
- CHECK_NE(v8::INSTALLED, ext->state());
- }
+ CHECK(!isolate->has_installed_extensions());
+
HEAP->IterateStrongRoots(this, VISIT_ONLY_STRONG);
}
@@ -1237,12 +1243,23 @@ int PartialSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) {
}
-int Serializer::RootIndex(HeapObject* heap_object) {
+int Serializer::RootIndex(HeapObject* heap_object, HowToCode from) {
Heap* heap = HEAP;
if (heap->InNewSpace(heap_object)) return kInvalidRootIndex;
for (int i = 0; i < root_index_wave_front_; i++) {
Object* root = heap->roots_array_start()[i];
- if (!root->IsSmi() && root == heap_object) return i;
+ if (!root->IsSmi() && root == heap_object) {
+#if V8_TARGET_ARCH_MIPS
+ if (from == kFromCode) {
+ // In order to avoid code bloat in the deserializer we don't have
+ // support for the encoding that specifies a particular root should
+ // be written into the lui/ori instructions on MIPS. Therefore we
+ // should not generate such serialization data for MIPS.
+ return kInvalidRootIndex;
+ }
+#endif
+ return i;
+ }
}
return kInvalidRootIndex;
}
@@ -1295,7 +1312,7 @@ void StartupSerializer::SerializeObject(
HeapObject* heap_object = HeapObject::cast(o);
int root_index;
- if ((root_index = RootIndex(heap_object)) != kInvalidRootIndex) {
+ if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
PutRoot(root_index, heap_object, how_to_code, where_to_point);
return;
}
@@ -1359,8 +1376,15 @@ void PartialSerializer::SerializeObject(
CHECK(o->IsHeapObject());
HeapObject* heap_object = HeapObject::cast(o);
+ if (heap_object->IsMap()) {
+ // The code-caches link to context-specific code objects, which
+ // the startup and context serializes cannot currently handle.
+ ASSERT(Map::cast(heap_object)->code_cache() ==
+ heap_object->GetHeap()->raw_unchecked_empty_fixed_array());
+ }
+
int root_index;
- if ((root_index = RootIndex(heap_object)) != kInvalidRootIndex) {
+ if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
PutRoot(root_index, heap_object, how_to_code, where_to_point);
return;
}
@@ -1440,7 +1464,7 @@ void Serializer::ObjectSerializer::VisitPointers(Object** start,
while (current < end && !(*current)->IsSmi()) {
HeapObject* current_contents = HeapObject::cast(*current);
- int root_index = serializer_->RootIndex(current_contents);
+ int root_index = serializer_->RootIndex(current_contents, kPlain);
// Repeats are not subject to the write barrier so there are only some
// objects that can be used in a repeat encoding. These are the early
// ones in the root array that are never in new space.
@@ -1471,6 +1495,16 @@ void Serializer::ObjectSerializer::VisitPointers(Object** start,
}
+void Serializer::ObjectSerializer::VisitEmbeddedPointer(RelocInfo* rinfo) {
+ Object** current = rinfo->target_object_address();
+
+ OutputRawData(rinfo->target_address_address());
+ HowToCode representation = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
+ serializer_->SerializeObject(*current, representation, kStartOfObject);
+ bytes_processed_so_far_ += rinfo->target_address_size();
+}
+
+
void Serializer::ObjectSerializer::VisitExternalReferences(Address* start,
Address* end) {
Address references_start = reinterpret_cast<Address>(start);
@@ -1485,6 +1519,20 @@ void Serializer::ObjectSerializer::VisitExternalReferences(Address* start,
}
+void Serializer::ObjectSerializer::VisitExternalReference(RelocInfo* rinfo) {
+ Address references_start = rinfo->target_address_address();
+ OutputRawData(references_start);
+
+ Address* current = rinfo->target_reference_address();
+ int representation = rinfo->IsCodedSpecially() ?
+ kFromCode + kStartOfObject : kPlain + kStartOfObject;
+ sink_->Put(kExternalReference + representation, "ExternalRef");
+ int reference_id = serializer_->EncodeExternalReference(*current);
+ sink_->PutInt(reference_id, "reference id");
+ bytes_processed_so_far_ += rinfo->target_address_size();
+}
+
+
void Serializer::ObjectSerializer::VisitRuntimeEntry(RelocInfo* rinfo) {
Address target_start = rinfo->target_address_address();
OutputRawData(target_start);
@@ -1636,8 +1684,8 @@ int Serializer::Allocate(int space, int size, bool* new_page) {
// serialized address.
CHECK(IsPowerOf2(Page::kPageSize));
int used_in_this_page = (fullness_[space] & (Page::kPageSize - 1));
- CHECK(size <= Page::kObjectAreaSize);
- if (used_in_this_page + size > Page::kObjectAreaSize) {
+ CHECK(size <= SpaceAreaSize(space));
+ if (used_in_this_page + size > SpaceAreaSize(space)) {
*new_page = true;
fullness_[space] = RoundUp(fullness_[space], Page::kPageSize);
}
@@ -1648,4 +1696,13 @@ int Serializer::Allocate(int space, int size, bool* new_page) {
}
+int Serializer::SpaceAreaSize(int space) {
+ if (space == CODE_SPACE) {
+ return isolate_->memory_allocator()->CodePageAreaSize();
+ } else {
+ return Page::kPageSize - Page::kObjectStartOffset;
+ }
+}
+
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/serialize.h b/src/3rdparty/v8/src/serialize.h
index 49695ec..f50e23e 100644
--- a/src/3rdparty/v8/src/serialize.h
+++ b/src/3rdparty/v8/src/serialize.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2009 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -228,7 +228,7 @@ class SerializerDeserializer: public ObjectVisitor {
kFromStart = 0x20, // Object is described relative to start.
// 0x21-0x28 One per space.
// 0x29-0x2f Free.
- // 0x30-0x3f Used by misc tags below.
+ // 0x30-0x3f Used by misc. tags below.
kPointedToMask = 0x3f
};
@@ -341,10 +341,6 @@ class Deserializer: public SerializerDeserializer {
// Deserialize a single object and the objects reachable from it.
void DeserializePartial(Object** root);
-#ifdef DEBUG
- virtual void Synchronize(const char* tag);
-#endif
-
private:
virtual void VisitPointers(Object** start, Object** end);
@@ -359,8 +355,8 @@ class Deserializer: public SerializerDeserializer {
// Fills in some heap data in an area from start to end (non-inclusive). The
// space id is used for the write barrier. The object_address is the address
// of the object we are writing into, or NULL if we are not writing into an
- // object, ie if we are writing a series of tagged values that are not on the
- // heap.
+ // object, i.e. if we are writing a series of tagged values that are not on
+ // the heap.
void ReadChunk(
Object** start, Object** end, int space, Address object_address);
HeapObject* GetAddressFromStart(int space);
@@ -485,14 +481,11 @@ class Serializer : public SerializerDeserializer {
SerializationAddressMapper* address_mapper() { return &address_mapper_; }
void PutRoot(
int index, HeapObject* object, HowToCode how, WhereToPoint where);
-#ifdef DEBUG
- virtual void Synchronize(const char* tag);
-#endif
protected:
static const int kInvalidRootIndex = -1;
- int RootIndex(HeapObject* heap_object);
+ int RootIndex(HeapObject* heap_object, HowToCode from);
virtual bool ShouldBeInThePartialSnapshotCache(HeapObject* o) = 0;
intptr_t root_index_wave_front() { return root_index_wave_front_; }
void set_root_index_wave_front(intptr_t value) {
@@ -514,7 +507,9 @@ class Serializer : public SerializerDeserializer {
bytes_processed_so_far_(0) { }
void Serialize();
void VisitPointers(Object** start, Object** end);
+ void VisitEmbeddedPointer(RelocInfo* target);
void VisitExternalReferences(Address* start, Address* end);
+ void VisitExternalReference(RelocInfo* rinfo);
void VisitCodeTarget(RelocInfo* target);
void VisitCodeEntry(Address entry_address);
void VisitGlobalPropertyCell(RelocInfo* rinfo);
@@ -561,6 +556,9 @@ class Serializer : public SerializerDeserializer {
return external_reference_encoder_->Encode(addr);
}
+ int SpaceAreaSize(int space);
+
+ Isolate* isolate_;
// Keep track of the fullness of each space in order to generate
// relative addresses for back references. Large objects are
// just numbered sequentially since relative addresses make no
@@ -579,6 +577,7 @@ class Serializer : public SerializerDeserializer {
friend class ObjectSerializer;
friend class Deserializer;
+ private:
DISALLOW_COPY_AND_ASSIGN(Serializer);
};
@@ -608,7 +607,7 @@ class PartialSerializer : public Serializer {
ASSERT(!o->IsScript());
return o->IsString() || o->IsSharedFunctionInfo() ||
o->IsHeapNumber() || o->IsCode() ||
- o->IsSerializedScopeInfo() ||
+ o->IsScopeInfo() ||
o->map() == HEAP->fixed_cow_array_map();
}
@@ -630,7 +629,7 @@ class StartupSerializer : public Serializer {
// Serialize the current state of the heap. The order is:
// 1) Strong references.
// 2) Partial snapshot cache.
- // 3) Weak references (eg the symbol table).
+ // 3) Weak references (e.g. the symbol table).
virtual void SerializeStrongReferences();
virtual void SerializeObject(Object* o,
HowToCode how_to_code,
diff --git a/src/3rdparty/v8/src/small-pointer-list.h b/src/3rdparty/v8/src/small-pointer-list.h
index 6c5ce89..75fea06 100644
--- a/src/3rdparty/v8/src/small-pointer-list.h
+++ b/src/3rdparty/v8/src/small-pointer-list.h
@@ -69,6 +69,12 @@ class SmallPointerList {
data_ = kEmptyTag;
}
+ void Sort() {
+ if ((data_ & kTagMask) == kListTag) {
+ list()->Sort(compare_value);
+ }
+ }
+
bool is_empty() const { return length() == 0; }
int length() const {
@@ -159,6 +165,10 @@ class SmallPointerList {
private:
typedef ZoneList<T*> PointerList;
+ static int compare_value(T* const* a, T* const* b) {
+ return Compare<T>(**a, **b);
+ }
+
static const intptr_t kEmptyTag = 1;
static const intptr_t kSingletonTag = 0;
static const intptr_t kListTag = 2;
diff --git a/src/3rdparty/v8/src/spaces-inl.h b/src/3rdparty/v8/src/spaces-inl.h
index 1973b3a..55bf222 100644
--- a/src/3rdparty/v8/src/spaces-inl.h
+++ b/src/3rdparty/v8/src/spaces-inl.h
@@ -164,12 +164,10 @@ Page* Page::Initialize(Heap* heap,
Executability executable,
PagedSpace* owner) {
Page* page = reinterpret_cast<Page*>(chunk);
- ASSERT(chunk->size() == static_cast<size_t>(kPageSize));
+ ASSERT(chunk->size() <= static_cast<size_t>(kPageSize));
ASSERT(chunk->owner() == owner);
- owner->IncreaseCapacity(Page::kObjectAreaSize);
- owner->Free(page->ObjectAreaStart(),
- static_cast<int>(page->ObjectAreaEnd() -
- page->ObjectAreaStart()));
+ owner->IncreaseCapacity(page->area_size());
+ owner->Free(page->area_start(), page->area_size());
heap->incremental_marking()->SetOldSpacePageFlags(chunk);
@@ -248,7 +246,7 @@ void Page::set_prev_page(Page* page) {
// Try linear allocation in the page of alloc_info's allocation top. Does
-// not contain slow case logic (eg, move to the next page or try free list
+// not contain slow case logic (e.g. move to the next page or try free list
// allocation) so it can be used by all the allocation functions and for all
// the paged spaces.
HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) {
@@ -293,30 +291,12 @@ MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes) {
// -----------------------------------------------------------------------------
// NewSpace
-MaybeObject* NewSpace::AllocateRawInternal(int size_in_bytes) {
+
+
+MaybeObject* NewSpace::AllocateRaw(int size_in_bytes) {
Address old_top = allocation_info_.top;
if (allocation_info_.limit - old_top < size_in_bytes) {
- Address new_top = old_top + size_in_bytes;
- Address high = to_space_.page_high();
- if (allocation_info_.limit < high) {
- // Incremental marking has lowered the limit to get a
- // chance to do a step.
- allocation_info_.limit = Min(
- allocation_info_.limit + inline_allocation_limit_step_,
- high);
- int bytes_allocated = static_cast<int>(new_top - top_on_previous_step_);
- heap()->incremental_marking()->Step(bytes_allocated);
- top_on_previous_step_ = new_top;
- return AllocateRawInternal(size_in_bytes);
- } else if (AddFreshPage()) {
- // Switched to new page. Try allocating again.
- int bytes_allocated = static_cast<int>(old_top - top_on_previous_step_);
- heap()->incremental_marking()->Step(bytes_allocated);
- top_on_previous_step_ = to_space_.page_low();
- return AllocateRawInternal(size_in_bytes);
- } else {
- return Failure::RetryAfterGC();
- }
+ return SlowAllocateRaw(size_in_bytes);
}
Object* obj = HeapObject::FromAddress(allocation_info_.top);
@@ -350,7 +330,7 @@ void NewSpace::ShrinkStringAtAllocationBoundary(String* string, int length) {
string->set_length(length);
if (Marking::IsBlack(Marking::MarkBitFrom(string))) {
int delta = static_cast<int>(old_top - allocation_info_.top);
- MemoryChunk::IncrementLiveBytes(string->address(), -delta);
+ MemoryChunk::IncrementLiveBytesFromMutator(string->address(), -delta);
}
}
diff --git a/src/3rdparty/v8/src/spaces.cc b/src/3rdparty/v8/src/spaces.cc
index f467f71..6144464 100644
--- a/src/3rdparty/v8/src/spaces.cc
+++ b/src/3rdparty/v8/src/spaces.cc
@@ -75,8 +75,8 @@ HeapObjectIterator::HeapObjectIterator(Page* page,
owner == HEAP->cell_space() ||
owner == HEAP->code_space());
Initialize(reinterpret_cast<PagedSpace*>(owner),
- page->ObjectAreaStart(),
- page->ObjectAreaEnd(),
+ page->area_start(),
+ page->area_end(),
kOnePageOnly,
size_func);
ASSERT(page->WasSweptPrecisely());
@@ -108,12 +108,12 @@ bool HeapObjectIterator::AdvanceToNextPage() {
cur_page = space_->anchor();
} else {
cur_page = Page::FromAddress(cur_addr_ - 1);
- ASSERT(cur_addr_ == cur_page->ObjectAreaEnd());
+ ASSERT(cur_addr_ == cur_page->area_end());
}
cur_page = cur_page->next_page();
if (cur_page == space_->anchor()) return false;
- cur_addr_ = cur_page->ObjectAreaStart();
- cur_end_ = cur_page->ObjectAreaEnd();
+ cur_addr_ = cur_page->area_start();
+ cur_end_ = cur_page->area_end();
ASSERT(cur_page->WasSweptPrecisely());
return true;
}
@@ -132,7 +132,7 @@ CodeRange::CodeRange(Isolate* isolate)
}
-bool CodeRange::Setup(const size_t requested) {
+bool CodeRange::SetUp(const size_t requested) {
ASSERT(code_range_ == NULL);
code_range_ = new VirtualMemory(requested);
@@ -227,7 +227,9 @@ Address CodeRange::AllocateRawMemory(const size_t requested,
}
ASSERT(*allocated <= current.size);
ASSERT(IsAddressAligned(current.start, MemoryChunk::kAlignment));
- if (!code_range_->Commit(current.start, *allocated, true)) {
+ if (!MemoryAllocator::CommitCodePage(code_range_,
+ current.start,
+ *allocated)) {
*allocated = 0;
return NULL;
}
@@ -268,7 +270,7 @@ MemoryAllocator::MemoryAllocator(Isolate* isolate)
}
-bool MemoryAllocator::Setup(intptr_t capacity, intptr_t capacity_executable) {
+bool MemoryAllocator::SetUp(intptr_t capacity, intptr_t capacity_executable) {
capacity_ = RoundUp(capacity, Page::kPageSize);
capacity_executable_ = RoundUp(capacity_executable, Page::kPageSize);
ASSERT_GE(capacity_, capacity_executable_);
@@ -358,11 +360,17 @@ Address MemoryAllocator::AllocateAlignedMemory(size_t size,
VirtualMemory reservation;
Address base = ReserveAlignedMemory(size, alignment, &reservation);
if (base == NULL) return NULL;
- if (!reservation.Commit(base,
- size,
- executable == EXECUTABLE)) {
- return NULL;
+
+ if (executable == EXECUTABLE) {
+ CommitCodePage(&reservation, base, size);
+ } else {
+ if (!reservation.Commit(base,
+ size,
+ executable == EXECUTABLE)) {
+ return NULL;
+ }
}
+
controller->TakeControl(&reservation);
return base;
}
@@ -378,9 +386,14 @@ void Page::InitializeAsAnchor(PagedSpace* owner) {
NewSpacePage* NewSpacePage::Initialize(Heap* heap,
Address start,
SemiSpace* semi_space) {
+ Address area_start = start + NewSpacePage::kObjectStartOffset;
+ Address area_end = start + Page::kPageSize;
+
MemoryChunk* chunk = MemoryChunk::Initialize(heap,
start,
Page::kPageSize,
+ area_start,
+ area_end,
NOT_EXECUTABLE,
semi_space);
chunk->set_next_chunk(NULL);
@@ -410,6 +423,8 @@ void NewSpacePage::InitializeAsAnchor(SemiSpace* semi_space) {
MemoryChunk* MemoryChunk::Initialize(Heap* heap,
Address base,
size_t size,
+ Address area_start,
+ Address area_end,
Executability executable,
Space* owner) {
MemoryChunk* chunk = FromAddress(base);
@@ -418,6 +433,8 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap,
chunk->heap_ = heap;
chunk->size_ = size;
+ chunk->area_start_ = area_start;
+ chunk->area_end_ = area_end;
chunk->flags_ = 0;
chunk->set_owner(owner);
chunk->InitializeReservedMemory();
@@ -431,9 +448,13 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap,
ASSERT(OFFSET_OF(MemoryChunk, flags_) == kFlagsOffset);
ASSERT(OFFSET_OF(MemoryChunk, live_byte_count_) == kLiveBytesOffset);
- if (executable == EXECUTABLE) chunk->SetFlag(IS_EXECUTABLE);
+ if (executable == EXECUTABLE) {
+ chunk->SetFlag(IS_EXECUTABLE);
+ }
- if (owner == heap->old_data_space()) chunk->SetFlag(CONTAINS_ONLY_DATA);
+ if (owner == heap->old_data_space()) {
+ chunk->SetFlag(CONTAINS_ONLY_DATA);
+ }
return chunk;
}
@@ -462,11 +483,16 @@ void MemoryChunk::Unlink() {
MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t body_size,
Executability executable,
Space* owner) {
- size_t chunk_size = MemoryChunk::kObjectStartOffset + body_size;
+ size_t chunk_size;
Heap* heap = isolate_->heap();
Address base = NULL;
VirtualMemory reservation;
+ Address area_start = NULL;
+ Address area_end = NULL;
if (executable == EXECUTABLE) {
+ chunk_size = RoundUp(CodePageAreaStartOffset() + body_size,
+ OS::CommitPageSize()) + CodePageGuardSize();
+
// Check executable memory limit.
if (size_executable_ + chunk_size > capacity_executable_) {
LOG(isolate_,
@@ -494,18 +520,30 @@ MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t body_size,
// Update executable memory size.
size_executable_ += reservation.size();
}
+
+#ifdef DEBUG
+ ZapBlock(base, CodePageGuardStartOffset());
+ ZapBlock(base + CodePageAreaStartOffset(), body_size);
+#endif
+ area_start = base + CodePageAreaStartOffset();
+ area_end = area_start + body_size;
} else {
+ chunk_size = MemoryChunk::kObjectStartOffset + body_size;
base = AllocateAlignedMemory(chunk_size,
MemoryChunk::kAlignment,
executable,
&reservation);
if (base == NULL) return NULL;
- }
#ifdef DEBUG
- ZapBlock(base, chunk_size);
+ ZapBlock(base, chunk_size);
#endif
+
+ area_start = base + Page::kObjectStartOffset;
+ area_end = base + chunk_size;
+ }
+
isolate_->counters()->memory_allocated()->
Increment(static_cast<int>(chunk_size));
@@ -518,6 +556,8 @@ MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t body_size,
MemoryChunk* result = MemoryChunk::Initialize(heap,
base,
chunk_size,
+ area_start,
+ area_end,
executable,
owner);
result->set_reserved_memory(&reservation);
@@ -525,9 +565,10 @@ MemoryChunk* MemoryAllocator::AllocateChunk(intptr_t body_size,
}
-Page* MemoryAllocator::AllocatePage(PagedSpace* owner,
+Page* MemoryAllocator::AllocatePage(intptr_t size,
+ PagedSpace* owner,
Executability executable) {
- MemoryChunk* chunk = AllocateChunk(Page::kObjectAreaSize, executable, owner);
+ MemoryChunk* chunk = AllocateChunk(size, executable, owner);
if (chunk == NULL) return NULL;
@@ -536,8 +577,8 @@ Page* MemoryAllocator::AllocatePage(PagedSpace* owner,
LargePage* MemoryAllocator::AllocateLargePage(intptr_t object_size,
- Executability executable,
- Space* owner) {
+ Space* owner,
+ Executability executable) {
MemoryChunk* chunk = AllocateChunk(object_size, executable, owner);
if (chunk == NULL) return NULL;
return LargePage::Initialize(isolate_->heap(), chunk);
@@ -552,6 +593,9 @@ void MemoryAllocator::Free(MemoryChunk* chunk) {
PerformAllocationCallback(space, kAllocationActionFree, chunk->size());
}
+ isolate_->heap()->RememberUnmappedPage(
+ reinterpret_cast<Address>(chunk), chunk->IsEvacuationCandidate());
+
delete chunk->slots_buffer();
delete chunk->skip_list();
@@ -648,6 +692,76 @@ void MemoryAllocator::ReportStatistics() {
}
#endif
+
+int MemoryAllocator::CodePageGuardStartOffset() {
+ // We are guarding code pages: the first OS page after the header
+ // will be protected as non-writable.
+ return RoundUp(Page::kObjectStartOffset, OS::CommitPageSize());
+}
+
+
+int MemoryAllocator::CodePageGuardSize() {
+ return static_cast<int>(OS::CommitPageSize());
+}
+
+
+int MemoryAllocator::CodePageAreaStartOffset() {
+ // We are guarding code pages: the first OS page after the header
+ // will be protected as non-writable.
+ return CodePageGuardStartOffset() + CodePageGuardSize();
+}
+
+
+int MemoryAllocator::CodePageAreaEndOffset() {
+ // We are guarding code pages: the last OS page will be protected as
+ // non-writable.
+ return Page::kPageSize - static_cast<int>(OS::CommitPageSize());
+}
+
+
+bool MemoryAllocator::CommitCodePage(VirtualMemory* vm,
+ Address start,
+ size_t size) {
+ // Commit page header (not executable).
+ if (!vm->Commit(start,
+ CodePageGuardStartOffset(),
+ false)) {
+ return false;
+ }
+
+ // Create guard page after the header.
+ if (!vm->Guard(start + CodePageGuardStartOffset())) {
+ return false;
+ }
+
+ // Commit page body (executable).
+ size_t area_size = size - CodePageAreaStartOffset() - CodePageGuardSize();
+ if (!vm->Commit(start + CodePageAreaStartOffset(),
+ area_size,
+ true)) {
+ return false;
+ }
+
+ // Create guard page after the allocatable area.
+ if (!vm->Guard(start + CodePageAreaStartOffset() + area_size)) {
+ return false;
+ }
+
+ return true;
+}
+
+
+// -----------------------------------------------------------------------------
+// MemoryChunk implementation
+
+void MemoryChunk::IncrementLiveBytesFromMutator(Address address, int by) {
+ MemoryChunk* chunk = MemoryChunk::FromAddress(address);
+ if (!chunk->InNewSpace() && !static_cast<Page*>(chunk)->WasSwept()) {
+ static_cast<PagedSpace*>(chunk->owner())->IncrementUnsweptFreeBytes(-by);
+ }
+ chunk->IncrementLiveBytes(by);
+}
+
// -----------------------------------------------------------------------------
// PagedSpace implementation
@@ -659,9 +773,15 @@ PagedSpace::PagedSpace(Heap* heap,
free_list_(this),
was_swept_conservatively_(false),
first_unswept_page_(Page::FromAddress(NULL)),
- last_unswept_page_(Page::FromAddress(NULL)) {
+ unswept_free_bytes_(0) {
+ if (id == CODE_SPACE) {
+ area_size_ = heap->isolate()->memory_allocator()->
+ CodePageAreaSize();
+ } else {
+ area_size_ = Page::kPageSize - Page::kObjectStartOffset;
+ }
max_capacity_ = (RoundDown(max_capacity, Page::kPageSize) / Page::kPageSize)
- * Page::kObjectAreaSize;
+ * AreaSize();
accounting_stats_.Clear();
allocation_info_.top = NULL;
@@ -671,12 +791,12 @@ PagedSpace::PagedSpace(Heap* heap,
}
-bool PagedSpace::Setup() {
+bool PagedSpace::SetUp() {
return true;
}
-bool PagedSpace::HasBeenSetup() {
+bool PagedSpace::HasBeenSetUp() {
return true;
}
@@ -711,8 +831,7 @@ MaybeObject* PagedSpace::FindObject(Address addr) {
}
bool PagedSpace::CanExpand() {
- ASSERT(max_capacity_ % Page::kObjectAreaSize == 0);
- ASSERT(Capacity() % Page::kObjectAreaSize == 0);
+ ASSERT(max_capacity_ % AreaSize() == 0);
if (Capacity() == max_capacity_) return false;
@@ -727,8 +846,14 @@ bool PagedSpace::CanExpand() {
bool PagedSpace::Expand() {
if (!CanExpand()) return false;
- Page* p = heap()->isolate()->memory_allocator()->
- AllocatePage(this, executable());
+ intptr_t size = AreaSize();
+
+ if (anchor_.next_page() == &anchor_) {
+ size = SizeOfFirstPage();
+ }
+
+ Page* p = heap()->isolate()->memory_allocator()->AllocatePage(
+ size, this, executable());
if (p == NULL) return false;
ASSERT(Capacity() <= max_capacity_);
@@ -739,7 +864,38 @@ bool PagedSpace::Expand() {
}
-#ifdef DEBUG
+intptr_t PagedSpace::SizeOfFirstPage() {
+ int size = 0;
+ switch (identity()) {
+ case OLD_POINTER_SPACE:
+ size = 64 * kPointerSize * KB;
+ break;
+ case OLD_DATA_SPACE:
+ size = 192 * KB;
+ break;
+ case MAP_SPACE:
+ size = 128 * KB;
+ break;
+ case CELL_SPACE:
+ size = 96 * KB;
+ break;
+ case CODE_SPACE:
+ if (kPointerSize == 8) {
+ // On x64 we allocate code pages in a special way (from the reserved
+ // 2Byte area). That part of the code is not yet upgraded to handle
+ // small pages.
+ size = AreaSize();
+ } else {
+ size = 384 * KB;
+ }
+ break;
+ default:
+ UNREACHABLE();
+ }
+ return Min(size, AreaSize());
+}
+
+
int PagedSpace::CountTotalPages() {
PageIterator it(this);
int count = 0;
@@ -749,11 +905,32 @@ int PagedSpace::CountTotalPages() {
}
return count;
}
-#endif
void PagedSpace::ReleasePage(Page* page) {
ASSERT(page->LiveBytes() == 0);
+ ASSERT(AreaSize() == page->area_size());
+
+ // Adjust list of unswept pages if the page is the head of the list.
+ if (first_unswept_page_ == page) {
+ first_unswept_page_ = page->next_page();
+ if (first_unswept_page_ == anchor()) {
+ first_unswept_page_ = Page::FromAddress(NULL);
+ }
+ }
+
+ if (page->WasSwept()) {
+ intptr_t size = free_list_.EvictFreeListItems(page);
+ accounting_stats_.AllocateBytes(size);
+ ASSERT_EQ(AreaSize(), static_cast<int>(size));
+ } else {
+ DecreaseUnsweptFreeBytes(page);
+ }
+
+ if (Page::FromAllocationTop(allocation_info_.top) == page) {
+ allocation_info_.top = allocation_info_.limit = NULL;
+ }
+
page->Unlink();
if (page->IsFlagSet(MemoryChunk::CONTAINS_ONLY_DATA)) {
heap()->isolate()->memory_allocator()->Free(page);
@@ -762,8 +939,7 @@ void PagedSpace::ReleasePage(Page* page) {
}
ASSERT(Capacity() > 0);
- ASSERT(Capacity() % Page::kObjectAreaSize == 0);
- accounting_stats_.ShrinkSpace(Page::kObjectAreaSize);
+ accounting_stats_.ShrinkSpace(AreaSize());
}
@@ -771,8 +947,26 @@ void PagedSpace::ReleaseAllUnusedPages() {
PageIterator it(this);
while (it.has_next()) {
Page* page = it.next();
- if (page->LiveBytes() == 0) {
- ReleasePage(page);
+ if (!page->WasSwept()) {
+ if (page->LiveBytes() == 0) ReleasePage(page);
+ } else {
+ HeapObject* obj = HeapObject::FromAddress(page->area_start());
+ if (obj->IsFreeSpace() &&
+ FreeSpace::cast(obj)->size() == AreaSize()) {
+ // Sometimes we allocate memory from free list but don't
+ // immediately initialize it (e.g. see PagedSpace::ReserveSpace
+ // called from Heap::ReserveSpace that can cause GC before
+ // reserved space is actually initialized).
+ // Thus we can't simply assume that obj represents a valid
+ // node still owned by a free list
+ // Instead we should verify that the page is fully covered
+ // by free list items.
+ FreeList::SizeStats sizes;
+ free_list_.CountFreeListItems(page, &sizes);
+ if (sizes.Total() == AreaSize()) {
+ ReleasePage(page);
+ }
+ }
}
}
heap()->FreeQueuedChunks();
@@ -800,8 +994,8 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
}
ASSERT(page->WasSweptPrecisely());
HeapObjectIterator it(page, NULL);
- Address end_of_previous_object = page->ObjectAreaStart();
- Address top = page->ObjectAreaEnd();
+ Address end_of_previous_object = page->area_start();
+ Address top = page->area_end();
int black_size = 0;
for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
ASSERT(end_of_previous_object <= object->address());
@@ -839,9 +1033,9 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
// NewSpace implementation
-bool NewSpace::Setup(int reserved_semispace_capacity,
+bool NewSpace::SetUp(int reserved_semispace_capacity,
int maximum_semispace_capacity) {
- // Setup new space based on the preallocated memory block defined by
+ // Set up new space based on the preallocated memory block defined by
// start and size. The provided space is divided into two semi-spaces.
// To support fast containment testing in the new space, the size of
// this chunk must be a power of two and it must be aligned to its size.
@@ -860,7 +1054,7 @@ bool NewSpace::Setup(int reserved_semispace_capacity,
ASSERT(initial_semispace_capacity <= maximum_semispace_capacity);
ASSERT(IsPowerOf2(maximum_semispace_capacity));
- // Allocate and setup the histogram arrays if necessary.
+ // Allocate and set up the histogram arrays if necessary.
allocated_histogram_ = NewArray<HistogramInfo>(LAST_TYPE + 1);
promoted_histogram_ = NewArray<HistogramInfo>(LAST_TYPE + 1);
@@ -874,16 +1068,16 @@ bool NewSpace::Setup(int reserved_semispace_capacity,
2 * heap()->ReservedSemiSpaceSize());
ASSERT(IsAddressAligned(chunk_base_, 2 * reserved_semispace_capacity, 0));
- if (!to_space_.Setup(chunk_base_,
- initial_semispace_capacity,
- maximum_semispace_capacity)) {
- return false;
- }
- if (!from_space_.Setup(chunk_base_ + reserved_semispace_capacity,
- initial_semispace_capacity,
- maximum_semispace_capacity)) {
+ to_space_.SetUp(chunk_base_,
+ initial_semispace_capacity,
+ maximum_semispace_capacity);
+ from_space_.SetUp(chunk_base_ + reserved_semispace_capacity,
+ initial_semispace_capacity,
+ maximum_semispace_capacity);
+ if (!to_space_.Commit()) {
return false;
}
+ ASSERT(!from_space_.is_committed()); // No need to use memory yet.
start_ = chunk_base_;
address_mask_ = ~(2 * reserved_semispace_capacity - 1);
@@ -1012,16 +1206,51 @@ bool NewSpace::AddFreshPage() {
// Failed to get a new page in to-space.
return false;
}
+
// Clear remainder of current page.
- int remaining_in_page =
- static_cast<int>(NewSpacePage::FromLimit(top)->body_limit() - top);
+ Address limit = NewSpacePage::FromLimit(top)->area_end();
+ if (heap()->gc_state() == Heap::SCAVENGE) {
+ heap()->promotion_queue()->SetNewLimit(limit);
+ heap()->promotion_queue()->ActivateGuardIfOnTheSamePage();
+ }
+
+ int remaining_in_page = static_cast<int>(limit - top);
heap()->CreateFillerObjectAt(top, remaining_in_page);
pages_used_++;
UpdateAllocationInfo();
+
return true;
}
+MaybeObject* NewSpace::SlowAllocateRaw(int size_in_bytes) {
+ Address old_top = allocation_info_.top;
+ Address new_top = old_top + size_in_bytes;
+ Address high = to_space_.page_high();
+ if (allocation_info_.limit < high) {
+ // Incremental marking has lowered the limit to get a
+ // chance to do a step.
+ allocation_info_.limit = Min(
+ allocation_info_.limit + inline_allocation_limit_step_,
+ high);
+ int bytes_allocated = static_cast<int>(new_top - top_on_previous_step_);
+ heap()->incremental_marking()->Step(
+ bytes_allocated, IncrementalMarking::GC_VIA_STACK_GUARD);
+ top_on_previous_step_ = new_top;
+ return AllocateRaw(size_in_bytes);
+ } else if (AddFreshPage()) {
+ // Switched to new page. Try allocating again.
+ int bytes_allocated = static_cast<int>(old_top - top_on_previous_step_);
+ heap()->incremental_marking()->Step(
+ bytes_allocated, IncrementalMarking::GC_VIA_STACK_GUARD);
+ top_on_previous_step_ = to_space_.page_low();
+ return AllocateRaw(size_in_bytes);
+ } else {
+ return Failure::RetryAfterGC();
+ }
+}
+
+
#ifdef DEBUG
// We do not use the SemiSpaceIterator because verification doesn't assume
// that it works (it depends on the invariants we are checking).
@@ -1031,7 +1260,7 @@ void NewSpace::Verify() {
// There should be objects packed in from the low address up to the
// allocation pointer.
- Address current = to_space_.first_page()->body();
+ Address current = to_space_.first_page()->area_start();
CHECK_EQ(current, to_space_.space_start());
while (current != top()) {
@@ -1066,7 +1295,7 @@ void NewSpace::Verify() {
NewSpacePage* page = NewSpacePage::FromLimit(current)->next_page();
// Next page should be valid.
CHECK(!page->is_anchor());
- current = page->body();
+ current = page->area_start();
}
}
@@ -1081,7 +1310,7 @@ void NewSpace::Verify() {
// -----------------------------------------------------------------------------
// SemiSpace implementation
-bool SemiSpace::Setup(Address start,
+void SemiSpace::SetUp(Address start,
int initial_capacity,
int maximum_capacity) {
// Creates a space in the young generation. The constructor does not
@@ -1100,8 +1329,6 @@ bool SemiSpace::Setup(Address start,
object_mask_ = address_mask_ | kHeapObjectTagMask;
object_expected_ = reinterpret_cast<uintptr_t>(start) | kHeapObjectTag;
age_mark_ = start_;
-
- return Commit();
}
@@ -1151,6 +1378,9 @@ bool SemiSpace::Uncommit() {
bool SemiSpace::GrowTo(int new_capacity) {
+ if (!is_committed()) {
+ if (!Commit()) return false;
+ }
ASSERT((new_capacity & Page::kPageAlignmentMask) == 0);
ASSERT(new_capacity <= maximum_capacity_);
ASSERT(new_capacity > capacity_);
@@ -1189,24 +1419,29 @@ bool SemiSpace::ShrinkTo(int new_capacity) {
ASSERT((new_capacity & Page::kPageAlignmentMask) == 0);
ASSERT(new_capacity >= initial_capacity_);
ASSERT(new_capacity < capacity_);
- // Semispaces grow backwards from the end of their allocated capacity,
- // so we find the before and after start addresses relative to the
- // end of the space.
- Address space_end = start_ + maximum_capacity_;
- Address old_start = space_end - capacity_;
- size_t delta = capacity_ - new_capacity;
- ASSERT(IsAligned(delta, OS::AllocateAlignment()));
- if (!heap()->isolate()->memory_allocator()->UncommitBlock(old_start, delta)) {
- return false;
+ if (is_committed()) {
+ // Semispaces grow backwards from the end of their allocated capacity,
+ // so we find the before and after start addresses relative to the
+ // end of the space.
+ Address space_end = start_ + maximum_capacity_;
+ Address old_start = space_end - capacity_;
+ size_t delta = capacity_ - new_capacity;
+ ASSERT(IsAligned(delta, OS::AllocateAlignment()));
+
+ MemoryAllocator* allocator = heap()->isolate()->memory_allocator();
+ if (!allocator->UncommitBlock(old_start, delta)) {
+ return false;
+ }
+
+ int pages_after = new_capacity / Page::kPageSize;
+ NewSpacePage* new_last_page =
+ NewSpacePage::FromAddress(space_end - pages_after * Page::kPageSize);
+ new_last_page->set_next_page(anchor());
+ anchor()->set_prev_page(new_last_page);
+ ASSERT((current_page_ <= first_page()) && (current_page_ >= new_last_page));
}
- capacity_ = new_capacity;
- int pages_after = capacity_ / Page::kPageSize;
- NewSpacePage* new_last_page =
- NewSpacePage::FromAddress(space_end - pages_after * Page::kPageSize);
- new_last_page->set_next_page(anchor());
- anchor()->set_prev_page(new_last_page);
- ASSERT((current_page_ <= first_page()) && (current_page_ >= new_last_page));
+ capacity_ = new_capacity;
return true;
}
@@ -1589,14 +1824,14 @@ void FreeListNode::set_size(Heap* heap, int size_in_bytes) {
// field and a next pointer, we give it a filler map that gives it the
// correct size.
if (size_in_bytes > FreeSpace::kHeaderSize) {
- set_map(heap->raw_unchecked_free_space_map());
+ set_map_no_write_barrier(heap->raw_unchecked_free_space_map());
// Can't use FreeSpace::cast because it fails during deserialization.
FreeSpace* this_as_free_space = reinterpret_cast<FreeSpace*>(this);
this_as_free_space->set_size(size_in_bytes);
} else if (size_in_bytes == kPointerSize) {
- set_map(heap->raw_unchecked_one_pointer_filler_map());
+ set_map_no_write_barrier(heap->raw_unchecked_one_pointer_filler_map());
} else if (size_in_bytes == 2 * kPointerSize) {
- set_map(heap->raw_unchecked_two_pointer_filler_map());
+ set_map_no_write_barrier(heap->raw_unchecked_two_pointer_filler_map());
} else {
UNREACHABLE();
}
@@ -1784,6 +2019,13 @@ HeapObject* FreeList::Allocate(int size_in_bytes) {
// skipped when scanning the heap. This also puts it back in the free list
// if it is big enough.
owner_->Free(owner_->top(), old_linear_size);
+
+#ifdef DEBUG
+ for (int i = 0; i < size_in_bytes / kPointerSize; i++) {
+ reinterpret_cast<Object**>(new_node->address())[i] = Smi::FromInt(0);
+ }
+#endif
+
owner_->heap()->incremental_marking()->OldSpaceStep(
size_in_bytes - old_linear_size);
@@ -1837,13 +2079,50 @@ static intptr_t CountFreeListItemsInList(FreeListNode* n, Page* p) {
}
-void FreeList::CountFreeListItems(Page* p, intptr_t* sizes) {
- sizes[0] = CountFreeListItemsInList(small_list_, p);
- sizes[1] = CountFreeListItemsInList(medium_list_, p);
- sizes[2] = CountFreeListItemsInList(large_list_, p);
- sizes[3] = CountFreeListItemsInList(huge_list_, p);
+void FreeList::CountFreeListItems(Page* p, SizeStats* sizes) {
+ sizes->huge_size_ = CountFreeListItemsInList(huge_list_, p);
+ if (sizes->huge_size_ < p->area_size()) {
+ sizes->small_size_ = CountFreeListItemsInList(small_list_, p);
+ sizes->medium_size_ = CountFreeListItemsInList(medium_list_, p);
+ sizes->large_size_ = CountFreeListItemsInList(large_list_, p);
+ } else {
+ sizes->small_size_ = 0;
+ sizes->medium_size_ = 0;
+ sizes->large_size_ = 0;
+ }
}
+
+static intptr_t EvictFreeListItemsInList(FreeListNode** n, Page* p) {
+ intptr_t sum = 0;
+ while (*n != NULL) {
+ if (Page::FromAddress((*n)->address()) == p) {
+ FreeSpace* free_space = reinterpret_cast<FreeSpace*>(*n);
+ sum += free_space->Size();
+ *n = (*n)->next();
+ } else {
+ n = (*n)->next_address();
+ }
+ }
+ return sum;
+}
+
+
+intptr_t FreeList::EvictFreeListItems(Page* p) {
+ intptr_t sum = EvictFreeListItemsInList(&huge_list_, p);
+
+ if (sum < p->area_size()) {
+ sum += EvictFreeListItemsInList(&small_list_, p) +
+ EvictFreeListItemsInList(&medium_list_, p) +
+ EvictFreeListItemsInList(&large_list_, p);
+ }
+
+ available_ -= static_cast<int>(sum);
+
+ return sum;
+}
+
+
#ifdef DEBUG
intptr_t FreeList::SumFreeList(FreeListNode* cur) {
intptr_t sum = 0;
@@ -1904,7 +2183,7 @@ bool NewSpace::ReserveSpace(int bytes) {
// marking. The most reliable way to ensure that there is linear space is
// to do the allocation, then rewind the limit.
ASSERT(bytes <= InitialCapacity());
- MaybeObject* maybe = AllocateRawInternal(bytes);
+ MaybeObject* maybe = AllocateRaw(bytes);
Object* object = NULL;
if (!maybe->ToObject(&object)) return false;
HeapObject* allocation = HeapObject::cast(object);
@@ -1930,7 +2209,6 @@ void PagedSpace::PrepareForMarkCompact() {
// Stop lazy sweeping and clear marking bits for unswept pages.
if (first_unswept_page_ != NULL) {
- Page* last = last_unswept_page_;
Page* p = first_unswept_page_;
do {
// Do not use ShouldBeSweptLazily predicate here.
@@ -1944,9 +2222,10 @@ void PagedSpace::PrepareForMarkCompact() {
}
}
p = p->next_page();
- } while (p != last);
+ } while (p != anchor());
}
- first_unswept_page_ = last_unswept_page_ = Page::FromAddress(NULL);
+ first_unswept_page_ = Page::FromAddress(NULL);
+ unswept_free_bytes_ = 0;
// Clear the free list before a full GC---it will be rebuilt afterward.
free_list_.Reset();
@@ -1954,7 +2233,7 @@ void PagedSpace::PrepareForMarkCompact() {
bool PagedSpace::ReserveSpace(int size_in_bytes) {
- ASSERT(size_in_bytes <= Page::kMaxHeapObjectSize);
+ ASSERT(size_in_bytes <= AreaSize());
ASSERT(size_in_bytes == RoundSizeDownToObjectAlignment(size_in_bytes));
Address current_top = allocation_info_.top;
Address new_top = current_top + size_in_bytes;
@@ -1979,7 +2258,9 @@ bool PagedSpace::ReserveSpace(int size_in_bytes) {
// You have to call this last, since the implementation from PagedSpace
// doesn't know that memory was 'promised' to large object space.
bool LargeObjectSpace::ReserveSpace(int bytes) {
- return heap()->OldGenerationSpaceAvailable() >= bytes;
+ return heap()->OldGenerationCapacityAvailable() >= bytes &&
+ (!heap()->incremental_marking()->IsStopped() ||
+ heap()->OldGenerationSpaceAvailable() >= bytes);
}
@@ -1987,7 +2268,6 @@ bool PagedSpace::AdvanceSweeper(intptr_t bytes_to_sweep) {
if (IsSweepingComplete()) return true;
intptr_t freed_bytes = 0;
- Page* last = last_unswept_page_;
Page* p = first_unswept_page_;
do {
Page* next_page = p->next_page();
@@ -1996,13 +2276,14 @@ bool PagedSpace::AdvanceSweeper(intptr_t bytes_to_sweep) {
PrintF("Sweeping 0x%" V8PRIxPTR " lazily advanced.\n",
reinterpret_cast<intptr_t>(p));
}
+ DecreaseUnsweptFreeBytes(p);
freed_bytes += MarkCompactCollector::SweepConservatively(this, p);
}
p = next_page;
- } while (p != last && freed_bytes < bytes_to_sweep);
+ } while (p != anchor() && freed_bytes < bytes_to_sweep);
- if (p == last) {
- last_unswept_page_ = first_unswept_page_ = Page::FromAddress(NULL);
+ if (p == anchor()) {
+ first_unswept_page_ = Page::FromAddress(NULL);
} else {
first_unswept_page_ = p;
}
@@ -2018,7 +2299,7 @@ bool PagedSpace::AdvanceSweeper(intptr_t bytes_to_sweep) {
void PagedSpace::EvictEvacuationCandidatesFromFreeLists() {
if (allocation_info_.top >= allocation_info_.limit) return;
- if (Page::FromAddress(allocation_info_.top)->IsEvacuationCandidate()) {
+ if (Page::FromAllocationTop(allocation_info_.top)->IsEvacuationCandidate()) {
// Create filler object to keep page iterable if it was iterable.
int remaining =
static_cast<int>(allocation_info_.limit - allocation_info_.top);
@@ -2033,29 +2314,22 @@ void PagedSpace::EvictEvacuationCandidatesFromFreeLists() {
HeapObject* PagedSpace::SlowAllocateRaw(int size_in_bytes) {
// Allocation in this space has failed.
- // Free list allocation failed and there is no next page. Fail if we have
- // hit the old generation size limit that should cause a garbage
- // collection.
- if (!heap()->always_allocate() &&
- heap()->OldGenerationAllocationLimitReached()) {
- return NULL;
- }
-
- // If there are unswept pages advance lazy sweeper.
+ // If there are unswept pages advance lazy sweeper then sweep one page before
+ // allocating a new page.
if (first_unswept_page_->is_valid()) {
AdvanceSweeper(size_in_bytes);
// Retry the free list allocation.
HeapObject* object = free_list_.Allocate(size_in_bytes);
if (object != NULL) return object;
+ }
- if (!IsSweepingComplete()) {
- AdvanceSweeper(kMaxInt);
-
- // Retry the free list allocation.
- object = free_list_.Allocate(size_in_bytes);
- if (object != NULL) return object;
- }
+ // Free list allocation failed and there is no next page. Fail if we have
+ // hit the old generation size limit that should cause a garbage
+ // collection.
+ if (!heap()->always_allocate() &&
+ heap()->OldGenerationAllocationLimitReached()) {
+ return NULL;
}
// Try to expand the space and allocate in the new next page.
@@ -2063,6 +2337,16 @@ HeapObject* PagedSpace::SlowAllocateRaw(int size_in_bytes) {
return free_list_.Allocate(size_in_bytes);
}
+ // Last ditch, sweep all the remaining pages to try to find space. This may
+ // cause a pause.
+ if (!IsSweepingComplete()) {
+ AdvanceSweeper(kMaxInt);
+
+ // Retry the free list allocation.
+ HeapObject* object = free_list_.Allocate(size_in_bytes);
+ if (object != NULL) return object;
+ }
+
// Finally, fail.
return NULL;
}
@@ -2279,6 +2563,10 @@ HeapObject* LargeObjectIterator::Next() {
// -----------------------------------------------------------------------------
// LargeObjectSpace
+static bool ComparePointers(void* key1, void* key2) {
+ return key1 == key2;
+}
+
LargeObjectSpace::LargeObjectSpace(Heap* heap,
intptr_t max_capacity,
@@ -2288,14 +2576,16 @@ LargeObjectSpace::LargeObjectSpace(Heap* heap,
first_page_(NULL),
size_(0),
page_count_(0),
- objects_size_(0) {}
+ objects_size_(0),
+ chunk_map_(ComparePointers, 1024) {}
-bool LargeObjectSpace::Setup() {
+bool LargeObjectSpace::SetUp() {
first_page_ = NULL;
size_ = 0;
page_count_ = 0;
objects_size_ = 0;
+ chunk_map_.Clear();
return true;
}
@@ -2311,7 +2601,7 @@ void LargeObjectSpace::TearDown() {
space, kAllocationActionFree, page->size());
heap()->isolate()->memory_allocator()->Free(page);
}
- Setup();
+ SetUp();
}
@@ -2329,9 +2619,9 @@ MaybeObject* LargeObjectSpace::AllocateRaw(int object_size,
}
LargePage* page = heap()->isolate()->memory_allocator()->
- AllocateLargePage(object_size, executable, this);
+ AllocateLargePage(object_size, this, executable);
if (page == NULL) return Failure::RetryAfterGC(identity());
- ASSERT(page->body_size() >= object_size);
+ ASSERT(page->area_size() >= object_size);
size_ += static_cast<int>(page->size());
objects_size_ += object_size;
@@ -2339,34 +2629,53 @@ MaybeObject* LargeObjectSpace::AllocateRaw(int object_size,
page->set_next_page(first_page_);
first_page_ = page;
+ // Register all MemoryChunk::kAlignment-aligned chunks covered by
+ // this large page in the chunk map.
+ uintptr_t base = reinterpret_cast<uintptr_t>(page) / MemoryChunk::kAlignment;
+ uintptr_t limit = base + (page->size() - 1) / MemoryChunk::kAlignment;
+ for (uintptr_t key = base; key <= limit; key++) {
+ HashMap::Entry* entry = chunk_map_.Lookup(reinterpret_cast<void*>(key),
+ static_cast<uint32_t>(key),
+ true);
+ ASSERT(entry != NULL);
+ entry->value = page;
+ }
+
+ HeapObject* object = page->GetObject();
+
+#ifdef DEBUG
+ // Make the object consistent so the heap can be vefified in OldSpaceStep.
+ reinterpret_cast<Object**>(object->address())[0] =
+ heap()->fixed_array_map();
+ reinterpret_cast<Object**>(object->address())[1] = Smi::FromInt(0);
+#endif
+
heap()->incremental_marking()->OldSpaceStep(object_size);
- return page->GetObject();
+ return object;
}
// GC support
MaybeObject* LargeObjectSpace::FindObject(Address a) {
- for (LargePage* page = first_page_;
- page != NULL;
- page = page->next_page()) {
- Address page_address = page->address();
- if (page_address <= a && a < page_address + page->size()) {
- return page->GetObject();
- }
+ LargePage* page = FindPage(a);
+ if (page != NULL) {
+ return page->GetObject();
}
return Failure::Exception();
}
-LargePage* LargeObjectSpace::FindPageContainingPc(Address pc) {
- // TODO(853): Change this implementation to only find executable
- // chunks and use some kind of hash-based approach to speed it up.
- for (LargePage* chunk = first_page_;
- chunk != NULL;
- chunk = chunk->next_page()) {
- Address chunk_address = chunk->address();
- if (chunk_address <= pc && pc < chunk_address + chunk->size()) {
- return chunk;
+LargePage* LargeObjectSpace::FindPage(Address a) {
+ uintptr_t key = reinterpret_cast<uintptr_t>(a) / MemoryChunk::kAlignment;
+ HashMap::Entry* e = chunk_map_.Lookup(reinterpret_cast<void*>(key),
+ static_cast<uint32_t>(key),
+ false);
+ if (e != NULL) {
+ ASSERT(e->value != NULL);
+ LargePage* page = reinterpret_cast<LargePage*>(e->value);
+ ASSERT(page->is_valid());
+ if (page->Contains(a)) {
+ return page;
}
}
return NULL;
@@ -2384,7 +2693,7 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
MarkBit mark_bit = Marking::MarkBitFrom(object);
if (mark_bit.Get()) {
mark_bit.Clear();
- MemoryChunk::IncrementLiveBytes(object->address(), -object->Size());
+ MemoryChunk::IncrementLiveBytesFromGC(object->address(), -object->Size());
previous = current;
current = current->next_page();
} else {
@@ -2404,6 +2713,17 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
objects_size_ -= object->Size();
page_count_--;
+ // Remove entries belonging to this page.
+ // Use variable alignment to help pass length check (<= 80 characters)
+ // of single line in tools/presubmit.py.
+ const intptr_t alignment = MemoryChunk::kAlignment;
+ uintptr_t base = reinterpret_cast<uintptr_t>(page)/alignment;
+ uintptr_t limit = base + (page->size()-1)/alignment;
+ for (uintptr_t key = base; key <= limit; key++) {
+ chunk_map_.Remove(reinterpret_cast<void*>(key),
+ static_cast<uint32_t>(key));
+ }
+
if (is_pointer_object) {
heap()->QueueMemoryChunkForFree(page);
} else {
@@ -2438,7 +2758,7 @@ void LargeObjectSpace::Verify() {
// object area start.
HeapObject* object = chunk->GetObject();
Page* page = Page::FromAddress(object->address());
- ASSERT(object->address() == page->ObjectAreaStart());
+ ASSERT(object->address() == page->area_start());
// The first word should be a map, and we expect all map pointers to be
// in map space.
diff --git a/src/3rdparty/v8/src/spaces.h b/src/3rdparty/v8/src/spaces.h
index 45e008c..90f62f3 100644
--- a/src/3rdparty/v8/src/spaces.h
+++ b/src/3rdparty/v8/src/spaces.h
@@ -29,6 +29,7 @@
#define V8_SPACES_H_
#include "allocation.h"
+#include "hashmap.h"
#include "list.h"
#include "log.h"
@@ -103,7 +104,7 @@ class Isolate;
ASSERT((OffsetFrom(address) & kMapAlignmentMask) == 0)
#define ASSERT_OBJECT_SIZE(size) \
- ASSERT((0 < size) && (size <= Page::kMaxHeapObjectSize))
+ ASSERT((0 < size) && (size <= Page::kMaxNonCodeHeapObjectSize))
#define ASSERT_PAGE_OFFSET(offset) \
ASSERT((Page::kObjectStartOffset <= offset) \
@@ -295,7 +296,7 @@ class SlotsBuffer;
// MemoryChunk represents a memory region owned by a specific space.
// It is divided into the header and the body. Chunk start is always
-// 1MB aligned. Start of the body is aligned so it can accomodate
+// 1MB aligned. Start of the body is aligned so it can accommodate
// any heap object.
class MemoryChunk {
public:
@@ -361,21 +362,15 @@ class MemoryChunk {
store_buffer_counter_ = counter;
}
- Address body() { return address() + kObjectStartOffset; }
-
- Address body_limit() { return address() + size(); }
-
- int body_size() { return static_cast<int>(size() - kObjectStartOffset); }
-
bool Contains(Address addr) {
- return addr >= body() && addr < address() + size();
+ return addr >= area_start() && addr < area_end();
}
// Checks whether addr can be a limit of addresses in this page.
// It's a limit if it's in the page, or if it's just after the
// last byte of the page.
bool ContainsLimit(Address addr) {
- return addr >= body() && addr <= address() + size();
+ return addr >= area_start() && addr <= area_end();
}
enum MemoryChunkFlags {
@@ -472,10 +467,13 @@ class MemoryChunk {
ASSERT(static_cast<unsigned>(live_byte_count_) <= size_);
return live_byte_count_;
}
- static void IncrementLiveBytes(Address address, int by) {
+
+ static void IncrementLiveBytesFromGC(Address address, int by) {
MemoryChunk::FromAddress(address)->IncrementLiveBytes(by);
}
+ static void IncrementLiveBytesFromMutator(Address address, int by);
+
static const intptr_t kAlignment =
(static_cast<uintptr_t>(1) << kPageSizeBits);
@@ -484,8 +482,9 @@ class MemoryChunk {
static const intptr_t kSizeOffset = kPointerSize + kPointerSize;
static const intptr_t kLiveBytesOffset =
- kSizeOffset + kPointerSize + kPointerSize + kPointerSize +
- kPointerSize + kPointerSize + kPointerSize + kIntSize;
+ kSizeOffset + kPointerSize + kPointerSize + kPointerSize +
+ kPointerSize + kPointerSize +
+ kPointerSize + kPointerSize + kPointerSize + kIntSize;
static const size_t kSlotsBufferOffset = kLiveBytesOffset + kIntSize;
@@ -504,6 +503,15 @@ class MemoryChunk {
size_t size() const { return size_; }
+ void set_size(size_t size) {
+ size_ = size;
+ }
+
+ void SetArea(Address area_start, Address area_end) {
+ area_start_ = area_start;
+ area_end_ = area_end;
+ }
+
Executability executable() {
return IsFlagSet(IS_EXECUTABLE) ? EXECUTABLE : NOT_EXECUTABLE;
}
@@ -587,12 +595,22 @@ class MemoryChunk {
ClearFlag(EVACUATION_CANDIDATE);
}
+ Address area_start() { return area_start_; }
+ Address area_end() { return area_end_; }
+ int area_size() {
+ return static_cast<int>(area_end() - area_start());
+ }
protected:
MemoryChunk* next_chunk_;
MemoryChunk* prev_chunk_;
size_t size_;
intptr_t flags_;
+
+ // Start and end of allocatable memory on this chunk.
+ Address area_start_;
+ Address area_end_;
+
// If the chunk needs to remember its memory reservation, it is stored here.
VirtualMemory reservation_;
// The identity of the owning space. This is tagged as a failure pointer, but
@@ -611,14 +629,18 @@ class MemoryChunk {
static MemoryChunk* Initialize(Heap* heap,
Address base,
size_t size,
+ Address area_start,
+ Address area_end,
Executability executable,
Space* owner);
friend class MemoryAllocator;
};
+
STATIC_CHECK(sizeof(MemoryChunk) <= MemoryChunk::kHeaderSize);
+
// -----------------------------------------------------------------------------
// A page is a memory chunk of a size 1MB. Large object pages may be larger.
//
@@ -650,12 +672,6 @@ class Page : public MemoryChunk {
inline void set_next_page(Page* page);
inline void set_prev_page(Page* page);
- // Returns the start address of the object area in this page.
- Address ObjectAreaStart() { return address() + kObjectStartOffset; }
-
- // Returns the end address (exclusive) of the object area in this page.
- Address ObjectAreaEnd() { return address() + Page::kPageSize; }
-
// Checks whether an address is page aligned.
static bool IsAlignedToPageSize(Address a) {
return 0 == (OffsetFrom(a) & kPageAlignmentMask);
@@ -678,21 +694,14 @@ class Page : public MemoryChunk {
// Page size in bytes. This must be a multiple of the OS page size.
static const int kPageSize = 1 << kPageSizeBits;
- // Page size mask.
- static const intptr_t kPageAlignmentMask = (1 << kPageSizeBits) - 1;
-
// Object area size in bytes.
- static const int kObjectAreaSize = kPageSize - kObjectStartOffset;
+ static const int kNonCodeObjectAreaSize = kPageSize - kObjectStartOffset;
// Maximum object size that fits in a page.
- static const int kMaxHeapObjectSize = kObjectAreaSize;
-
- static const int kFirstUsedCell =
- (kObjectStartOffset/kPointerSize) >> Bitmap::kBitsPerCellLog2;
+ static const int kMaxNonCodeHeapObjectSize = kNonCodeObjectAreaSize;
- static const int kLastUsedCell =
- ((kPageSize - kPointerSize)/kPointerSize) >>
- Bitmap::kBitsPerCellLog2;
+ // Page size mask.
+ static const intptr_t kPageAlignmentMask = (1 << kPageSizeBits) - 1;
inline void ClearGCFields();
@@ -727,7 +736,7 @@ STATIC_CHECK(sizeof(Page) <= MemoryChunk::kHeaderSize);
class LargePage : public MemoryChunk {
public:
HeapObject* GetObject() {
- return HeapObject::FromAddress(body());
+ return HeapObject::FromAddress(area_start());
}
inline LargePage* next_page() const {
@@ -811,7 +820,7 @@ class CodeRange {
// Reserves a range of virtual memory, but does not commit any of it.
// Can only be called once, at heap initialization time.
// Returns false on failure.
- bool Setup(const size_t requested_size);
+ bool SetUp(const size_t requested_size);
// Frees the range of virtual memory, and frees the data structures used to
// manage it.
@@ -939,15 +948,15 @@ class MemoryAllocator {
// Initializes its internal bookkeeping structures.
// Max capacity of the total space and executable memory limit.
- bool Setup(intptr_t max_capacity, intptr_t capacity_executable);
+ bool SetUp(intptr_t max_capacity, intptr_t capacity_executable);
void TearDown();
- Page* AllocatePage(PagedSpace* owner, Executability executable);
+ Page* AllocatePage(
+ intptr_t size, PagedSpace* owner, Executability executable);
- LargePage* AllocateLargePage(intptr_t object_size,
- Executability executable,
- Space* owner);
+ LargePage* AllocateLargePage(
+ intptr_t object_size, Space* owner, Executability executable);
void Free(MemoryChunk* chunk);
@@ -968,7 +977,7 @@ class MemoryAllocator {
// Returns maximum available bytes that the old space can have.
intptr_t MaxAvailable() {
- return (Available() / Page::kPageSize) * Page::kObjectAreaSize;
+ return (Available() / Page::kPageSize) * Page::kMaxNonCodeHeapObjectSize;
}
#ifdef DEBUG
@@ -1021,6 +1030,20 @@ class MemoryAllocator {
bool MemoryAllocationCallbackRegistered(
MemoryAllocationCallback callback);
+ static int CodePageGuardStartOffset();
+
+ static int CodePageGuardSize();
+
+ static int CodePageAreaStartOffset();
+
+ static int CodePageAreaEndOffset();
+
+ static int CodePageAreaSize() {
+ return CodePageAreaEndOffset() - CodePageAreaStartOffset();
+ }
+
+ static bool CommitCodePage(VirtualMemory* vm, Address start, size_t size);
+
private:
Isolate* isolate_;
@@ -1177,11 +1200,11 @@ class AllocationInfo {
// An abstraction of the accounting statistics of a page-structured space.
-// The 'capacity' of a space is the number of object-area bytes (ie, not
+// The 'capacity' of a space is the number of object-area bytes (i.e., not
// including page bookkeeping structures) currently in the space. The 'size'
// of a space is the number of allocated bytes, the 'waste' in the space is
// the number of bytes that are not allocated and not available to
-// allocation without reorganizing the space via a GC (eg, small blocks due
+// allocation without reorganizing the space via a GC (e.g. small blocks due
// to internal fragmentation, top of page areas in map space), and the bytes
// 'available' is the number of unallocated bytes that are not waste. The
// capacity is the sum of size, waste, and available.
@@ -1194,7 +1217,7 @@ class AllocationStats BASE_EMBEDDED {
public:
AllocationStats() { Clear(); }
- // Zero out all the allocation statistics (ie, no capacity).
+ // Zero out all the allocation statistics (i.e., no capacity).
void Clear() {
capacity_ = 0;
size_ = 0;
@@ -1206,7 +1229,7 @@ class AllocationStats BASE_EMBEDDED {
waste_ = 0;
}
- // Reset the allocation statistics (ie, available = capacity with no
+ // Reset the allocation statistics (i.e., available = capacity with no
// wasted or allocated bytes).
void Reset() {
size_ = 0;
@@ -1337,7 +1360,7 @@ class FreeList BASE_EMBEDDED {
// starting at 'start' is placed on the free list. The return value is the
// number of bytes that have been lost due to internal fragmentation by
// freeing the block. Bookkeeping information will be written to the block,
- // ie, its contents will be destroyed. The start address should be word
+ // i.e., its contents will be destroyed. The start address should be word
// aligned, and the size should be a non-zero multiple of the word size.
int Free(Address start, int size_in_bytes);
@@ -1347,8 +1370,6 @@ class FreeList BASE_EMBEDDED {
// 'wasted_bytes'. The size should be a non-zero multiple of the word size.
MUST_USE_RESULT HeapObject* Allocate(int size_in_bytes);
- void MarkNodes();
-
#ifdef DEBUG
void Zap();
static intptr_t SumFreeList(FreeListNode* node);
@@ -1357,12 +1378,25 @@ class FreeList BASE_EMBEDDED {
bool IsVeryLong();
#endif
- void CountFreeListItems(Page* p, intptr_t* sizes);
+ struct SizeStats {
+ intptr_t Total() {
+ return small_size_ + medium_size_ + large_size_ + huge_size_;
+ }
+
+ intptr_t small_size_;
+ intptr_t medium_size_;
+ intptr_t large_size_;
+ intptr_t huge_size_;
+ };
+
+ void CountFreeListItems(Page* p, SizeStats* sizes);
+
+ intptr_t EvictFreeListItems(Page* p);
private:
// The size range of blocks, in bytes.
static const int kMinBlockSize = 3 * kPointerSize;
- static const int kMaxBlockSize = Page::kMaxHeapObjectSize;
+ static const int kMaxBlockSize = Page::kMaxNonCodeHeapObjectSize;
FreeListNode* PickNodeFromList(FreeListNode** list, int* node_size);
@@ -1404,11 +1438,11 @@ class PagedSpace : public Space {
// the memory allocator's initial chunk) if possible. If the block of
// addresses is not big enough to contain a single page-aligned page, a
// fresh chunk will be allocated.
- bool Setup();
+ bool SetUp();
// Returns true if the space has been successfully set up and not
// subsequently torn down.
- bool HasBeenSetup();
+ bool HasBeenSetUp();
// Cleans up the space, frees all pages in this space except those belonging
// to the initial chunk, uncommits addresses in the initial chunk.
@@ -1454,9 +1488,12 @@ class PagedSpace : public Space {
// linear allocation area (between top and limit) are also counted here.
virtual intptr_t Size() { return accounting_stats_.Size(); }
- // As size, but the bytes in the current linear allocation area are not
- // included.
- virtual intptr_t SizeOfObjects() { return Size() - (limit() - top()); }
+ // As size, but the bytes in lazily swept pages are estimated and the bytes
+ // in the current linear allocation area are not included.
+ virtual intptr_t SizeOfObjects() {
+ ASSERT(!IsSweepingComplete() || (unswept_free_bytes_ == 0));
+ return Size() - unswept_free_bytes_ - (limit() - top());
+ }
// Wasted bytes in this space. These are just the bytes that were thrown away
// due to being too small to use for allocation. They do not include the
@@ -1464,9 +1501,7 @@ class PagedSpace : public Space {
virtual intptr_t Waste() { return accounting_stats_.Waste(); }
// Returns the allocation pointer in this space.
- Address top() {
- return allocation_info_.top;
- }
+ Address top() { return allocation_info_.top; }
Address limit() { return allocation_info_.limit; }
// Allocate the requested number of bytes in the space if possible, return a
@@ -1541,9 +1576,24 @@ class PagedSpace : public Space {
!p->WasSweptPrecisely();
}
- void SetPagesToSweep(Page* first, Page* last) {
+ void SetPagesToSweep(Page* first) {
+ ASSERT(unswept_free_bytes_ == 0);
+ if (first == &anchor_) first = NULL;
first_unswept_page_ = first;
- last_unswept_page_ = last;
+ }
+
+ void IncrementUnsweptFreeBytes(int by) {
+ unswept_free_bytes_ += by;
+ }
+
+ void IncreaseUnsweptFreeBytes(Page* p) {
+ ASSERT(ShouldBeSweptLazily(p));
+ unswept_free_bytes_ += (p->area_size() - p->LiveBytes());
+ }
+
+ void DecreaseUnsweptFreeBytes(Page* p) {
+ ASSERT(ShouldBeSweptLazily(p));
+ unswept_free_bytes_ -= (p->area_size() - p->LiveBytes());
}
bool AdvanceSweeper(intptr_t bytes_to_sweep);
@@ -1555,47 +1605,30 @@ class PagedSpace : public Space {
Page* FirstPage() { return anchor_.next_page(); }
Page* LastPage() { return anchor_.prev_page(); }
- bool IsFragmented(Page* p) {
- intptr_t sizes[4];
+ void CountFreeListItems(Page* p, FreeList::SizeStats* sizes) {
free_list_.CountFreeListItems(p, sizes);
-
- intptr_t ratio;
- intptr_t ratio_threshold;
- if (identity() == CODE_SPACE) {
- ratio = (sizes[1] * 10 + sizes[2] * 2) * 100 / Page::kObjectAreaSize;
- ratio_threshold = 10;
- } else {
- ratio = (sizes[0] * 5 + sizes[1]) * 100 / Page::kObjectAreaSize;
- ratio_threshold = 15;
- }
-
- if (FLAG_trace_fragmentation) {
- PrintF("%p [%d]: %d (%.2f%%) %d (%.2f%%) %d (%.2f%%) %d (%.2f%%) %s\n",
- reinterpret_cast<void*>(p),
- identity(),
- static_cast<int>(sizes[0]),
- static_cast<double>(sizes[0] * 100) / Page::kObjectAreaSize,
- static_cast<int>(sizes[1]),
- static_cast<double>(sizes[1] * 100) / Page::kObjectAreaSize,
- static_cast<int>(sizes[2]),
- static_cast<double>(sizes[2] * 100) / Page::kObjectAreaSize,
- static_cast<int>(sizes[3]),
- static_cast<double>(sizes[3] * 100) / Page::kObjectAreaSize,
- (ratio > ratio_threshold) ? "[fragmented]" : "");
- }
-
- return (ratio > ratio_threshold) ||
- (FLAG_always_compact && sizes[3] != Page::kObjectAreaSize);
}
void EvictEvacuationCandidatesFromFreeLists();
bool CanExpand();
+ // Returns the number of total pages in this space.
+ int CountTotalPages();
+
+ // Return size of allocatable area on a page in this space.
+ inline int AreaSize() {
+ return area_size_;
+ }
+
protected:
+ int area_size_;
+
// Maximum capacity of this space.
intptr_t max_capacity_;
+ intptr_t SizeOfFirstPage();
+
// Accounting information for this space.
AllocationStats accounting_stats_;
@@ -1616,11 +1649,18 @@ class PagedSpace : public Space {
bool was_swept_conservatively_;
+ // The first page to be swept when the lazy sweeper advances. Is set
+ // to NULL when all pages have been swept.
Page* first_unswept_page_;
- Page* last_unswept_page_;
+
+ // The number of free bytes which could be reclaimed by advancing the
+ // lazy sweeper. This is only an estimation because lazy sweeping is
+ // done conservatively.
+ intptr_t unswept_free_bytes_;
// Expands the space by allocating a fixed number of pages. Returns false if
- // it cannot allocate requested number of pages from OS.
+ // it cannot allocate requested number of pages from OS, or if the hard heap
+ // size limit has been hit.
bool Expand();
// Generic fast case allocation function that tries linear allocation at the
@@ -1630,11 +1670,6 @@ class PagedSpace : public Space {
// Slow path of AllocateRaw. This function is space-dependent.
MUST_USE_RESULT virtual HeapObject* SlowAllocateRaw(int size_in_bytes);
-#ifdef DEBUG
- // Returns the number of total pages in this space.
- int CountTotalPages();
-#endif
-
friend class PageIterator;
};
@@ -1692,6 +1727,8 @@ class NewSpacePage : public MemoryChunk {
(1 << MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING) |
(1 << MemoryChunk::SCAN_ON_SCAVENGE);
+ static const int kAreaSize = Page::kNonCodeObjectAreaSize;
+
inline NewSpacePage* next_page() const {
return static_cast<NewSpacePage*>(next_chunk());
}
@@ -1782,14 +1819,14 @@ class SemiSpace : public Space {
current_page_(NULL) { }
// Sets up the semispace using the given chunk.
- bool Setup(Address start, int initial_capacity, int maximum_capacity);
+ void SetUp(Address start, int initial_capacity, int maximum_capacity);
// Tear down the space. Heap memory was not allocated by the space, so it
// is not deallocated here.
void TearDown();
// True if the space has been set up but not torn down.
- bool HasBeenSetup() { return start_ != NULL; }
+ bool HasBeenSetUp() { return start_ != NULL; }
// Grow the semispace to the new capacity. The new capacity
// requested must be larger than the current capacity and less than
@@ -1804,22 +1841,22 @@ class SemiSpace : public Space {
// Returns the start address of the first page of the space.
Address space_start() {
ASSERT(anchor_.next_page() != &anchor_);
- return anchor_.next_page()->body();
+ return anchor_.next_page()->area_start();
}
// Returns the start address of the current page of the space.
Address page_low() {
- return current_page_->body();
+ return current_page_->area_start();
}
// Returns one past the end address of the space.
Address space_end() {
- return anchor_.prev_page()->body_limit();
+ return anchor_.prev_page()->area_end();
}
// Returns one past the end address of the current page of the space.
Address page_high() {
- return current_page_->body_limit();
+ return current_page_->area_end();
}
bool AdvancePage() {
@@ -1955,7 +1992,7 @@ class SemiSpaceIterator : public ObjectIterator {
NewSpacePage* page = NewSpacePage::FromLimit(current_);
page = page->next_page();
ASSERT(!page->is_anchor());
- current_ = page->body();
+ current_ = page->area_start();
if (current_ == limit_) return NULL;
}
@@ -2028,15 +2065,15 @@ class NewSpace : public Space {
inline_allocation_limit_step_(0) {}
// Sets up the new space using the given chunk.
- bool Setup(int reserved_semispace_size_, int max_semispace_size);
+ bool SetUp(int reserved_semispace_size_, int max_semispace_size);
// Tears down the space. Heap memory was not allocated by the space, so it
// is not deallocated here.
void TearDown();
// True if the space has been set up but not torn down.
- bool HasBeenSetup() {
- return to_space_.HasBeenSetup() && from_space_.HasBeenSetup();
+ bool HasBeenSetUp() {
+ return to_space_.HasBeenSetUp() && from_space_.HasBeenSetUp();
}
// Flip the pair of spaces.
@@ -2063,7 +2100,7 @@ class NewSpace : public Space {
// Return the allocated bytes in the active semispace.
virtual intptr_t Size() {
- return pages_used_ * Page::kObjectAreaSize +
+ return pages_used_ * NewSpacePage::kAreaSize +
static_cast<int>(top() - to_space_.page_low());
}
@@ -2075,7 +2112,7 @@ class NewSpace : public Space {
// Return the current capacity of a semispace.
intptr_t EffectiveCapacity() {
SLOW_ASSERT(to_space_.Capacity() == from_space_.Capacity());
- return (to_space_.Capacity() / Page::kPageSize) * Page::kObjectAreaSize;
+ return (to_space_.Capacity() / Page::kPageSize) * NewSpacePage::kAreaSize;
}
// Return the current capacity of a semispace.
@@ -2140,9 +2177,7 @@ class NewSpace : public Space {
Address* allocation_top_address() { return &allocation_info_.top; }
Address* allocation_limit_address() { return &allocation_info_.limit; }
- MUST_USE_RESULT MaybeObject* AllocateRaw(int size_in_bytes) {
- return AllocateRawInternal(size_in_bytes);
- }
+ MUST_USE_RESULT INLINE(MaybeObject* AllocateRaw(int size_in_bytes));
// Reset the allocation pointer to the beginning of the active semispace.
void ResetAllocationInfo();
@@ -2268,8 +2303,7 @@ class NewSpace : public Space {
HistogramInfo* allocated_histogram_;
HistogramInfo* promoted_histogram_;
- // Implementation of AllocateRaw.
- MUST_USE_RESULT inline MaybeObject* AllocateRawInternal(int size_in_bytes);
+ MUST_USE_RESULT MaybeObject* SlowAllocateRaw(int size_in_bytes);
friend class SemiSpaceIterator;
@@ -2295,7 +2329,7 @@ class OldSpace : public PagedSpace {
// The limit of allocation for a page in this space.
virtual Address PageAllocationLimit(Page* page) {
- return page->ObjectAreaEnd();
+ return page->area_end();
}
public:
@@ -2324,12 +2358,12 @@ class FixedSpace : public PagedSpace {
: PagedSpace(heap, max_capacity, id, NOT_EXECUTABLE),
object_size_in_bytes_(object_size_in_bytes),
name_(name) {
- page_extra_ = Page::kObjectAreaSize % object_size_in_bytes;
+ page_extra_ = Page::kNonCodeObjectAreaSize % object_size_in_bytes;
}
// The limit of allocation for a page in this space.
virtual Address PageAllocationLimit(Page* page) {
- return page->ObjectAreaEnd() - page_extra_;
+ return page->area_end() - page_extra_;
}
int object_size_in_bytes() { return object_size_in_bytes_; }
@@ -2337,8 +2371,6 @@ class FixedSpace : public PagedSpace {
// Prepares for a mark-compact GC.
virtual void PrepareForMarkCompact();
- void MarkFreeListNodes() { free_list_.MarkNodes(); }
-
protected:
void ResetFreeList() {
free_list_.Reset();
@@ -2359,12 +2391,9 @@ class FixedSpace : public PagedSpace {
class MapSpace : public FixedSpace {
public:
// Creates a map space object with a maximum capacity.
- MapSpace(Heap* heap,
- intptr_t max_capacity,
- int max_map_space_pages,
- AllocationSpace id)
+ MapSpace(Heap* heap, intptr_t max_capacity, AllocationSpace id)
: FixedSpace(heap, max_capacity, id, Map::kSize, "map"),
- max_map_space_pages_(max_map_space_pages) {
+ max_map_space_pages_(kMaxMapPageIndex - 1) {
}
// Given an index, returns the page address.
@@ -2385,7 +2414,7 @@ class MapSpace : public FixedSpace {
#endif
private:
- static const int kMapsPerPage = Page::kObjectAreaSize / Map::kSize;
+ static const int kMapsPerPage = Page::kNonCodeObjectAreaSize / Map::kSize;
// Do map space compaction if there is a page gap.
int CompactionThreshold() {
@@ -2440,7 +2469,7 @@ class LargeObjectSpace : public Space {
virtual ~LargeObjectSpace() {}
// Initializes internal data structures.
- bool Setup();
+ bool SetUp();
// Releases internal resources, frees objects in this space.
void TearDown();
@@ -2475,9 +2504,9 @@ class LargeObjectSpace : public Space {
// space, may be slow.
MaybeObject* FindObject(Address a);
- // Finds a large object page containing the given pc, returns NULL
+ // Finds a large object page containing the given address, returns NULL
// if such a page doesn't exist.
- LargePage* FindPageContainingPc(Address pc);
+ LargePage* FindPage(Address a);
// Frees unmarked objects.
void FreeUnmarkedObjects();
@@ -2512,6 +2541,8 @@ class LargeObjectSpace : public Space {
intptr_t size_; // allocated bytes
int page_count_; // number of chunks
intptr_t objects_size_; // size of objects
+ // Map MemoryChunk::kAlignment-aligned chunks to large pages covering them
+ HashMap chunk_map_;
friend class LargeObjectIterator;
diff --git a/src/3rdparty/v8/src/store-buffer.cc b/src/3rdparty/v8/src/store-buffer.cc
index 7c8b5f2..3852155 100644
--- a/src/3rdparty/v8/src/store-buffer.cc
+++ b/src/3rdparty/v8/src/store-buffer.cc
@@ -41,6 +41,7 @@ StoreBuffer::StoreBuffer(Heap* heap)
old_start_(NULL),
old_limit_(NULL),
old_top_(NULL),
+ old_reserved_limit_(NULL),
old_buffer_is_sorted_(false),
old_buffer_is_filtered_(false),
during_gc_(false),
@@ -48,21 +49,37 @@ StoreBuffer::StoreBuffer(Heap* heap)
callback_(NULL),
may_move_store_buffer_entries_(true),
virtual_memory_(NULL),
- hash_map_1_(NULL),
- hash_map_2_(NULL) {
+ hash_set_1_(NULL),
+ hash_set_2_(NULL),
+ hash_sets_are_empty_(true) {
}
-void StoreBuffer::Setup() {
+void StoreBuffer::SetUp() {
virtual_memory_ = new VirtualMemory(kStoreBufferSize * 3);
uintptr_t start_as_int =
reinterpret_cast<uintptr_t>(virtual_memory_->address());
start_ =
reinterpret_cast<Address*>(RoundUp(start_as_int, kStoreBufferSize * 2));
- limit_ = start_ + (kStoreBufferSize / sizeof(*start_));
-
- old_top_ = old_start_ = new Address[kOldStoreBufferLength];
- old_limit_ = old_start_ + kOldStoreBufferLength;
+ limit_ = start_ + (kStoreBufferSize / kPointerSize);
+
+ old_virtual_memory_ =
+ new VirtualMemory(kOldStoreBufferLength * kPointerSize);
+ old_top_ = old_start_ =
+ reinterpret_cast<Address*>(old_virtual_memory_->address());
+ // Don't know the alignment requirements of the OS, but it is certainly not
+ // less than 0xfff.
+ ASSERT((reinterpret_cast<uintptr_t>(old_start_) & 0xfff) == 0);
+ int initial_length = static_cast<int>(OS::CommitPageSize() / kPointerSize);
+ ASSERT(initial_length > 0);
+ ASSERT(initial_length <= kOldStoreBufferLength);
+ old_limit_ = old_start_ + initial_length;
+ old_reserved_limit_ = old_start_ + kOldStoreBufferLength;
+
+ CHECK(old_virtual_memory_->Commit(
+ reinterpret_cast<void*>(old_start_),
+ (old_limit_ - old_start_) * kPointerSize,
+ false));
ASSERT(reinterpret_cast<Address>(start_) >= virtual_memory_->address());
ASSERT(reinterpret_cast<Address>(limit_) >= virtual_memory_->address());
@@ -76,24 +93,25 @@ void StoreBuffer::Setup() {
ASSERT((reinterpret_cast<uintptr_t>(limit_ - 1) & kStoreBufferOverflowBit) ==
0);
- virtual_memory_->Commit(reinterpret_cast<Address>(start_),
- kStoreBufferSize,
- false); // Not executable.
+ CHECK(virtual_memory_->Commit(reinterpret_cast<Address>(start_),
+ kStoreBufferSize,
+ false)); // Not executable.
heap_->public_set_store_buffer_top(start_);
- hash_map_1_ = new uintptr_t[kHashMapLength];
- hash_map_2_ = new uintptr_t[kHashMapLength];
+ hash_set_1_ = new uintptr_t[kHashSetLength];
+ hash_set_2_ = new uintptr_t[kHashSetLength];
+ hash_sets_are_empty_ = false;
- ZapHashTables();
+ ClearFilteringHashSets();
}
void StoreBuffer::TearDown() {
delete virtual_memory_;
- delete[] hash_map_1_;
- delete[] hash_map_2_;
- delete[] old_start_;
- old_start_ = old_top_ = old_limit_ = NULL;
+ delete old_virtual_memory_;
+ delete[] hash_set_1_;
+ delete[] hash_set_2_;
+ old_start_ = old_top_ = old_limit_ = old_reserved_limit_ = NULL;
start_ = limit_ = NULL;
heap_->public_set_store_buffer_top(start_);
}
@@ -132,7 +150,6 @@ static int CompareAddresses(const void* void_a, const void* void_b) {
void StoreBuffer::Uniq() {
- ASSERT(HashTablesAreZapped());
// Remove adjacent duplicates and cells that do not point at new space.
Address previous = NULL;
Address* write = old_start_;
@@ -150,7 +167,18 @@ void StoreBuffer::Uniq() {
}
-void StoreBuffer::HandleFullness() {
+void StoreBuffer::EnsureSpace(intptr_t space_needed) {
+ while (old_limit_ - old_top_ < space_needed &&
+ old_limit_ < old_reserved_limit_) {
+ size_t grow = old_limit_ - old_start_; // Double size.
+ CHECK(old_virtual_memory_->Commit(reinterpret_cast<void*>(old_limit_),
+ grow * kPointerSize,
+ false));
+ old_limit_ += grow;
+ }
+
+ if (old_limit_ - old_top_ >= space_needed) return;
+
if (old_buffer_is_filtered_) return;
ASSERT(may_move_store_buffer_entries_);
Compact();
@@ -245,13 +273,16 @@ void StoreBuffer::Filter(int flag) {
}
}
old_top_ = new_top;
+
+ // Filtering hash sets are inconsistent with the store buffer after this
+ // operation.
+ ClearFilteringHashSets();
}
void StoreBuffer::SortUniq() {
Compact();
if (old_buffer_is_sorted_) return;
- ZapHashTables();
qsort(reinterpret_cast<void*>(old_start_),
old_top_ - old_start_,
sizeof(*old_top_),
@@ -259,6 +290,10 @@ void StoreBuffer::SortUniq() {
Uniq();
old_buffer_is_sorted_ = true;
+
+ // Filtering hash sets are inconsistent with the store buffer after this
+ // operation.
+ ClearFilteringHashSets();
}
@@ -274,35 +309,23 @@ bool StoreBuffer::PrepareForIteration() {
if (page_has_scan_on_scavenge_flag) {
Filter(MemoryChunk::SCAN_ON_SCAVENGE);
}
- ZapHashTables();
+
+ // Filtering hash sets are inconsistent with the store buffer after
+ // iteration.
+ ClearFilteringHashSets();
+
return page_has_scan_on_scavenge_flag;
}
#ifdef DEBUG
void StoreBuffer::Clean() {
- ZapHashTables();
+ ClearFilteringHashSets();
Uniq(); // Also removes things that no longer point to new space.
CheckForFullBuffer();
}
-static bool Zapped(char* start, int size) {
- for (int i = 0; i < size; i++) {
- if (start[i] != 0) return false;
- }
- return true;
-}
-
-
-bool StoreBuffer::HashTablesAreZapped() {
- return Zapped(reinterpret_cast<char*>(hash_map_1_),
- sizeof(uintptr_t) * kHashMapLength) &&
- Zapped(reinterpret_cast<char*>(hash_map_2_),
- sizeof(uintptr_t) * kHashMapLength);
-}
-
-
static Address* in_store_buffer_1_element_cache = NULL;
@@ -330,18 +353,21 @@ bool StoreBuffer::CellIsInStoreBuffer(Address cell_address) {
#endif
-void StoreBuffer::ZapHashTables() {
- memset(reinterpret_cast<void*>(hash_map_1_),
- 0,
- sizeof(uintptr_t) * kHashMapLength);
- memset(reinterpret_cast<void*>(hash_map_2_),
- 0,
- sizeof(uintptr_t) * kHashMapLength);
+void StoreBuffer::ClearFilteringHashSets() {
+ if (!hash_sets_are_empty_) {
+ memset(reinterpret_cast<void*>(hash_set_1_),
+ 0,
+ sizeof(uintptr_t) * kHashSetLength);
+ memset(reinterpret_cast<void*>(hash_set_2_),
+ 0,
+ sizeof(uintptr_t) * kHashSetLength);
+ hash_sets_are_empty_ = true;
+ }
}
void StoreBuffer::GCPrologue() {
- ZapHashTables();
+ ClearFilteringHashSets();
during_gc_ = true;
}
@@ -427,14 +453,14 @@ void StoreBuffer::FindPointersToNewSpaceInRegion(
// Compute start address of the first map following given addr.
static inline Address MapStartAlign(Address addr) {
- Address page = Page::FromAddress(addr)->ObjectAreaStart();
+ Address page = Page::FromAddress(addr)->area_start();
return page + (((addr - page) + (Map::kSize - 1)) / Map::kSize * Map::kSize);
}
// Compute end address of the first map preceding given addr.
static inline Address MapEndAlign(Address addr) {
- Address page = Page::FromAllocationTop(addr)->ObjectAreaStart();
+ Address page = Page::FromAllocationTop(addr)->area_start();
return page + ((addr - page) / Map::kSize * Map::kSize);
}
@@ -497,8 +523,8 @@ void StoreBuffer::FindPointersToNewSpaceOnPage(
Page* page,
RegionCallback region_callback,
ObjectSlotCallback slot_callback) {
- Address visitable_start = page->ObjectAreaStart();
- Address end_of_page = page->ObjectAreaEnd();
+ Address visitable_start = page->area_start();
+ Address end_of_page = page->area_end();
Address visitable_end = visitable_start;
@@ -645,14 +671,13 @@ void StoreBuffer::Compact() {
// the worst case (compaction doesn't eliminate any pointers).
ASSERT(top <= limit_);
heap_->public_set_store_buffer_top(start_);
- if (top - start_ > old_limit_ - old_top_) {
- HandleFullness();
- }
+ EnsureSpace(top - start_);
ASSERT(may_move_store_buffer_entries_);
// Goes through the addresses in the store buffer attempting to remove
// duplicates. In the interest of speed this is a lossy operation. Some
- // duplicates will remain. We have two hash tables with different hash
+ // duplicates will remain. We have two hash sets with different hash
// functions to reduce the number of unnecessary clashes.
+ hash_sets_are_empty_ = false; // Hash sets are in use.
for (Address* current = start_; current < top; current++) {
ASSERT(!heap_->cell_space()->Contains(*current));
ASSERT(!heap_->code_space()->Contains(*current));
@@ -661,21 +686,21 @@ void StoreBuffer::Compact() {
// Shift out the last bits including any tags.
int_addr >>= kPointerSizeLog2;
int hash1 =
- ((int_addr ^ (int_addr >> kHashMapLengthLog2)) & (kHashMapLength - 1));
- if (hash_map_1_[hash1] == int_addr) continue;
- int hash2 =
- ((int_addr - (int_addr >> kHashMapLengthLog2)) & (kHashMapLength - 1));
- hash2 ^= hash2 >> (kHashMapLengthLog2 * 2);
- if (hash_map_2_[hash2] == int_addr) continue;
- if (hash_map_1_[hash1] == 0) {
- hash_map_1_[hash1] = int_addr;
- } else if (hash_map_2_[hash2] == 0) {
- hash_map_2_[hash2] = int_addr;
+ ((int_addr ^ (int_addr >> kHashSetLengthLog2)) & (kHashSetLength - 1));
+ if (hash_set_1_[hash1] == int_addr) continue;
+ uintptr_t hash2 = (int_addr - (int_addr >> kHashSetLengthLog2));
+ hash2 ^= hash2 >> (kHashSetLengthLog2 * 2);
+ hash2 &= (kHashSetLength - 1);
+ if (hash_set_2_[hash2] == int_addr) continue;
+ if (hash_set_1_[hash1] == 0) {
+ hash_set_1_[hash1] = int_addr;
+ } else if (hash_set_2_[hash2] == 0) {
+ hash_set_2_[hash2] = int_addr;
} else {
// Rather than slowing down we just throw away some entries. This will
// cause some duplicates to remain undetected.
- hash_map_1_[hash1] = int_addr;
- hash_map_2_[hash2] = 0;
+ hash_set_1_[hash1] = int_addr;
+ hash_set_2_[hash2] = 0;
}
old_buffer_is_sorted_ = false;
old_buffer_is_filtered_ = false;
@@ -688,9 +713,7 @@ void StoreBuffer::Compact() {
void StoreBuffer::CheckForFullBuffer() {
- if (old_limit_ - old_top_ < kStoreBufferSize * 2) {
- HandleFullness();
- }
+ EnsureSpace(kStoreBufferSize * 2);
}
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/store-buffer.h b/src/3rdparty/v8/src/store-buffer.h
index 61b97d9..951a9ca 100644
--- a/src/3rdparty/v8/src/store-buffer.h
+++ b/src/3rdparty/v8/src/store-buffer.h
@@ -54,7 +54,7 @@ class StoreBuffer {
inline Address TopAddress();
- void Setup();
+ void SetUp();
void TearDown();
// This is used by the mutator to enter addresses into the store buffer.
@@ -81,12 +81,12 @@ class StoreBuffer {
// surviving old-to-new pointers into the store buffer to rebuild it.
void IteratePointersToNewSpace(ObjectSlotCallback callback);
- static const int kStoreBufferOverflowBit = 1 << 16;
+ static const int kStoreBufferOverflowBit = 1 << (14 + kPointerSizeLog2);
static const int kStoreBufferSize = kStoreBufferOverflowBit;
static const int kStoreBufferLength = kStoreBufferSize / sizeof(Address);
static const int kOldStoreBufferLength = kStoreBufferLength * 16;
- static const int kHashMapLengthLog2 = 12;
- static const int kHashMapLength = 1 << kHashMapLengthLog2;
+ static const int kHashSetLengthLog2 = 12;
+ static const int kHashSetLength = 1 << kHashSetLengthLog2;
void Compact();
@@ -109,7 +109,7 @@ class StoreBuffer {
// been promoted. Rebuilds the store buffer completely if it overflowed.
void SortUniq();
- void HandleFullness();
+ void EnsureSpace(intptr_t space_needed);
void Verify();
bool PrepareForIteration();
@@ -134,6 +134,8 @@ class StoreBuffer {
Address* old_start_;
Address* old_limit_;
Address* old_top_;
+ Address* old_reserved_limit_;
+ VirtualMemory* old_virtual_memory_;
bool old_buffer_is_sorted_;
bool old_buffer_is_filtered_;
@@ -146,13 +148,18 @@ class StoreBuffer {
bool may_move_store_buffer_entries_;
VirtualMemory* virtual_memory_;
- uintptr_t* hash_map_1_;
- uintptr_t* hash_map_2_;
+
+ // Two hash sets used for filtering.
+ // If address is in the hash set then it is guaranteed to be in the
+ // old part of the store buffer.
+ uintptr_t* hash_set_1_;
+ uintptr_t* hash_set_2_;
+ bool hash_sets_are_empty_;
+
+ void ClearFilteringHashSets();
void CheckForFullBuffer();
void Uniq();
- void ZapHashTables();
- bool HashTablesAreZapped();
void ExemptPopularPages(int prime_sample_step, int threshold);
void FindPointersToNewSpaceInRegion(Address start,
diff --git a/src/3rdparty/v8/src/string-search.h b/src/3rdparty/v8/src/string-search.h
index 1223db0..8c3456a 100644
--- a/src/3rdparty/v8/src/string-search.h
+++ b/src/3rdparty/v8/src/string-search.h
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -242,9 +242,9 @@ int StringSearch<PatternChar, SubjectChar>::SingleCharSearch(
template <typename PatternChar, typename SubjectChar>
-static inline bool CharCompare(const PatternChar* pattern,
- const SubjectChar* subject,
- int length) {
+inline bool CharCompare(const PatternChar* pattern,
+ const SubjectChar* subject,
+ int length) {
ASSERT(length > 0);
int pos = 0;
do {
@@ -369,6 +369,10 @@ void StringSearch<PatternChar, SubjectChar>::PopulateBoyerMooreTable() {
shift_table[pattern_length] = 1;
suffix_table[pattern_length] = pattern_length + 1;
+ if (pattern_length <= start) {
+ return;
+ }
+
// Find suffixes.
PatternChar last_char = pattern[pattern_length - 1];
int suffix = pattern_length + 1;
@@ -555,10 +559,10 @@ int StringSearch<PatternChar, SubjectChar>::InitialSearch(
// object should be constructed once and the Search function then called
// for each search.
template <typename SubjectChar, typename PatternChar>
-static int SearchString(Isolate* isolate,
- Vector<const SubjectChar> subject,
- Vector<const PatternChar> pattern,
- int start_index) {
+int SearchString(Isolate* isolate,
+ Vector<const SubjectChar> subject,
+ Vector<const PatternChar> pattern,
+ int start_index) {
StringSearch<PatternChar, SubjectChar> search(isolate, pattern);
return search.Search(subject, start_index);
}
diff --git a/src/3rdparty/v8/src/string-stream.cc b/src/3rdparty/v8/src/string-stream.cc
index 8086cf9..35f7be5 100644
--- a/src/3rdparty/v8/src/string-stream.cc
+++ b/src/3rdparty/v8/src/string-stream.cc
@@ -350,29 +350,24 @@ void StringStream::PrintUsingMap(JSObject* js_object) {
}
DescriptorArray* descs = map->instance_descriptors();
for (int i = 0; i < descs->number_of_descriptors(); i++) {
- switch (descs->GetType(i)) {
- case FIELD: {
- Object* key = descs->GetKey(i);
- if (key->IsString() || key->IsNumber()) {
- int len = 3;
- if (key->IsString()) {
- len = String::cast(key)->length();
- }
- for (; len < 18; len++)
- Put(' ');
- if (key->IsString()) {
- Put(String::cast(key));
- } else {
- key->ShortPrint();
- }
- Add(": ");
- Object* value = js_object->FastPropertyAt(descs->GetFieldIndex(i));
- Add("%o\n", value);
+ if (descs->GetType(i) == FIELD) {
+ Object* key = descs->GetKey(i);
+ if (key->IsString() || key->IsNumber()) {
+ int len = 3;
+ if (key->IsString()) {
+ len = String::cast(key)->length();
}
+ for (; len < 18; len++)
+ Put(' ');
+ if (key->IsString()) {
+ Put(String::cast(key));
+ } else {
+ key->ShortPrint();
+ }
+ Add(": ");
+ Object* value = js_object->FastPropertyAt(descs->GetFieldIndex(i));
+ Add("%o\n", value);
}
- break;
- default:
- break;
}
}
}
diff --git a/src/3rdparty/v8/src/string.js b/src/3rdparty/v8/src/string.js
index be955c8..a464f7f 100644
--- a/src/3rdparty/v8/src/string.js
+++ b/src/3rdparty/v8/src/string.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2009 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -28,8 +28,8 @@
// This file relies on the fact that the following declaration has been made
// in runtime.js:
-// const $String = global.String;
-// const $NaN = 0/0;
+// var $String = global.String;
+// var $NaN = 0/0;
// Set the String function and constructor.
@@ -46,16 +46,18 @@
// ECMA-262 section 15.5.4.2
function StringToString() {
- if (!IS_STRING(this) && !IS_STRING_WRAPPER(this))
+ if (!IS_STRING(this) && !IS_STRING_WRAPPER(this)) {
throw new $TypeError('String.prototype.toString is not generic');
+ }
return %_ValueOf(this);
}
// ECMA-262 section 15.5.4.3
function StringValueOf() {
- if (!IS_STRING(this) && !IS_STRING_WRAPPER(this))
+ if (!IS_STRING(this) && !IS_STRING_WRAPPER(this)) {
throw new $TypeError('String.prototype.valueOf is not generic');
+ }
return %_ValueOf(this);
}
@@ -91,7 +93,8 @@ function StringCharCodeAt(pos) {
// ECMA-262, section 15.5.4.6
function StringConcat() {
if (IS_NULL_OR_UNDEFINED(this) && !IS_UNDETECTABLE(this)) {
- throw MakeTypeError("called_on_null_or_undefined", ["String.prototype.concat"]);
+ throw MakeTypeError("called_on_null_or_undefined",
+ ["String.prototype.concat"]);
}
var len = %_ArgumentsLength();
var this_as_string = TO_STRING_INLINE(this);
@@ -186,7 +189,9 @@ function StringMatch(regexp) {
if (!regexp.global) return RegExpExecNoTests(regexp, subject, 0);
%_Log('regexp', 'regexp-match,%0S,%1r', [subject, regexp]);
// lastMatchInfo is defined in regexp.js.
- return %StringMatch(subject, regexp, lastMatchInfo);
+ var result = %StringMatch(subject, regexp, lastMatchInfo);
+ if (result !== null) lastMatchInfoOverride = null;
+ return result;
}
// Non-regexp argument.
regexp = new $RegExp(regexp);
@@ -241,6 +246,15 @@ function StringReplace(search, replace) {
// Convert the search argument to a string and search for it.
search = TO_STRING_INLINE(search);
+ if (search.length == 1 &&
+ subject.length > 0xFF &&
+ IS_STRING(replace) &&
+ %StringIndexOf(replace, '$', 0) < 0) {
+ // Searching by traversing a cons string tree and replace with cons of
+ // slices works only when the replaced string is a single character, being
+ // replaced by a simple string and only pays off for long strings.
+ return %StringReplaceOneCharWithString(subject, search, replace);
+ }
var start = %StringIndexOf(subject, search, 0);
if (start < 0) return subject;
var end = start + search.length;
@@ -358,7 +372,7 @@ function ExpandReplacement(string, subject, matchInfo, builder) {
builder_elements.push(SubString(string, position, next));
}
}
-};
+}
// Compute the string of a given regular expression capture.
@@ -371,7 +385,7 @@ function CaptureString(string, lastCaptureInfo, index) {
if (start < 0) return;
var end = lastCaptureInfo[CAPTURE(scaled + 1)];
return SubString(string, start, end);
-};
+}
// Add the string of a given regular expression capture to the
@@ -384,7 +398,7 @@ function addCaptureString(builder, matchInfo, index) {
if (start < 0) return;
var end = matchInfo[CAPTURE(scaled + 1)];
builder.addSpecialSlice(start, end);
-};
+}
// TODO(lrn): This array will survive indefinitely if replace is never
// called again. However, it will be empty, since the contents are cleared
@@ -531,32 +545,37 @@ function StringSlice(start, end) {
var s_len = s.length;
var start_i = TO_INTEGER(start);
var end_i = s_len;
- if (end !== void 0)
+ if (end !== void 0) {
end_i = TO_INTEGER(end);
+ }
if (start_i < 0) {
start_i += s_len;
- if (start_i < 0)
+ if (start_i < 0) {
start_i = 0;
+ }
} else {
- if (start_i > s_len)
- start_i = s_len;
+ if (start_i > s_len) {
+ return '';
+ }
}
if (end_i < 0) {
end_i += s_len;
- if (end_i < 0)
- end_i = 0;
+ if (end_i < 0) {
+ return '';
+ }
} else {
- if (end_i > s_len)
+ if (end_i > s_len) {
end_i = s_len;
+ }
}
- var num_c = end_i - start_i;
- if (num_c < 0)
- num_c = 0;
+ if (end_i <= start_i) {
+ return '';
+ }
- return SubString(s, start_i, start_i + num_c);
+ return SubString(s, start_i, end_i);
}
@@ -570,11 +589,8 @@ function StringSplit(separator, limit) {
limit = (IS_UNDEFINED(limit)) ? 0xffffffff : TO_UINT32(limit);
// ECMA-262 says that if separator is undefined, the result should
- // be an array of size 1 containing the entire string. SpiderMonkey
- // and KJS have this behavior only when no separator is given. If
- // undefined is explicitly given, they convert it to a string and
- // use that. We do as SpiderMonkey and KJS.
- if (%_ArgumentsLength() === 0) {
+ // be an array of size 1 containing the entire string.
+ if (IS_UNDEFINED(separator)) {
return [subject];
}
@@ -596,6 +612,12 @@ function StringSplit(separator, limit) {
if (limit === 0) return [];
+ // Separator is a regular expression.
+ return StringSplitOnRegExp(subject, separator, limit, length);
+}
+
+
+function StringSplitOnRegExp(subject, separator, limit, length) {
%_Log('regexp', 'regexp-split,%0S,%1r', [subject, separator]);
if (length === 0) {
@@ -692,7 +714,7 @@ function StringSubstring(start, end) {
}
}
- return (start_i + 1 == end_i
+ return ((start_i + 1 == end_i)
? %_StringCharAt(s, start_i)
: %_SubString(s, start_i, end_i));
}
@@ -736,7 +758,7 @@ function StringSubstr(start, n) {
var end = start + len;
if (end > s.length) end = s.length;
- return (start + 1 == end
+ return ((start + 1 == end)
? %_StringCharAt(s, start)
: %_SubString(s, start, end));
}
@@ -836,7 +858,7 @@ function HtmlEscape(str) {
.replace(/>/g, "&gt;")
.replace(/"/g, "&quot;")
.replace(/'/g, "&#039;");
-};
+}
// Compatibility support for KJS.
@@ -957,7 +979,7 @@ function SetUpString() {
// Set up the non-enumerable functions on the String prototype object.
- InstallFunctionsOnHiddenPrototype($String.prototype, DONT_ENUM, $Array(
+ InstallFunctions($String.prototype, DONT_ENUM, $Array(
"valueOf", StringValueOf,
"toString", StringToString,
"charAt", StringCharAt,
diff --git a/src/3rdparty/v8/src/stub-cache.cc b/src/3rdparty/v8/src/stub-cache.cc
index 9e7fe16..3371b1b 100644
--- a/src/3rdparty/v8/src/stub-cache.cc
+++ b/src/3rdparty/v8/src/stub-cache.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -45,26 +45,13 @@ namespace internal {
StubCache::StubCache(Isolate* isolate) : isolate_(isolate) {
ASSERT(isolate == Isolate::Current());
- memset(primary_, 0, sizeof(primary_[0]) * StubCache::kPrimaryTableSize);
- memset(secondary_, 0, sizeof(secondary_[0]) * StubCache::kSecondaryTableSize);
}
-void StubCache::Initialize(bool create_heap_objects) {
+void StubCache::Initialize() {
ASSERT(IsPowerOf2(kPrimaryTableSize));
ASSERT(IsPowerOf2(kSecondaryTableSize));
- if (create_heap_objects) {
- HandleScope scope;
- Code* empty = isolate_->builtins()->builtin(Builtins::kIllegal);
- for (int i = 0; i < kPrimaryTableSize; i++) {
- primary_[i].key = heap()->empty_string();
- primary_[i].value = empty;
- }
- for (int j = 0; j < kSecondaryTableSize; j++) {
- secondary_[j].key = heap()->empty_string();
- secondary_[j].value = empty;
- }
- }
+ Clear();
}
@@ -90,14 +77,15 @@ Code* StubCache::Set(String* name, Map* map, Code* code) {
// Compute the primary entry.
int primary_offset = PrimaryOffset(name, flags, map);
Entry* primary = entry(primary_, primary_offset);
- Code* hit = primary->value;
+ Code* old_code = primary->value;
// If the primary entry has useful data in it, we retire it to the
// secondary cache before overwriting it.
- if (hit != isolate_->builtins()->builtin(Builtins::kIllegal)) {
- Code::Flags primary_flags = Code::RemoveTypeFromFlags(hit->flags());
- int secondary_offset =
- SecondaryOffset(primary->key, primary_flags, primary_offset);
+ if (old_code != isolate_->builtins()->builtin(Builtins::kIllegal)) {
+ Map* old_map = primary->map;
+ Code::Flags old_flags = Code::RemoveTypeFromFlags(old_code->flags());
+ int seed = PrimaryOffset(primary->key, old_flags, old_map);
+ int secondary_offset = SecondaryOffset(primary->key, old_flags, seed);
Entry* secondary = entry(secondary_, secondary_offset);
*secondary = *primary;
}
@@ -105,6 +93,8 @@ Code* StubCache::Set(String* name, Map* map, Code* code) {
// Update primary cache.
primary->key = name;
primary->value = code;
+ primary->map = map;
+ isolate()->counters()->megamorphic_stub_cache_updates()->Increment();
return code;
}
@@ -184,7 +174,7 @@ Handle<Code> StubCache::ComputeLoadCallback(Handle<String> name,
Handle<Code> StubCache::ComputeLoadConstant(Handle<String> name,
Handle<JSObject> receiver,
Handle<JSObject> holder,
- Handle<Object> value) {
+ Handle<JSFunction> value) {
ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
Code::Flags flags =
Code::ComputeMonomorphicFlags(Code::LOAD_IC, CONSTANT_FUNCTION);
@@ -266,7 +256,7 @@ Handle<Code> StubCache::ComputeKeyedLoadField(Handle<String> name,
Handle<Code> StubCache::ComputeKeyedLoadConstant(Handle<String> name,
Handle<JSObject> receiver,
Handle<JSObject> holder,
- Handle<Object> value) {
+ Handle<JSFunction> value) {
ASSERT(IC::GetCodeCacheForObject(*receiver, *holder) == OWN_MAP);
Code::Flags flags =
Code::ComputeMonomorphicFlags(Code::KEYED_LOAD_IC, CONSTANT_FUNCTION);
@@ -397,12 +387,16 @@ Handle<Code> StubCache::ComputeKeyedLoadOrStoreElement(
Handle<JSObject> receiver,
KeyedIC::StubKind stub_kind,
StrictModeFlag strict_mode) {
+ KeyedAccessGrowMode grow_mode =
+ KeyedIC::GetGrowModeFromStubKind(stub_kind);
+ Code::ExtraICState extra_state =
+ Code::ComputeExtraICState(grow_mode, strict_mode);
Code::Flags flags =
Code::ComputeMonomorphicFlags(
stub_kind == KeyedIC::LOAD ? Code::KEYED_LOAD_IC
: Code::KEYED_STORE_IC,
NORMAL,
- strict_mode);
+ extra_state);
Handle<String> name;
switch (stub_kind) {
case KeyedIC::LOAD:
@@ -411,6 +405,9 @@ Handle<Code> StubCache::ComputeKeyedLoadOrStoreElement(
case KeyedIC::STORE_NO_TRANSITION:
name = isolate()->factory()->KeyedStoreElementMonomorphic_symbol();
break;
+ case KeyedIC::STORE_AND_GROW_NO_TRANSITION:
+ name = isolate()->factory()->KeyedStoreAndGrowElementMonomorphic_symbol();
+ break;
default:
UNREACHABLE();
break;
@@ -426,8 +423,15 @@ Handle<Code> StubCache::ComputeKeyedLoadOrStoreElement(
code = compiler.CompileLoadElement(receiver_map);
break;
}
+ case KeyedIC::STORE_AND_GROW_NO_TRANSITION: {
+ KeyedStoreStubCompiler compiler(isolate_, strict_mode,
+ ALLOW_JSARRAY_GROWTH);
+ code = compiler.CompileStoreElement(receiver_map);
+ break;
+ }
case KeyedIC::STORE_NO_TRANSITION: {
- KeyedStoreStubCompiler compiler(isolate_, strict_mode);
+ KeyedStoreStubCompiler compiler(isolate_, strict_mode,
+ DO_NOT_ALLOW_JSARRAY_GROWTH);
code = compiler.CompileStoreElement(receiver_map);
break;
}
@@ -519,7 +523,8 @@ Handle<Code> StubCache::ComputeKeyedStoreField(Handle<String> name,
Handle<Object> probe(receiver->map()->FindInCodeCache(*name, flags));
if (probe->IsCode()) return Handle<Code>::cast(probe);
- KeyedStoreStubCompiler compiler(isolate(), strict_mode);
+ KeyedStoreStubCompiler compiler(isolate(), strict_mode,
+ DO_NOT_ALLOW_JSARRAY_GROWTH);
Handle<Code> code =
compiler.CompileStoreField(receiver, field_index, transition, name);
PROFILE(isolate_, CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, *code, *name));
@@ -678,11 +683,10 @@ Handle<Code> StubCache::ComputeCallGlobal(int argc,
static void FillCache(Isolate* isolate, Handle<Code> code) {
- Handle<NumberDictionary> dictionary =
- NumberDictionarySet(isolate->factory()->non_monomorphic_cache(),
- code->flags(),
- code,
- PropertyDetails(NONE, NORMAL));
+ Handle<UnseededNumberDictionary> dictionary =
+ UnseededNumberDictionary::Set(isolate->factory()->non_monomorphic_cache(),
+ code->flags(),
+ code);
isolate->heap()->public_set_non_monomorphic_cache(*dictionary);
}
@@ -697,7 +701,7 @@ Code* StubCache::FindCallInitialize(int argc,
Code::ComputeFlags(kind, UNINITIALIZED, extra_state, NORMAL, argc);
// Use raw_unchecked... so we don't get assert failures during GC.
- NumberDictionary* dictionary =
+ UnseededNumberDictionary* dictionary =
isolate()->heap()->raw_unchecked_non_monomorphic_cache();
int entry = dictionary->FindEntry(isolate(), flags);
ASSERT(entry != -1);
@@ -716,7 +720,8 @@ Handle<Code> StubCache::ComputeCallInitialize(int argc,
CallICBase::Contextual::encode(mode == RelocInfo::CODE_TARGET_CONTEXT);
Code::Flags flags =
Code::ComputeFlags(kind, UNINITIALIZED, extra_state, NORMAL, argc);
- Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache();
+ Handle<UnseededNumberDictionary> cache =
+ isolate_->factory()->non_monomorphic_cache();
int entry = cache->FindEntry(isolate_, flags);
if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));
@@ -744,7 +749,8 @@ Handle<Code> StubCache::ComputeCallPreMonomorphic(
Code::ExtraICState extra_state) {
Code::Flags flags =
Code::ComputeFlags(kind, PREMONOMORPHIC, extra_state, NORMAL, argc);
- Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache();
+ Handle<UnseededNumberDictionary> cache =
+ isolate_->factory()->non_monomorphic_cache();
int entry = cache->FindEntry(isolate_, flags);
if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));
@@ -757,16 +763,16 @@ Handle<Code> StubCache::ComputeCallPreMonomorphic(
Handle<Code> StubCache::ComputeCallNormal(int argc,
Code::Kind kind,
- Code::ExtraICState extra_state,
- bool has_qml_global_receiver) {
+ Code::ExtraICState extra_state) {
Code::Flags flags =
Code::ComputeFlags(kind, MONOMORPHIC, extra_state, NORMAL, argc);
- Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache();
+ Handle<UnseededNumberDictionary> cache =
+ isolate_->factory()->non_monomorphic_cache();
int entry = cache->FindEntry(isolate_, flags);
if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));
StubCompiler compiler(isolate_);
- Handle<Code> code = compiler.CompileCallNormal(flags, has_qml_global_receiver);
+ Handle<Code> code = compiler.CompileCallNormal(flags);
FillCache(isolate_, code);
return code;
}
@@ -777,7 +783,8 @@ Handle<Code> StubCache::ComputeCallArguments(int argc, Code::Kind kind) {
Code::Flags flags =
Code::ComputeFlags(kind, MEGAMORPHIC, Code::kNoExtraICState,
NORMAL, argc);
- Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache();
+ Handle<UnseededNumberDictionary> cache =
+ isolate_->factory()->non_monomorphic_cache();
int entry = cache->FindEntry(isolate_, flags);
if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));
@@ -795,7 +802,8 @@ Handle<Code> StubCache::ComputeCallMegamorphic(
Code::Flags flags =
Code::ComputeFlags(kind, MEGAMORPHIC, extra_state,
NORMAL, argc);
- Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache();
+ Handle<UnseededNumberDictionary> cache =
+ isolate_->factory()->non_monomorphic_cache();
int entry = cache->FindEntry(isolate_, flags);
if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));
@@ -814,7 +822,8 @@ Handle<Code> StubCache::ComputeCallMiss(int argc,
Code::Flags flags =
Code::ComputeFlags(kind, MONOMORPHIC_PROTOTYPE_FAILURE, extra_state,
NORMAL, argc, OWN_MAP);
- Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache();
+ Handle<UnseededNumberDictionary> cache =
+ isolate_->factory()->non_monomorphic_cache();
int entry = cache->FindEntry(isolate_, flags);
if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));
@@ -833,7 +842,8 @@ Handle<Code> StubCache::ComputeCallDebugBreak(int argc,
Code::Flags flags =
Code::ComputeFlags(kind, DEBUG_BREAK, Code::kNoExtraICState,
NORMAL, argc);
- Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache();
+ Handle<UnseededNumberDictionary> cache =
+ isolate_->factory()->non_monomorphic_cache();
int entry = cache->FindEntry(isolate_, flags);
if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));
@@ -851,7 +861,8 @@ Handle<Code> StubCache::ComputeCallDebugPrepareStepIn(int argc,
Code::Flags flags =
Code::ComputeFlags(kind, DEBUG_PREPARE_STEP_IN, Code::kNoExtraICState,
NORMAL, argc);
- Handle<NumberDictionary> cache = isolate_->factory()->non_monomorphic_cache();
+ Handle<UnseededNumberDictionary> cache =
+ isolate_->factory()->non_monomorphic_cache();
int entry = cache->FindEntry(isolate_, flags);
if (entry != -1) return Handle<Code>(Code::cast(cache->ValueAt(entry)));
@@ -878,7 +889,8 @@ void StubCache::Clear() {
void StubCache::CollectMatchingMaps(SmallMapList* types,
String* name,
- Code::Flags flags) {
+ Code::Flags flags,
+ Handle<Context> global_context) {
for (int i = 0; i < kPrimaryTableSize; i++) {
if (primary_[i].key == name) {
Map* map = primary_[i].value->FindFirstMap();
@@ -887,7 +899,8 @@ void StubCache::CollectMatchingMaps(SmallMapList* types,
if (map == NULL) continue;
int offset = PrimaryOffset(name, flags, map);
- if (entry(primary_, offset) == &primary_[i]) {
+ if (entry(primary_, offset) == &primary_[i] &&
+ !TypeFeedbackOracle::CanRetainOtherContext(map, *global_context)) {
types->Add(Handle<Map>(map));
}
}
@@ -910,7 +923,8 @@ void StubCache::CollectMatchingMaps(SmallMapList* types,
// Lookup in secondary table and add matches.
int offset = SecondaryOffset(name, flags, primary_offset);
- if (entry(secondary_, offset) == &secondary_[i]) {
+ if (entry(secondary_, offset) == &secondary_[i] &&
+ !TypeFeedbackOracle::CanRetainOtherContext(map, *global_context)) {
types->Add(Handle<Map>(map));
}
}
@@ -1163,15 +1177,13 @@ Handle<Code> StubCompiler::CompileCallPreMonomorphic(Code::Flags flags) {
}
-Handle<Code> StubCompiler::CompileCallNormal(Code::Flags flags, bool has_qml_global_receiver) {
+Handle<Code> StubCompiler::CompileCallNormal(Code::Flags flags) {
int argc = Code::ExtractArgumentsCountFromFlags(flags);
Code::Kind kind = Code::ExtractKindFromFlags(flags);
if (kind == Code::CALL_IC) {
- // Call normal is always with a explict receiver,
- // or with an implicit qml global receiver.
+ // Call normal is always with an explicit receiver.
ASSERT(!CallIC::Contextual::decode(
- Code::ExtractExtraICStateFromFlags(flags)) ||
- has_qml_global_receiver);
+ Code::ExtractExtraICStateFromFlags(flags)));
CallIC::GenerateNormal(masm(), argc);
} else {
KeyedCallIC::GenerateNormal(masm(), argc);
@@ -1342,8 +1354,10 @@ Handle<Code> StoreStubCompiler::GetCode(PropertyType type,
Handle<Code> KeyedStoreStubCompiler::GetCode(PropertyType type,
Handle<String> name,
InlineCacheState state) {
+ Code::ExtraICState extra_state =
+ Code::ComputeExtraICState(grow_mode_, strict_mode_);
Code::Flags flags =
- Code::ComputeFlags(Code::KEYED_STORE_IC, state, strict_mode_, type);
+ Code::ComputeFlags(Code::KEYED_STORE_IC, state, extra_state, type);
Handle<Code> code = GetCodeWithFlags(flags, name);
PROFILE(isolate(), CodeCreateEvent(Logger::KEYED_STORE_IC_TAG, *code, *name));
GDBJIT(AddCode(GDBJITInterface::KEYED_STORE_IC, *name, *code));
@@ -1445,13 +1459,13 @@ Handle<Code> ConstructStubCompiler::GetCode() {
CallOptimization::CallOptimization(LookupResult* lookup) {
- if (!lookup->IsProperty() ||
- !lookup->IsCacheable() ||
- lookup->type() != CONSTANT_FUNCTION) {
- Initialize(Handle<JSFunction>::null());
- } else {
+ if (lookup->IsFound() &&
+ lookup->IsCacheable() &&
+ lookup->type() == CONSTANT_FUNCTION) {
// We only optimize constant function calls.
Initialize(Handle<JSFunction>(lookup->GetConstantFunction()));
+ } else {
+ Initialize(Handle<JSFunction>::null());
}
}
diff --git a/src/3rdparty/v8/src/stub-cache.h b/src/3rdparty/v8/src/stub-cache.h
index 725c70c..29bdb61 100644
--- a/src/3rdparty/v8/src/stub-cache.h
+++ b/src/3rdparty/v8/src/stub-cache.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -69,9 +69,10 @@ class StubCache {
struct Entry {
String* key;
Code* value;
+ Map* map;
};
- void Initialize(bool create_heap_objects);
+ void Initialize();
// Computes the right stub matching. Inserts the result in the
@@ -92,7 +93,7 @@ class StubCache {
Handle<Code> ComputeLoadConstant(Handle<String> name,
Handle<JSObject> receiver,
Handle<JSObject> holder,
- Handle<Object> value);
+ Handle<JSFunction> value);
Handle<Code> ComputeLoadInterceptor(Handle<String> name,
Handle<JSObject> receiver,
@@ -121,7 +122,7 @@ class StubCache {
Handle<Code> ComputeKeyedLoadConstant(Handle<String> name,
Handle<JSObject> receiver,
Handle<JSObject> holder,
- Handle<Object> value);
+ Handle<JSFunction> value);
Handle<Code> ComputeKeyedLoadInterceptor(Handle<String> name,
Handle<JSObject> receiver,
@@ -218,8 +219,7 @@ class StubCache {
Handle<Code> ComputeCallNormal(int argc,
Code::Kind kind,
- Code::ExtraICState state,
- bool has_qml_global_receiver);
+ Code::ExtraICState state);
Handle<Code> ComputeCallArguments(int argc, Code::Kind kind);
@@ -249,10 +249,11 @@ class StubCache {
// Collect all maps that match the name and flags.
void CollectMatchingMaps(SmallMapList* types,
String* name,
- Code::Flags flags);
+ Code::Flags flags,
+ Handle<Context> global_context);
// Generate code for probing the stub cache table.
- // Arguments extra and extra2 may be used to pass additional scratch
+ // Arguments extra, extra2 and extra3 may be used to pass additional scratch
// registers. Set to no_reg if not needed.
void GenerateProbe(MacroAssembler* masm,
Code::Flags flags,
@@ -260,7 +261,8 @@ class StubCache {
Register name,
Register scratch,
Register extra,
- Register extra2 = no_reg);
+ Register extra2 = no_reg,
+ Register extra3 = no_reg);
enum Table {
kPrimary,
@@ -274,6 +276,12 @@ class StubCache {
}
+ SCTableReference map_reference(StubCache::Table table) {
+ return SCTableReference(
+ reinterpret_cast<Address>(&first_entry(table)->map));
+ }
+
+
SCTableReference value_reference(StubCache::Table table) {
return SCTableReference(
reinterpret_cast<Address>(&first_entry(table)->value));
@@ -300,7 +308,16 @@ class StubCache {
RelocInfo::Mode mode,
Code::Kind kind);
- // Computes the hashed offsets for primary and secondary caches.
+ // The stub cache has a primary and secondary level. The two levels have
+ // different hashing algorithms in order to avoid simultaneous collisions
+ // in both caches. Unlike a probing strategy (quadratic or otherwise) the
+ // update strategy is fairly clear and simple: Any existing entry
+ // in the primary cache is moved to the secondary cache, and secondary cache
+ // entries are overwritten.
+
+ // Hash algorithm for the primary table. This algorithm is replicated in
+ // assembler for every architecture. Returns an index into the table that
+ // is scaled by 1 << kHeapObjectTagSize.
static int PrimaryOffset(String* name, Code::Flags flags, Map* map) {
// This works well because the heap object tag size and the hash
// shift are equal. Shifting down the length field to get the
@@ -324,27 +341,36 @@ class StubCache {
return key & ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
}
+ // Hash algorithm for the secondary table. This algorithm is replicated in
+ // assembler for every architecture. Returns an index into the table that
+ // is scaled by 1 << kHeapObjectTagSize.
static int SecondaryOffset(String* name, Code::Flags flags, int seed) {
// Use the seed from the primary cache in the secondary cache.
uint32_t string_low32bits =
static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name));
- uint32_t key = seed - string_low32bits + flags;
+ // We always set the in_loop bit to zero when generating the lookup code
+ // so do it here too so the hash codes match.
+ uint32_t iflags =
+ (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
+ uint32_t key = (seed - string_low32bits) + iflags;
return key & ((kSecondaryTableSize - 1) << kHeapObjectTagSize);
}
// Compute the entry for a given offset in exactly the same way as
// we do in generated code. We generate an hash code that already
- // ends in String::kHashShift 0s. Then we shift it so it is a multiple
+ // ends in String::kHashShift 0s. Then we multiply it so it is a multiple
// of sizeof(Entry). This makes it easier to avoid making mistakes
// in the hashed offset computations.
static Entry* entry(Entry* table, int offset) {
- const int shift_amount = kPointerSizeLog2 + 1 - String::kHashShift;
+ const int multiplier = sizeof(*table) >> String::kHashShift;
return reinterpret_cast<Entry*>(
- reinterpret_cast<Address>(table) + (offset << shift_amount));
+ reinterpret_cast<Address>(table) + offset * multiplier);
}
- static const int kPrimaryTableSize = 2048;
- static const int kSecondaryTableSize = 512;
+ static const int kPrimaryTableBits = 11;
+ static const int kPrimaryTableSize = (1 << kPrimaryTableBits);
+ static const int kSecondaryTableBits = 9;
+ static const int kSecondaryTableSize = (1 << kSecondaryTableBits);
Entry primary_[kPrimaryTableSize];
Entry secondary_[kSecondaryTableSize];
@@ -384,7 +410,7 @@ class StubCompiler BASE_EMBEDDED {
// is extracted from the code flags.
Handle<Code> CompileCallInitialize(Code::Flags flags);
Handle<Code> CompileCallPreMonomorphic(Code::Flags flags);
- Handle<Code> CompileCallNormal(Code::Flags flags, bool has_qml_global_receiver);
+ Handle<Code> CompileCallNormal(Code::Flags flags);
Handle<Code> CompileCallMegamorphic(Code::Flags flags);
Handle<Code> CompileCallArguments(Code::Flags flags);
Handle<Code> CompileCallMiss(Code::Flags flags);
@@ -519,7 +545,7 @@ class StubCompiler BASE_EMBEDDED {
Register scratch1,
Register scratch2,
Register scratch3,
- Handle<Object> value,
+ Handle<JSFunction> value,
Handle<String> name,
Label* miss);
@@ -569,7 +595,7 @@ class LoadStubCompiler: public StubCompiler {
Handle<Code> CompileLoadConstant(Handle<JSObject> object,
Handle<JSObject> holder,
- Handle<Object> value,
+ Handle<JSFunction> value,
Handle<String> name);
Handle<Code> CompileLoadInterceptor(Handle<JSObject> object,
@@ -604,7 +630,7 @@ class KeyedLoadStubCompiler: public StubCompiler {
Handle<Code> CompileLoadConstant(Handle<String> name,
Handle<JSObject> object,
Handle<JSObject> holder,
- Handle<Object> value);
+ Handle<JSFunction> value);
Handle<Code> CompileLoadInterceptor(Handle<JSObject> object,
Handle<JSObject> holder,
@@ -668,8 +694,12 @@ class StoreStubCompiler: public StubCompiler {
class KeyedStoreStubCompiler: public StubCompiler {
public:
- KeyedStoreStubCompiler(Isolate* isolate, StrictModeFlag strict_mode)
- : StubCompiler(isolate), strict_mode_(strict_mode) { }
+ KeyedStoreStubCompiler(Isolate* isolate,
+ StrictModeFlag strict_mode,
+ KeyedAccessGrowMode grow_mode)
+ : StubCompiler(isolate),
+ strict_mode_(strict_mode),
+ grow_mode_(grow_mode) { }
Handle<Code> CompileStoreField(Handle<JSObject> object,
int index,
@@ -684,10 +714,12 @@ class KeyedStoreStubCompiler: public StubCompiler {
static void GenerateStoreFastElement(MacroAssembler* masm,
bool is_js_array,
- ElementsKind element_kind);
+ ElementsKind element_kind,
+ KeyedAccessGrowMode grow_mode);
static void GenerateStoreFastDoubleElement(MacroAssembler* masm,
- bool is_js_array);
+ bool is_js_array,
+ KeyedAccessGrowMode grow_mode);
static void GenerateStoreExternalArray(MacroAssembler* masm,
ElementsKind elements_kind);
@@ -700,6 +732,7 @@ class KeyedStoreStubCompiler: public StubCompiler {
InlineCacheState state = MONOMORPHIC);
StrictModeFlag strict_mode_;
+ KeyedAccessGrowMode grow_mode_;
};
diff --git a/src/3rdparty/v8/src/token.h b/src/3rdparty/v8/src/token.h
index 7a2156c..3036e55 100644
--- a/src/3rdparty/v8/src/token.h
+++ b/src/3rdparty/v8/src/token.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -170,6 +170,8 @@ namespace internal {
T(FUTURE_RESERVED_WORD, NULL, 0) \
T(FUTURE_STRICT_RESERVED_WORD, NULL, 0) \
K(CONST, "const", 0) \
+ K(EXPORT, "export", 0) \
+ K(IMPORT, "import", 0) \
K(LET, "let", 0) \
\
/* Illegal token - not able to scan. */ \
@@ -213,7 +215,7 @@ class Token {
return EQ <= op && op <= IN;
}
- static bool IsOrderedCompareOp(Value op) {
+ static bool IsOrderedRelationalCompareOp(Value op) {
return op == LT || op == LTE || op == GT || op == GTE;
}
diff --git a/src/3rdparty/v8/src/type-info.cc b/src/3rdparty/v8/src/type-info.cc
index afec71a..159be6a 100644
--- a/src/3rdparty/v8/src/type-info.cc
+++ b/src/3rdparty/v8/src/type-info.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -71,12 +71,23 @@ TypeFeedbackOracle::TypeFeedbackOracle(Handle<Code> code,
Handle<Object> TypeFeedbackOracle::GetInfo(unsigned ast_id) {
int entry = dictionary_->FindEntry(ast_id);
- return entry != NumberDictionary::kNotFound
+ return entry != UnseededNumberDictionary::kNotFound
? Handle<Object>(dictionary_->ValueAt(entry))
: Handle<Object>::cast(isolate_->factory()->undefined_value());
}
+bool TypeFeedbackOracle::LoadIsUninitialized(Property* expr) {
+ Handle<Object> map_or_code = GetInfo(expr->id());
+ if (map_or_code->IsMap()) return false;
+ if (map_or_code->IsCode()) {
+ Handle<Code> code = Handle<Code>::cast(map_or_code);
+ return code->is_inline_cache_stub() && code->ic_state() == UNINITIALIZED;
+ }
+ return false;
+}
+
+
bool TypeFeedbackOracle::LoadIsMonomorphicNormal(Property* expr) {
Handle<Object> map_or_code = GetInfo(expr->id());
if (map_or_code->IsMap()) return true;
@@ -85,7 +96,8 @@ bool TypeFeedbackOracle::LoadIsMonomorphicNormal(Property* expr) {
return code->is_keyed_load_stub() &&
code->ic_state() == MONOMORPHIC &&
Code::ExtractTypeFromFlags(code->flags()) == NORMAL &&
- code->FindFirstMap() != NULL;
+ code->FindFirstMap() != NULL &&
+ !CanRetainOtherContext(code->FindFirstMap(), *global_context_);
}
return false;
}
@@ -109,9 +121,15 @@ bool TypeFeedbackOracle::StoreIsMonomorphicNormal(Expression* expr) {
if (map_or_code->IsMap()) return true;
if (map_or_code->IsCode()) {
Handle<Code> code = Handle<Code>::cast(map_or_code);
+ bool allow_growth =
+ Code::GetKeyedAccessGrowMode(code->extra_ic_state()) ==
+ ALLOW_JSARRAY_GROWTH;
return code->is_keyed_store_stub() &&
+ !allow_growth &&
code->ic_state() == MONOMORPHIC &&
- Code::ExtractTypeFromFlags(code->flags()) == NORMAL;
+ Code::ExtractTypeFromFlags(code->flags()) == NORMAL &&
+ code->FindFirstMap() != NULL &&
+ !CanRetainOtherContext(code->FindFirstMap(), *global_context_);
}
return false;
}
@@ -122,7 +140,11 @@ bool TypeFeedbackOracle::StoreIsMegamorphicWithTypeInfo(Expression* expr) {
if (map_or_code->IsCode()) {
Handle<Code> code = Handle<Code>::cast(map_or_code);
Builtins* builtins = isolate_->builtins();
+ bool allow_growth =
+ Code::GetKeyedAccessGrowMode(code->extra_ic_state()) ==
+ ALLOW_JSARRAY_GROWTH;
return code->is_keyed_store_stub() &&
+ !allow_growth &&
*code != builtins->builtin(Builtins::kKeyedStoreIC_Generic) &&
*code != builtins->builtin(Builtins::kKeyedStoreIC_Generic_Strict) &&
code->ic_state() == MEGAMORPHIC;
@@ -137,6 +159,26 @@ bool TypeFeedbackOracle::CallIsMonomorphic(Call* expr) {
}
+bool TypeFeedbackOracle::CallNewIsMonomorphic(CallNew* expr) {
+ Handle<Object> value = GetInfo(expr->id());
+ return value->IsJSFunction();
+}
+
+
+bool TypeFeedbackOracle::ObjectLiteralStoreIsMonomorphic(
+ ObjectLiteral::Property* prop) {
+ Handle<Object> map_or_code = GetInfo(prop->key()->id());
+ return map_or_code->IsMap();
+}
+
+
+bool TypeFeedbackOracle::IsForInFastCase(ForInStatement* stmt) {
+ Handle<Object> value = GetInfo(stmt->PrepareId());
+ return value->IsSmi() &&
+ Smi::cast(*value)->value() == TypeFeedbackCells::kForInFastCaseMarker;
+}
+
+
Handle<Map> TypeFeedbackOracle::LoadMonomorphicReceiverType(Property* expr) {
ASSERT(LoadIsMonomorphicNormal(expr));
Handle<Object> map_or_code = GetInfo(expr->id());
@@ -144,7 +186,9 @@ Handle<Map> TypeFeedbackOracle::LoadMonomorphicReceiverType(Property* expr) {
Handle<Code> code = Handle<Code>::cast(map_or_code);
Map* first_map = code->FindFirstMap();
ASSERT(first_map != NULL);
- return Handle<Map>(first_map);
+ return CanRetainOtherContext(first_map, *global_context_)
+ ? Handle<Map>::null()
+ : Handle<Map>(first_map);
}
return Handle<Map>::cast(map_or_code);
}
@@ -155,7 +199,11 @@ Handle<Map> TypeFeedbackOracle::StoreMonomorphicReceiverType(Expression* expr) {
Handle<Object> map_or_code = GetInfo(expr->id());
if (map_or_code->IsCode()) {
Handle<Code> code = Handle<Code>::cast(map_or_code);
- return Handle<Map>(code->FindFirstMap());
+ Map* first_map = code->FindFirstMap();
+ ASSERT(first_map != NULL);
+ return CanRetainOtherContext(first_map, *global_context_)
+ ? Handle<Map>::null()
+ : Handle<Map>(first_map);
}
return Handle<Map>::cast(map_or_code);
}
@@ -233,6 +281,18 @@ Handle<JSFunction> TypeFeedbackOracle::GetCallTarget(Call* expr) {
}
+Handle<JSFunction> TypeFeedbackOracle::GetCallNewTarget(CallNew* expr) {
+ return Handle<JSFunction>::cast(GetInfo(expr->id()));
+}
+
+
+Handle<Map> TypeFeedbackOracle::GetObjectLiteralStoreMap(
+ ObjectLiteral::Property* prop) {
+ ASSERT(ObjectLiteralStoreIsMonomorphic(prop));
+ return Handle<Map>::cast(GetInfo(prop->key()->id()));
+}
+
+
bool TypeFeedbackOracle::LoadIsBuiltin(Property* expr, Builtins::Name id) {
return *GetInfo(expr->id()) ==
isolate_->builtins()->builtin(id);
@@ -259,6 +319,7 @@ TypeInfo TypeFeedbackOracle::CompareType(CompareOperation* expr) {
case CompareIC::STRINGS:
return TypeInfo::String();
case CompareIC::OBJECTS:
+ case CompareIC::KNOWN_OBJECTS:
// TODO(kasperl): We really need a type for JS objects here.
return TypeInfo::NonPrimitive();
case CompareIC::GENERIC:
@@ -278,6 +339,23 @@ bool TypeFeedbackOracle::IsSymbolCompare(CompareOperation* expr) {
}
+Handle<Map> TypeFeedbackOracle::GetCompareMap(CompareOperation* expr) {
+ Handle<Object> object = GetInfo(expr->id());
+ if (!object->IsCode()) return Handle<Map>::null();
+ Handle<Code> code = Handle<Code>::cast(object);
+ if (!code->is_compare_ic_stub()) return Handle<Map>::null();
+ CompareIC::State state = static_cast<CompareIC::State>(code->compare_state());
+ if (state != CompareIC::KNOWN_OBJECTS) {
+ return Handle<Map>::null();
+ }
+ Map* first_map = code->FindFirstMap();
+ ASSERT(first_map != NULL);
+ return CanRetainOtherContext(first_map, *global_context_)
+ ? Handle<Map>::null()
+ : Handle<Map>(first_map);
+}
+
+
TypeInfo TypeFeedbackOracle::UnaryType(UnaryOperation* expr) {
Handle<Object> object = GetInfo(expr->id());
TypeInfo unknown = TypeInfo::Unknown();
@@ -315,6 +393,10 @@ TypeInfo TypeFeedbackOracle::BinaryType(BinaryOperation* expr) {
case BinaryOpIC::SMI:
switch (result_type) {
case BinaryOpIC::UNINITIALIZED:
+ if (expr->op() == Token::DIV) {
+ return TypeInfo::Double();
+ }
+ return TypeInfo::Smi();
case BinaryOpIC::SMI:
return TypeInfo::Smi();
case BinaryOpIC::INT32:
@@ -360,9 +442,14 @@ TypeInfo TypeFeedbackOracle::SwitchType(CaseClause* clause) {
return unknown;
case CompareIC::SMIS:
return TypeInfo::Smi();
+ case CompareIC::STRINGS:
+ return TypeInfo::String();
+ case CompareIC::SYMBOLS:
+ return TypeInfo::Symbol();
case CompareIC::HEAP_NUMBERS:
return TypeInfo::Number();
case CompareIC::OBJECTS:
+ case CompareIC::KNOWN_OBJECTS:
// TODO(kasperl): We really need a type for JS objects here.
return TypeInfo::NonPrimitive();
case CompareIC::GENERIC:
@@ -415,14 +502,52 @@ void TypeFeedbackOracle::CollectReceiverTypes(unsigned ast_id,
ASSERT(Handle<Code>::cast(object)->ic_state() == MEGAMORPHIC);
} else if (object->IsMap()) {
types->Add(Handle<Map>::cast(object));
- } else if (Handle<Code>::cast(object)->ic_state() == MEGAMORPHIC) {
+ } else if (FLAG_collect_megamorphic_maps_from_stub_cache &&
+ Handle<Code>::cast(object)->ic_state() == MEGAMORPHIC) {
types->Reserve(4);
ASSERT(object->IsCode());
- isolate_->stub_cache()->CollectMatchingMaps(types, *name, flags);
+ isolate_->stub_cache()->CollectMatchingMaps(types,
+ *name,
+ flags,
+ global_context_);
}
}
+// Check if a map originates from a given global context. We use this
+// information to filter out maps from different context to avoid
+// retaining objects from different tabs in Chrome via optimized code.
+bool TypeFeedbackOracle::CanRetainOtherContext(Map* map,
+ Context* global_context) {
+ Object* constructor = NULL;
+ while (!map->prototype()->IsNull()) {
+ constructor = map->constructor();
+ if (!constructor->IsNull()) {
+ // If the constructor is not null or a JSFunction, we have to
+ // conservatively assume that it may retain a global context.
+ if (!constructor->IsJSFunction()) return true;
+ // Check if the constructor directly references a foreign context.
+ if (CanRetainOtherContext(JSFunction::cast(constructor),
+ global_context)) {
+ return true;
+ }
+ }
+ map = HeapObject::cast(map->prototype())->map();
+ }
+ constructor = map->constructor();
+ if (constructor->IsNull()) return false;
+ JSFunction* function = JSFunction::cast(constructor);
+ return CanRetainOtherContext(function, global_context);
+}
+
+
+bool TypeFeedbackOracle::CanRetainOtherContext(JSFunction* function,
+ Context* global_context) {
+ return function->context()->global() != global_context->global()
+ && function->context()->global() != global_context->builtins();
+}
+
+
static void AddMapIfMissing(Handle<Map> map, SmallMapList* list) {
for (int i = 0; i < list->length(); ++i) {
if (list->at(i).is_identical_to(map)) return;
@@ -444,7 +569,10 @@ void TypeFeedbackOracle::CollectKeyedReceiverTypes(unsigned ast_id,
RelocInfo* info = it.rinfo();
Object* object = info->target_object();
if (object->IsMap()) {
- AddMapIfMissing(Handle<Map>(Map::cast(object)), types);
+ Map* map = Map::cast(object);
+ if (!CanRetainOtherContext(map, *global_context_)) {
+ AddMapIfMissing(Handle<Map>(map), types);
+ }
}
}
}
@@ -468,6 +596,7 @@ void TypeFeedbackOracle::BuildDictionary(Handle<Code> code) {
GetRelocInfos(code, &infos);
CreateDictionary(code, &infos);
ProcessRelocInfos(&infos);
+ ProcessTypeFeedbackCells(code);
// Allocate handle in the parent scope.
dictionary_ = scope.CloseAndEscape(dictionary_);
}
@@ -485,8 +614,13 @@ void TypeFeedbackOracle::GetRelocInfos(Handle<Code> code,
void TypeFeedbackOracle::CreateDictionary(Handle<Code> code,
ZoneList<RelocInfo>* infos) {
DisableAssertNoAllocation allocation_allowed;
+ int cell_count = code->type_feedback_info()->IsTypeFeedbackInfo()
+ ? TypeFeedbackInfo::cast(code->type_feedback_info())->
+ type_feedback_cells()->CellCount()
+ : 0;
+ int length = infos->length() + cell_count;
byte* old_start = code->instruction_start();
- dictionary_ = FACTORY->NewNumberDictionary(infos->length());
+ dictionary_ = FACTORY->NewUnseededNumberDictionary(length);
byte* new_start = code->instruction_start();
RelocateRelocInfos(infos, old_start, new_start);
}
@@ -519,9 +653,14 @@ void TypeFeedbackOracle::ProcessRelocInfos(ZoneList<RelocInfo>* infos) {
SetInfo(ast_id, Smi::FromInt(target->check_type()));
} else {
Object* map = target->FindFirstMap();
- SetInfo(ast_id, map == NULL ? static_cast<Object*>(target) : map);
+ if (map == NULL) {
+ SetInfo(ast_id, static_cast<Object*>(target));
+ } else if (!CanRetainOtherContext(Map::cast(map),
+ *global_context_)) {
+ SetInfo(ast_id, map);
+ }
}
- } else if (target->ic_state() == MEGAMORPHIC) {
+ } else {
SetInfo(ast_id, target);
}
break;
@@ -541,16 +680,6 @@ void TypeFeedbackOracle::ProcessRelocInfos(ZoneList<RelocInfo>* infos) {
SetInfo(ast_id, target);
break;
- case Code::STUB:
- if (target->major_key() == CodeStub::CallFunction &&
- target->has_function_cache()) {
- Object* value = CallFunctionStub::GetCachedValue(reloc_entry.pc());
- if (value->IsJSFunction()) {
- SetInfo(ast_id, value);
- }
- }
- break;
-
default:
break;
}
@@ -558,8 +687,26 @@ void TypeFeedbackOracle::ProcessRelocInfos(ZoneList<RelocInfo>* infos) {
}
+void TypeFeedbackOracle::ProcessTypeFeedbackCells(Handle<Code> code) {
+ Object* raw_info = code->type_feedback_info();
+ if (!raw_info->IsTypeFeedbackInfo()) return;
+ Handle<TypeFeedbackCells> cache(
+ TypeFeedbackInfo::cast(raw_info)->type_feedback_cells());
+ for (int i = 0; i < cache->CellCount(); i++) {
+ unsigned ast_id = cache->AstId(i)->value();
+ Object* value = cache->Cell(i)->value();
+ if (value->IsSmi() ||
+ (value->IsJSFunction() &&
+ !CanRetainOtherContext(JSFunction::cast(value),
+ *global_context_))) {
+ SetInfo(ast_id, value);
+ }
+ }
+}
+
+
void TypeFeedbackOracle::SetInfo(unsigned ast_id, Object* target) {
- ASSERT(dictionary_->FindEntry(ast_id) == NumberDictionary::kNotFound);
+ ASSERT(dictionary_->FindEntry(ast_id) == UnseededNumberDictionary::kNotFound);
MaybeObject* maybe_result = dictionary_->AtNumberPut(ast_id, target);
USE(maybe_result);
#ifdef DEBUG
diff --git a/src/3rdparty/v8/src/type-info.h b/src/3rdparty/v8/src/type-info.h
index 2c3543e..d461331 100644
--- a/src/3rdparty/v8/src/type-info.h
+++ b/src/3rdparty/v8/src/type-info.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -29,6 +29,7 @@
#define V8_TYPE_INFO_H_
#include "allocation.h"
+#include "ast.h"
#include "globals.h"
#include "zone-inl.h"
@@ -64,6 +65,8 @@ class TypeInfo {
static TypeInfo Integer32() { return TypeInfo(kInteger32); }
// We know it's a Smi.
static TypeInfo Smi() { return TypeInfo(kSmi); }
+ // We know it's a Symbol.
+ static TypeInfo Symbol() { return TypeInfo(kSymbol); }
// We know it's a heap number.
static TypeInfo Double() { return TypeInfo(kDouble); }
// We know it's a string.
@@ -137,6 +140,16 @@ class TypeInfo {
return ((type_ & kSmi) == kSmi);
}
+ inline bool IsSymbol() {
+ ASSERT(type_ != kUninitialized);
+ return ((type_ & kSymbol) == kSymbol);
+ }
+
+ inline bool IsNonSymbol() {
+ ASSERT(type_ != kUninitialized);
+ return ((type_ & kSymbol) == kString);
+ }
+
inline bool IsInteger32() {
ASSERT(type_ != kUninitialized);
return ((type_ & kInteger32) == kInteger32);
@@ -168,6 +181,7 @@ class TypeInfo {
case kNumber: return "Number";
case kInteger32: return "Integer32";
case kSmi: return "Smi";
+ case kSymbol: return "Symbol";
case kDouble: return "Double";
case kString: return "String";
case kNonPrimitive: return "Object";
@@ -186,6 +200,7 @@ class TypeInfo {
kSmi = 0x17, // 0010111
kDouble = 0x19, // 0011001
kString = 0x30, // 0110000
+ kSymbol = 0x32, // 0110010
kNonPrimitive = 0x40, // 1000000
kUninitialized = 0x7f // 1111111
};
@@ -205,13 +220,16 @@ enum StringStubFeedback {
class Assignment;
class BinaryOperation;
class Call;
+class CallNew;
class CaseClause;
class CompareOperation;
class CompilationInfo;
class CountOperation;
+class Expression;
class Property;
class SmallMapList;
class UnaryOperation;
+class ForInStatement;
class TypeFeedbackOracle BASE_EMBEDDED {
@@ -221,10 +239,15 @@ class TypeFeedbackOracle BASE_EMBEDDED {
Isolate* isolate);
bool LoadIsMonomorphicNormal(Property* expr);
+ bool LoadIsUninitialized(Property* expr);
bool LoadIsMegamorphicWithTypeInfo(Property* expr);
bool StoreIsMonomorphicNormal(Expression* expr);
bool StoreIsMegamorphicWithTypeInfo(Expression* expr);
bool CallIsMonomorphic(Call* expr);
+ bool CallNewIsMonomorphic(CallNew* expr);
+ bool ObjectLiteralStoreIsMonomorphic(ObjectLiteral::Property* prop);
+
+ bool IsForInFastCase(ForInStatement* expr);
Handle<Map> LoadMonomorphicReceiverType(Property* expr);
Handle<Map> StoreMonomorphicReceiverType(Expression* expr);
@@ -242,10 +265,17 @@ class TypeFeedbackOracle BASE_EMBEDDED {
void CollectKeyedReceiverTypes(unsigned ast_id,
SmallMapList* types);
+ static bool CanRetainOtherContext(Map* map, Context* global_context);
+ static bool CanRetainOtherContext(JSFunction* function,
+ Context* global_context);
+
CheckType GetCallCheckType(Call* expr);
Handle<JSObject> GetPrototypeForPrimitiveCheck(CheckType check);
Handle<JSFunction> GetCallTarget(Call* expr);
+ Handle<JSFunction> GetCallNewTarget(CallNew* expr);
+
+ Handle<Map> GetObjectLiteralStoreMap(ObjectLiteral::Property* prop);
bool LoadIsBuiltin(Property* expr, Builtins::Name id);
@@ -259,6 +289,7 @@ class TypeFeedbackOracle BASE_EMBEDDED {
TypeInfo BinaryType(BinaryOperation* expr);
TypeInfo CompareType(CompareOperation* expr);
bool IsSymbolCompare(CompareOperation* expr);
+ Handle<Map> GetCompareMap(CompareOperation* expr);
TypeInfo SwitchType(CaseClause* clause);
TypeInfo IncrementType(CountOperation* expr);
@@ -277,6 +308,7 @@ class TypeFeedbackOracle BASE_EMBEDDED {
byte* old_start,
byte* new_start);
void ProcessRelocInfos(ZoneList<RelocInfo>* infos);
+ void ProcessTypeFeedbackCells(Handle<Code> code);
// Returns an element from the backing store. Returns undefined if
// there is no information.
@@ -284,7 +316,7 @@ class TypeFeedbackOracle BASE_EMBEDDED {
Handle<Context> global_context_;
Isolate* isolate_;
- Handle<NumberDictionary> dictionary_;
+ Handle<UnseededNumberDictionary> dictionary_;
DISALLOW_COPY_AND_ASSIGN(TypeFeedbackOracle);
};
diff --git a/src/3rdparty/v8/src/unicode-inl.h b/src/3rdparty/v8/src/unicode-inl.h
index c0649d7..9c0ebf9 100644
--- a/src/3rdparty/v8/src/unicode-inl.h
+++ b/src/3rdparty/v8/src/unicode-inl.h
@@ -78,7 +78,7 @@ template <class T, int s> int Mapping<T, s>::CalculateValue(uchar c, uchar n,
}
-unsigned Utf8::Encode(char* str, uchar c) {
+unsigned Utf8::Encode(char* str, uchar c, int previous) {
static const int kMask = ~(1 << 6);
if (c <= kMaxOneByteChar) {
str[0] = c;
@@ -88,6 +88,13 @@ unsigned Utf8::Encode(char* str, uchar c) {
str[1] = 0x80 | (c & kMask);
return 2;
} else if (c <= kMaxThreeByteChar) {
+ if (Utf16::IsTrailSurrogate(c) &&
+ Utf16::IsLeadSurrogate(previous)) {
+ const int kUnmatchedSize = kSizeOfUnmatchedSurrogate;
+ return Encode(str - kUnmatchedSize,
+ Utf16::CombineSurrogatePair(previous, c),
+ Utf16::kNoPreviousCharacter) - kUnmatchedSize;
+ }
str[0] = 0xE0 | (c >> 12);
str[1] = 0x80 | ((c >> 6) & kMask);
str[2] = 0x80 | (c & kMask);
@@ -113,12 +120,16 @@ uchar Utf8::ValueOf(const byte* bytes, unsigned length, unsigned* cursor) {
return CalculateValue(bytes, length, cursor);
}
-unsigned Utf8::Length(uchar c) {
+unsigned Utf8::Length(uchar c, int previous) {
if (c <= kMaxOneByteChar) {
return 1;
} else if (c <= kMaxTwoByteChar) {
return 2;
} else if (c <= kMaxThreeByteChar) {
+ if (Utf16::IsTrailSurrogate(c) &&
+ Utf16::IsLeadSurrogate(previous)) {
+ return kSizeOfUnmatchedSurrogate - kBytesSavedByCombiningSurrogates;
+ }
return 3;
} else {
return 4;
diff --git a/src/3rdparty/v8/src/unicode.cc b/src/3rdparty/v8/src/unicode.cc
index 6e0ac1a..14f3806 100644
--- a/src/3rdparty/v8/src/unicode.cc
+++ b/src/3rdparty/v8/src/unicode.cc
@@ -1,4 +1,4 @@
-// Copyright 2007-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
-// This file was generated at 2011-01-03 10:57:02.088925
+// This file was generated at 2012-03-06 09:55:58.934483
#include "unicode-inl.h"
#include <stdlib.h>
@@ -210,7 +210,7 @@ static int LookupMapping(const int32_t* table,
uchar Utf8::CalculateValue(const byte* str,
unsigned length,
unsigned* cursor) {
- // We only get called for non-ascii characters.
+ // We only get called for non-ASCII characters.
if (length == 1) {
*cursor += 1;
return kBadChar;
@@ -276,6 +276,7 @@ uchar Utf8::CalculateValue(const byte* str,
return kBadChar;
}
+
const byte* Utf8::ReadBlock(Buffer<const char*> str, byte* buffer,
unsigned capacity, unsigned* chars_read_ptr, unsigned* offset_ptr) {
unsigned offset = *offset_ptr;
@@ -286,8 +287,8 @@ const byte* Utf8::ReadBlock(Buffer<const char*> str, byte* buffer,
}
const byte* data = reinterpret_cast<const byte*>(str.data());
if (data[offset] <= kMaxOneByteChar) {
- // The next character is an ascii char so we scan forward over
- // the following ascii characters and return the next pure ascii
+ // The next character is an ASCII char so we scan forward over
+ // the following ASCII characters and return the next pure ASCII
// substring
const byte* result = data + offset;
offset++;
@@ -297,13 +298,13 @@ const byte* Utf8::ReadBlock(Buffer<const char*> str, byte* buffer,
*offset_ptr = offset;
return result;
} else {
- // The next character is non-ascii so we just fill the buffer
+ // The next character is non-ASCII so we just fill the buffer
unsigned cursor = 0;
unsigned chars_read = 0;
while (offset < str.length()) {
uchar c = data[offset];
if (c <= kMaxOneByteChar) {
- // Fast case for ascii characters
+ // Fast case for ASCII characters
if (!CharacterStream::EncodeAsciiCharacter(c,
buffer,
capacity,
@@ -338,6 +339,16 @@ unsigned CharacterStream::Length() {
return result;
}
+unsigned CharacterStream::Utf16Length() {
+ unsigned result = 0;
+ while (has_more()) {
+ uchar c = GetNext();
+ result += c > Utf16::kMaxNonSurrogateCharCode ? 2 : 1;
+ }
+ Rewind();
+ return result;
+}
+
void CharacterStream::Seek(unsigned position) {
Rewind();
for (unsigned i = 0; i < position; i++) {
@@ -347,8 +358,8 @@ void CharacterStream::Seek(unsigned position) {
// Uppercase: point.category == 'Lu'
-static const uint16_t kUppercaseTable0Size = 430;
-static const int32_t kUppercaseTable0[430] = {
+static const uint16_t kUppercaseTable0Size = 450;
+static const int32_t kUppercaseTable0[450] = {
1073741889, 90, 1073742016, 214, 1073742040, 222, 256, 258, // NOLINT
260, 262, 264, 266, 268, 270, 272, 274, // NOLINT
276, 278, 280, 282, 284, 286, 288, 290, // NOLINT
@@ -369,22 +380,24 @@ static const int32_t kUppercaseTable0[430] = {
530, 532, 534, 536, 538, 540, 542, 544, // NOLINT
546, 548, 550, 552, 554, 556, 558, 560, // NOLINT
562, 1073742394, 571, 1073742397, 574, 577, 1073742403, 582, // NOLINT
- 584, 586, 588, 590, 902, 1073742728, 906, 908, // NOLINT
- 1073742734, 911, 1073742737, 929, 1073742755, 939, 1073742802, 980, // NOLINT
- 984, 986, 988, 990, 992, 994, 996, 998, // NOLINT
- 1000, 1002, 1004, 1006, 1012, 1015, 1073742841, 1018, // NOLINT
- 1073742845, 1071, 1120, 1122, 1124, 1126, 1128, 1130, // NOLINT
- 1132, 1134, 1136, 1138, 1140, 1142, 1144, 1146, // NOLINT
- 1148, 1150, 1152, 1162, 1164, 1166, 1168, 1170, // NOLINT
- 1172, 1174, 1176, 1178, 1180, 1182, 1184, 1186, // NOLINT
- 1188, 1190, 1192, 1194, 1196, 1198, 1200, 1202, // NOLINT
- 1204, 1206, 1208, 1210, 1212, 1214, 1073743040, 1217, // NOLINT
- 1219, 1221, 1223, 1225, 1227, 1229, 1232, 1234, // NOLINT
- 1236, 1238, 1240, 1242, 1244, 1246, 1248, 1250, // NOLINT
- 1252, 1254, 1256, 1258, 1260, 1262, 1264, 1266, // NOLINT
- 1268, 1270, 1272, 1274, 1276, 1278, 1280, 1282, // NOLINT
- 1284, 1286, 1288, 1290, 1292, 1294, 1296, 1298, // NOLINT
- 1073743153, 1366, 1073746080, 4293, 7680, 7682, 7684, 7686, // NOLINT
+ 584, 586, 588, 590, 880, 882, 886, 902, // NOLINT
+ 1073742728, 906, 908, 1073742734, 911, 1073742737, 929, 1073742755, // NOLINT
+ 939, 975, 1073742802, 980, 984, 986, 988, 990, // NOLINT
+ 992, 994, 996, 998, 1000, 1002, 1004, 1006, // NOLINT
+ 1012, 1015, 1073742841, 1018, 1073742845, 1071, 1120, 1122, // NOLINT
+ 1124, 1126, 1128, 1130, 1132, 1134, 1136, 1138, // NOLINT
+ 1140, 1142, 1144, 1146, 1148, 1150, 1152, 1162, // NOLINT
+ 1164, 1166, 1168, 1170, 1172, 1174, 1176, 1178, // NOLINT
+ 1180, 1182, 1184, 1186, 1188, 1190, 1192, 1194, // NOLINT
+ 1196, 1198, 1200, 1202, 1204, 1206, 1208, 1210, // NOLINT
+ 1212, 1214, 1073743040, 1217, 1219, 1221, 1223, 1225, // NOLINT
+ 1227, 1229, 1232, 1234, 1236, 1238, 1240, 1242, // NOLINT
+ 1244, 1246, 1248, 1250, 1252, 1254, 1256, 1258, // NOLINT
+ 1260, 1262, 1264, 1266, 1268, 1270, 1272, 1274, // NOLINT
+ 1276, 1278, 1280, 1282, 1284, 1286, 1288, 1290, // NOLINT
+ 1292, 1294, 1296, 1298, 1300, 1302, 1304, 1306, // NOLINT
+ 1308, 1310, 1312, 1314, 1316, 1318, 1073743153, 1366, // NOLINT
+ 1073746080, 4293, 4295, 4301, 7680, 7682, 7684, 7686, // NOLINT
7688, 7690, 7692, 7694, 7696, 7698, 7700, 7702, // NOLINT
7704, 7706, 7708, 7710, 7712, 7714, 7716, 7718, // NOLINT
7720, 7722, 7724, 7726, 7728, 7730, 7732, 7734, // NOLINT
@@ -393,28 +406,44 @@ static const int32_t kUppercaseTable0[430] = {
7768, 7770, 7772, 7774, 7776, 7778, 7780, 7782, // NOLINT
7784, 7786, 7788, 7790, 7792, 7794, 7796, 7798, // NOLINT
7800, 7802, 7804, 7806, 7808, 7810, 7812, 7814, // NOLINT
- 7816, 7818, 7820, 7822, 7824, 7826, 7828, 7840, // NOLINT
- 7842, 7844, 7846, 7848, 7850, 7852, 7854, 7856, // NOLINT
- 7858, 7860, 7862, 7864, 7866, 7868, 7870, 7872, // NOLINT
- 7874, 7876, 7878, 7880, 7882, 7884, 7886, 7888, // NOLINT
- 7890, 7892, 7894, 7896, 7898, 7900, 7902, 7904, // NOLINT
- 7906, 7908, 7910, 7912, 7914, 7916, 7918, 7920, // NOLINT
- 7922, 7924, 7926, 7928, 1073749768, 7951, 1073749784, 7965, // NOLINT
- 1073749800, 7983, 1073749816, 7999, 1073749832, 8013, 8025, 8027, // NOLINT
- 8029, 8031, 1073749864, 8047, 1073749944, 8123, 1073749960, 8139, // NOLINT
- 1073749976, 8155, 1073749992, 8172, 1073750008, 8187 }; // NOLINT
-static const uint16_t kUppercaseTable1Size = 79;
-static const int32_t kUppercaseTable1[79] = {
+ 7816, 7818, 7820, 7822, 7824, 7826, 7828, 7838, // NOLINT
+ 7840, 7842, 7844, 7846, 7848, 7850, 7852, 7854, // NOLINT
+ 7856, 7858, 7860, 7862, 7864, 7866, 7868, 7870, // NOLINT
+ 7872, 7874, 7876, 7878, 7880, 7882, 7884, 7886, // NOLINT
+ 7888, 7890, 7892, 7894, 7896, 7898, 7900, 7902, // NOLINT
+ 7904, 7906, 7908, 7910, 7912, 7914, 7916, 7918, // NOLINT
+ 7920, 7922, 7924, 7926, 7928, 7930, 7932, 7934, // NOLINT
+ 1073749768, 7951, 1073749784, 7965, 1073749800, 7983, 1073749816, 7999, // NOLINT
+ 1073749832, 8013, 8025, 8027, 8029, 8031, 1073749864, 8047, // NOLINT
+ 1073749944, 8123, 1073749960, 8139, 1073749976, 8155, 1073749992, 8172, // NOLINT
+ 1073750008, 8187 }; // NOLINT
+static const uint16_t kUppercaseTable1Size = 86;
+static const int32_t kUppercaseTable1[86] = {
258, 263, 1073742091, 269, 1073742096, 274, 277, 1073742105, // NOLINT
285, 292, 294, 296, 1073742122, 301, 1073742128, 307, // NOLINT
1073742142, 319, 325, 387, 1073744896, 3118, 3168, 1073744994, // NOLINT
- 3172, 3175, 3177, 3179, 3189, 3200, 3202, 3204, // NOLINT
- 3206, 3208, 3210, 3212, 3214, 3216, 3218, 3220, // NOLINT
- 3222, 3224, 3226, 3228, 3230, 3232, 3234, 3236, // NOLINT
- 3238, 3240, 3242, 3244, 3246, 3248, 3250, 3252, // NOLINT
- 3254, 3256, 3258, 3260, 3262, 3264, 3266, 3268, // NOLINT
- 3270, 3272, 3274, 3276, 3278, 3280, 3282, 3284, // NOLINT
- 3286, 3288, 3290, 3292, 3294, 3296, 3298 }; // NOLINT
+ 3172, 3175, 3177, 3179, 1073745005, 3184, 3186, 3189, // NOLINT
+ 1073745022, 3200, 3202, 3204, 3206, 3208, 3210, 3212, // NOLINT
+ 3214, 3216, 3218, 3220, 3222, 3224, 3226, 3228, // NOLINT
+ 3230, 3232, 3234, 3236, 3238, 3240, 3242, 3244, // NOLINT
+ 3246, 3248, 3250, 3252, 3254, 3256, 3258, 3260, // NOLINT
+ 3262, 3264, 3266, 3268, 3270, 3272, 3274, 3276, // NOLINT
+ 3278, 3280, 3282, 3284, 3286, 3288, 3290, 3292, // NOLINT
+ 3294, 3296, 3298, 3307, 3309, 3314 }; // NOLINT
+static const uint16_t kUppercaseTable5Size = 91;
+static const int32_t kUppercaseTable5[91] = {
+ 1600, 1602, 1604, 1606, 1608, 1610, 1612, 1614, // NOLINT
+ 1616, 1618, 1620, 1622, 1624, 1626, 1628, 1630, // NOLINT
+ 1632, 1634, 1636, 1638, 1640, 1642, 1644, 1664, // NOLINT
+ 1666, 1668, 1670, 1672, 1674, 1676, 1678, 1680, // NOLINT
+ 1682, 1684, 1686, 1826, 1828, 1830, 1832, 1834, // NOLINT
+ 1836, 1838, 1842, 1844, 1846, 1848, 1850, 1852, // NOLINT
+ 1854, 1856, 1858, 1860, 1862, 1864, 1866, 1868, // NOLINT
+ 1870, 1872, 1874, 1876, 1878, 1880, 1882, 1884, // NOLINT
+ 1886, 1888, 1890, 1892, 1894, 1896, 1898, 1900, // NOLINT
+ 1902, 1913, 1915, 1073743741, 1918, 1920, 1922, 1924, // NOLINT
+ 1926, 1931, 1933, 1936, 1938, 1952, 1954, 1956, // NOLINT
+ 1958, 1960, 1962 }; // NOLINT
static const uint16_t kUppercaseTable7Size = 2;
static const int32_t kUppercaseTable7[2] = {
1073749793, 7994 }; // NOLINT
@@ -427,6 +456,9 @@ bool Uppercase::Is(uchar c) {
case 1: return LookupPredicate(kUppercaseTable1,
kUppercaseTable1Size,
c);
+ case 5: return LookupPredicate(kUppercaseTable5,
+ kUppercaseTable5Size,
+ c);
case 7: return LookupPredicate(kUppercaseTable7,
kUppercaseTable7Size,
c);
@@ -436,77 +468,93 @@ bool Uppercase::Is(uchar c) {
// Lowercase: point.category == 'Ll'
-static const uint16_t kLowercaseTable0Size = 449;
-static const int32_t kLowercaseTable0[449] = {
- 1073741921, 122, 170, 181, 186, 1073742047, 246, 1073742072, // NOLINT
- 255, 257, 259, 261, 263, 265, 267, 269, // NOLINT
- 271, 273, 275, 277, 279, 281, 283, 285, // NOLINT
- 287, 289, 291, 293, 295, 297, 299, 301, // NOLINT
- 303, 305, 307, 309, 1073742135, 312, 314, 316, // NOLINT
- 318, 320, 322, 324, 326, 1073742152, 329, 331, // NOLINT
- 333, 335, 337, 339, 341, 343, 345, 347, // NOLINT
- 349, 351, 353, 355, 357, 359, 361, 363, // NOLINT
- 365, 367, 369, 371, 373, 375, 378, 380, // NOLINT
- 1073742206, 384, 387, 389, 392, 1073742220, 397, 402, // NOLINT
- 405, 1073742233, 411, 414, 417, 419, 421, 424, // NOLINT
- 1073742250, 427, 429, 432, 436, 438, 1073742265, 442, // NOLINT
- 1073742269, 447, 454, 457, 460, 462, 464, 466, // NOLINT
- 468, 470, 472, 474, 1073742300, 477, 479, 481, // NOLINT
- 483, 485, 487, 489, 491, 493, 1073742319, 496, // NOLINT
- 499, 501, 505, 507, 509, 511, 513, 515, // NOLINT
- 517, 519, 521, 523, 525, 527, 529, 531, // NOLINT
- 533, 535, 537, 539, 541, 543, 545, 547, // NOLINT
- 549, 551, 553, 555, 557, 559, 561, 1073742387, // NOLINT
- 569, 572, 1073742399, 576, 578, 583, 585, 587, // NOLINT
- 589, 1073742415, 659, 1073742485, 687, 1073742715, 893, 912, // NOLINT
- 1073742764, 974, 1073742800, 977, 1073742805, 983, 985, 987, // NOLINT
- 989, 991, 993, 995, 997, 999, 1001, 1003, // NOLINT
- 1005, 1073742831, 1011, 1013, 1016, 1073742843, 1020, 1073742896, // NOLINT
- 1119, 1121, 1123, 1125, 1127, 1129, 1131, 1133, // NOLINT
- 1135, 1137, 1139, 1141, 1143, 1145, 1147, 1149, // NOLINT
- 1151, 1153, 1163, 1165, 1167, 1169, 1171, 1173, // NOLINT
- 1175, 1177, 1179, 1181, 1183, 1185, 1187, 1189, // NOLINT
- 1191, 1193, 1195, 1197, 1199, 1201, 1203, 1205, // NOLINT
- 1207, 1209, 1211, 1213, 1215, 1218, 1220, 1222, // NOLINT
- 1224, 1226, 1228, 1073743054, 1231, 1233, 1235, 1237, // NOLINT
- 1239, 1241, 1243, 1245, 1247, 1249, 1251, 1253, // NOLINT
- 1255, 1257, 1259, 1261, 1263, 1265, 1267, 1269, // NOLINT
- 1271, 1273, 1275, 1277, 1279, 1281, 1283, 1285, // NOLINT
- 1287, 1289, 1291, 1293, 1295, 1297, 1299, 1073743201, // NOLINT
- 1415, 1073749248, 7467, 1073749346, 7543, 1073749369, 7578, 7681, // NOLINT
- 7683, 7685, 7687, 7689, 7691, 7693, 7695, 7697, // NOLINT
- 7699, 7701, 7703, 7705, 7707, 7709, 7711, 7713, // NOLINT
- 7715, 7717, 7719, 7721, 7723, 7725, 7727, 7729, // NOLINT
- 7731, 7733, 7735, 7737, 7739, 7741, 7743, 7745, // NOLINT
- 7747, 7749, 7751, 7753, 7755, 7757, 7759, 7761, // NOLINT
- 7763, 7765, 7767, 7769, 7771, 7773, 7775, 7777, // NOLINT
- 7779, 7781, 7783, 7785, 7787, 7789, 7791, 7793, // NOLINT
- 7795, 7797, 7799, 7801, 7803, 7805, 7807, 7809, // NOLINT
- 7811, 7813, 7815, 7817, 7819, 7821, 7823, 7825, // NOLINT
- 7827, 1073749653, 7835, 7841, 7843, 7845, 7847, 7849, // NOLINT
- 7851, 7853, 7855, 7857, 7859, 7861, 7863, 7865, // NOLINT
- 7867, 7869, 7871, 7873, 7875, 7877, 7879, 7881, // NOLINT
- 7883, 7885, 7887, 7889, 7891, 7893, 7895, 7897, // NOLINT
- 7899, 7901, 7903, 7905, 7907, 7909, 7911, 7913, // NOLINT
- 7915, 7917, 7919, 7921, 7923, 7925, 7927, 7929, // NOLINT
- 1073749760, 7943, 1073749776, 7957, 1073749792, 7975, 1073749808, 7991, // NOLINT
- 1073749824, 8005, 1073749840, 8023, 1073749856, 8039, 1073749872, 8061, // NOLINT
- 1073749888, 8071, 1073749904, 8087, 1073749920, 8103, 1073749936, 8116, // NOLINT
- 1073749942, 8119, 8126, 1073749954, 8132, 1073749958, 8135, 1073749968, // NOLINT
- 8147, 1073749974, 8151, 1073749984, 8167, 1073750002, 8180, 1073750006, // NOLINT
- 8183 }; // NOLINT
-static const uint16_t kLowercaseTable1Size = 79;
-static const int32_t kLowercaseTable1[79] = {
- 113, 127, 266, 1073742094, 271, 275, 303, 308, // NOLINT
- 313, 1073742140, 317, 1073742150, 329, 334, 388, 1073744944, // NOLINT
- 3166, 3169, 1073744997, 3174, 3176, 3178, 3180, 3188, // NOLINT
- 1073745014, 3191, 3201, 3203, 3205, 3207, 3209, 3211, // NOLINT
+static const uint16_t kLowercaseTable0Size = 463;
+static const int32_t kLowercaseTable0[463] = {
+ 1073741921, 122, 181, 1073742047, 246, 1073742072, 255, 257, // NOLINT
+ 259, 261, 263, 265, 267, 269, 271, 273, // NOLINT
+ 275, 277, 279, 281, 283, 285, 287, 289, // NOLINT
+ 291, 293, 295, 297, 299, 301, 303, 305, // NOLINT
+ 307, 309, 1073742135, 312, 314, 316, 318, 320, // NOLINT
+ 322, 324, 326, 1073742152, 329, 331, 333, 335, // NOLINT
+ 337, 339, 341, 343, 345, 347, 349, 351, // NOLINT
+ 353, 355, 357, 359, 361, 363, 365, 367, // NOLINT
+ 369, 371, 373, 375, 378, 380, 1073742206, 384, // NOLINT
+ 387, 389, 392, 1073742220, 397, 402, 405, 1073742233, // NOLINT
+ 411, 414, 417, 419, 421, 424, 1073742250, 427, // NOLINT
+ 429, 432, 436, 438, 1073742265, 442, 1073742269, 447, // NOLINT
+ 454, 457, 460, 462, 464, 466, 468, 470, // NOLINT
+ 472, 474, 1073742300, 477, 479, 481, 483, 485, // NOLINT
+ 487, 489, 491, 493, 1073742319, 496, 499, 501, // NOLINT
+ 505, 507, 509, 511, 513, 515, 517, 519, // NOLINT
+ 521, 523, 525, 527, 529, 531, 533, 535, // NOLINT
+ 537, 539, 541, 543, 545, 547, 549, 551, // NOLINT
+ 553, 555, 557, 559, 561, 1073742387, 569, 572, // NOLINT
+ 1073742399, 576, 578, 583, 585, 587, 589, 1073742415, // NOLINT
+ 659, 1073742485, 687, 881, 883, 887, 1073742715, 893, // NOLINT
+ 912, 1073742764, 974, 1073742800, 977, 1073742805, 983, 985, // NOLINT
+ 987, 989, 991, 993, 995, 997, 999, 1001, // NOLINT
+ 1003, 1005, 1073742831, 1011, 1013, 1016, 1073742843, 1020, // NOLINT
+ 1073742896, 1119, 1121, 1123, 1125, 1127, 1129, 1131, // NOLINT
+ 1133, 1135, 1137, 1139, 1141, 1143, 1145, 1147, // NOLINT
+ 1149, 1151, 1153, 1163, 1165, 1167, 1169, 1171, // NOLINT
+ 1173, 1175, 1177, 1179, 1181, 1183, 1185, 1187, // NOLINT
+ 1189, 1191, 1193, 1195, 1197, 1199, 1201, 1203, // NOLINT
+ 1205, 1207, 1209, 1211, 1213, 1215, 1218, 1220, // NOLINT
+ 1222, 1224, 1226, 1228, 1073743054, 1231, 1233, 1235, // NOLINT
+ 1237, 1239, 1241, 1243, 1245, 1247, 1249, 1251, // NOLINT
+ 1253, 1255, 1257, 1259, 1261, 1263, 1265, 1267, // NOLINT
+ 1269, 1271, 1273, 1275, 1277, 1279, 1281, 1283, // NOLINT
+ 1285, 1287, 1289, 1291, 1293, 1295, 1297, 1299, // NOLINT
+ 1301, 1303, 1305, 1307, 1309, 1311, 1313, 1315, // NOLINT
+ 1317, 1319, 1073743201, 1415, 1073749248, 7467, 1073749355, 7543, // NOLINT
+ 1073749369, 7578, 7681, 7683, 7685, 7687, 7689, 7691, // NOLINT
+ 7693, 7695, 7697, 7699, 7701, 7703, 7705, 7707, // NOLINT
+ 7709, 7711, 7713, 7715, 7717, 7719, 7721, 7723, // NOLINT
+ 7725, 7727, 7729, 7731, 7733, 7735, 7737, 7739, // NOLINT
+ 7741, 7743, 7745, 7747, 7749, 7751, 7753, 7755, // NOLINT
+ 7757, 7759, 7761, 7763, 7765, 7767, 7769, 7771, // NOLINT
+ 7773, 7775, 7777, 7779, 7781, 7783, 7785, 7787, // NOLINT
+ 7789, 7791, 7793, 7795, 7797, 7799, 7801, 7803, // NOLINT
+ 7805, 7807, 7809, 7811, 7813, 7815, 7817, 7819, // NOLINT
+ 7821, 7823, 7825, 7827, 1073749653, 7837, 7839, 7841, // NOLINT
+ 7843, 7845, 7847, 7849, 7851, 7853, 7855, 7857, // NOLINT
+ 7859, 7861, 7863, 7865, 7867, 7869, 7871, 7873, // NOLINT
+ 7875, 7877, 7879, 7881, 7883, 7885, 7887, 7889, // NOLINT
+ 7891, 7893, 7895, 7897, 7899, 7901, 7903, 7905, // NOLINT
+ 7907, 7909, 7911, 7913, 7915, 7917, 7919, 7921, // NOLINT
+ 7923, 7925, 7927, 7929, 7931, 7933, 1073749759, 7943, // NOLINT
+ 1073749776, 7957, 1073749792, 7975, 1073749808, 7991, 1073749824, 8005, // NOLINT
+ 1073749840, 8023, 1073749856, 8039, 1073749872, 8061, 1073749888, 8071, // NOLINT
+ 1073749904, 8087, 1073749920, 8103, 1073749936, 8116, 1073749942, 8119, // NOLINT
+ 8126, 1073749954, 8132, 1073749958, 8135, 1073749968, 8147, 1073749974, // NOLINT
+ 8151, 1073749984, 8167, 1073750002, 8180, 1073750006, 8183 }; // NOLINT
+static const uint16_t kLowercaseTable1Size = 84;
+static const int32_t kLowercaseTable1[84] = {
+ 266, 1073742094, 271, 275, 303, 308, 313, 1073742140, // NOLINT
+ 317, 1073742150, 329, 334, 388, 1073744944, 3166, 3169, // NOLINT
+ 1073744997, 3174, 3176, 3178, 3180, 3185, 1073745011, 3188, // NOLINT
+ 1073745014, 3195, 3201, 3203, 3205, 3207, 3209, 3211, // NOLINT
3213, 3215, 3217, 3219, 3221, 3223, 3225, 3227, // NOLINT
3229, 3231, 3233, 3235, 3237, 3239, 3241, 3243, // NOLINT
3245, 3247, 3249, 3251, 3253, 3255, 3257, 3259, // NOLINT
3261, 3263, 3265, 3267, 3269, 3271, 3273, 3275, // NOLINT
3277, 3279, 3281, 3283, 3285, 3287, 3289, 3291, // NOLINT
- 3293, 3295, 3297, 1073745123, 3300, 1073745152, 3365 }; // NOLINT
+ 3293, 3295, 3297, 1073745123, 3300, 3308, 3310, 3315, // NOLINT
+ 1073745152, 3365, 3367, 3373 }; // NOLINT
+static const uint16_t kLowercaseTable5Size = 93;
+static const int32_t kLowercaseTable5[93] = {
+ 1601, 1603, 1605, 1607, 1609, 1611, 1613, 1615, // NOLINT
+ 1617, 1619, 1621, 1623, 1625, 1627, 1629, 1631, // NOLINT
+ 1633, 1635, 1637, 1639, 1641, 1643, 1645, 1665, // NOLINT
+ 1667, 1669, 1671, 1673, 1675, 1677, 1679, 1681, // NOLINT
+ 1683, 1685, 1687, 1827, 1829, 1831, 1833, 1835, // NOLINT
+ 1837, 1073743663, 1841, 1843, 1845, 1847, 1849, 1851, // NOLINT
+ 1853, 1855, 1857, 1859, 1861, 1863, 1865, 1867, // NOLINT
+ 1869, 1871, 1873, 1875, 1877, 1879, 1881, 1883, // NOLINT
+ 1885, 1887, 1889, 1891, 1893, 1895, 1897, 1899, // NOLINT
+ 1901, 1903, 1073743729, 1912, 1914, 1916, 1919, 1921, // NOLINT
+ 1923, 1925, 1927, 1932, 1934, 1937, 1939, 1953, // NOLINT
+ 1955, 1957, 1959, 1961, 2042 }; // NOLINT
static const uint16_t kLowercaseTable7Size = 6;
static const int32_t kLowercaseTable7[6] = {
1073748736, 6918, 1073748755, 6935, 1073749825, 8026 }; // NOLINT
@@ -519,6 +567,9 @@ bool Lowercase::Is(uchar c) {
case 1: return LookupPredicate(kLowercaseTable1,
kLowercaseTable1Size,
c);
+ case 5: return LookupPredicate(kLowercaseTable5,
+ kLowercaseTable5Size,
+ c);
case 7: return LookupPredicate(kLowercaseTable7,
kLowercaseTable7Size,
c);
@@ -528,71 +579,76 @@ bool Lowercase::Is(uchar c) {
// Letter: point.category in ['Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl' ]
-static const uint16_t kLetterTable0Size = 394;
-static const int32_t kLetterTable0[394] = {
+static const uint16_t kLetterTable0Size = 435;
+static const int32_t kLetterTable0[435] = {
1073741889, 90, 1073741921, 122, 170, 181, 186, 1073742016, // NOLINT
214, 1073742040, 246, 1073742072, 705, 1073742534, 721, 1073742560, // NOLINT
- 740, 750, 1073742714, 893, 902, 1073742728, 906, 908, // NOLINT
- 1073742734, 929, 1073742755, 974, 1073742800, 1013, 1073742839, 1153, // NOLINT
- 1073742986, 1299, 1073743153, 1366, 1369, 1073743201, 1415, 1073743312, // NOLINT
- 1514, 1073743344, 1522, 1073743393, 1594, 1073743424, 1610, 1073743470, // NOLINT
- 1647, 1073743473, 1747, 1749, 1073743589, 1766, 1073743598, 1775, // NOLINT
- 1073743610, 1788, 1791, 1808, 1073743634, 1839, 1073743693, 1901, // NOLINT
- 1073743744, 1957, 1969, 1073743818, 2026, 1073743860, 2037, 2042, // NOLINT
- 1073744132, 2361, 2365, 2384, 1073744216, 2401, 1073744251, 2431, // NOLINT
- 1073744261, 2444, 1073744271, 2448, 1073744275, 2472, 1073744298, 2480, // NOLINT
- 2482, 1073744310, 2489, 2493, 2510, 1073744348, 2525, 1073744351, // NOLINT
- 2529, 1073744368, 2545, 1073744389, 2570, 1073744399, 2576, 1073744403, // NOLINT
- 2600, 1073744426, 2608, 1073744434, 2611, 1073744437, 2614, 1073744440, // NOLINT
- 2617, 1073744473, 2652, 2654, 1073744498, 2676, 1073744517, 2701, // NOLINT
- 1073744527, 2705, 1073744531, 2728, 1073744554, 2736, 1073744562, 2739, // NOLINT
- 1073744565, 2745, 2749, 2768, 1073744608, 2785, 1073744645, 2828, // NOLINT
- 1073744655, 2832, 1073744659, 2856, 1073744682, 2864, 1073744690, 2867, // NOLINT
- 1073744693, 2873, 2877, 1073744732, 2909, 1073744735, 2913, 2929, // NOLINT
- 2947, 1073744773, 2954, 1073744782, 2960, 1073744786, 2965, 1073744793, // NOLINT
- 2970, 2972, 1073744798, 2975, 1073744803, 2980, 1073744808, 2986, // NOLINT
- 1073744814, 3001, 1073744901, 3084, 1073744910, 3088, 1073744914, 3112, // NOLINT
- 1073744938, 3123, 1073744949, 3129, 1073744992, 3169, 1073745029, 3212, // NOLINT
- 1073745038, 3216, 1073745042, 3240, 1073745066, 3251, 1073745077, 3257, // NOLINT
- 3261, 3294, 1073745120, 3297, 1073745157, 3340, 1073745166, 3344, // NOLINT
- 1073745170, 3368, 1073745194, 3385, 1073745248, 3425, 1073745285, 3478, // NOLINT
- 1073745306, 3505, 1073745331, 3515, 3517, 1073745344, 3526, 1073745409, // NOLINT
- 3632, 1073745458, 3635, 1073745472, 3654, 1073745537, 3714, 3716, // NOLINT
- 1073745543, 3720, 3722, 3725, 1073745556, 3735, 1073745561, 3743, // NOLINT
- 1073745569, 3747, 3749, 3751, 1073745578, 3755, 1073745581, 3760, // NOLINT
- 1073745586, 3763, 3773, 1073745600, 3780, 3782, 1073745628, 3805, // NOLINT
- 3840, 1073745728, 3911, 1073745737, 3946, 1073745800, 3979, 1073745920, // NOLINT
- 4129, 1073745955, 4135, 1073745961, 4138, 1073746000, 4181, 1073746080, // NOLINT
- 4293, 1073746128, 4346, 4348, 1073746176, 4441, 1073746271, 4514, // NOLINT
- 1073746344, 4601, 1073746432, 4680, 1073746506, 4685, 1073746512, 4694, // NOLINT
- 4696, 1073746522, 4701, 1073746528, 4744, 1073746570, 4749, 1073746576, // NOLINT
- 4784, 1073746610, 4789, 1073746616, 4798, 4800, 1073746626, 4805, // NOLINT
- 1073746632, 4822, 1073746648, 4880, 1073746706, 4885, 1073746712, 4954, // NOLINT
- 1073746816, 5007, 1073746848, 5108, 1073746945, 5740, 1073747567, 5750, // NOLINT
- 1073747585, 5786, 1073747616, 5866, 1073747694, 5872, 1073747712, 5900, // NOLINT
- 1073747726, 5905, 1073747744, 5937, 1073747776, 5969, 1073747808, 5996, // NOLINT
- 1073747822, 6000, 1073747840, 6067, 6103, 6108, 1073748000, 6263, // NOLINT
- 1073748096, 6312, 1073748224, 6428, 1073748304, 6509, 1073748336, 6516, // NOLINT
- 1073748352, 6569, 1073748417, 6599, 1073748480, 6678, 1073748741, 6963, // NOLINT
- 1073748805, 6987, 1073749248, 7615, 1073749504, 7835, 1073749664, 7929, // NOLINT
- 1073749760, 7957, 1073749784, 7965, 1073749792, 8005, 1073749832, 8013, // NOLINT
- 1073749840, 8023, 8025, 8027, 8029, 1073749855, 8061, 1073749888, // NOLINT
- 8116, 1073749942, 8124, 8126, 1073749954, 8132, 1073749958, 8140, // NOLINT
- 1073749968, 8147, 1073749974, 8155, 1073749984, 8172, 1073750002, 8180, // NOLINT
- 1073750006, 8188 }; // NOLINT
-static const uint16_t kLetterTable1Size = 84;
-static const int32_t kLetterTable1[84] = {
- 113, 127, 1073741968, 148, 258, 263, 1073742090, 275, // NOLINT
+ 740, 748, 750, 1073742704, 884, 1073742710, 887, 1073742714, // NOLINT
+ 893, 902, 1073742728, 906, 908, 1073742734, 929, 1073742755, // NOLINT
+ 1013, 1073742839, 1153, 1073742986, 1319, 1073743153, 1366, 1369, // NOLINT
+ 1073743201, 1415, 1073743312, 1514, 1073743344, 1522, 1073743392, 1610, // NOLINT
+ 1073743470, 1647, 1073743473, 1747, 1749, 1073743589, 1766, 1073743598, // NOLINT
+ 1775, 1073743610, 1788, 1791, 1808, 1073743634, 1839, 1073743693, // NOLINT
+ 1957, 1969, 1073743818, 2026, 1073743860, 2037, 2042, 1073743872, // NOLINT
+ 2069, 2074, 2084, 2088, 1073743936, 2136, 2208, 1073744034, // NOLINT
+ 2220, 1073744132, 2361, 2365, 2384, 1073744216, 2401, 1073744241, // NOLINT
+ 2423, 1073744249, 2431, 1073744261, 2444, 1073744271, 2448, 1073744275, // NOLINT
+ 2472, 1073744298, 2480, 2482, 1073744310, 2489, 2493, 2510, // NOLINT
+ 1073744348, 2525, 1073744351, 2529, 1073744368, 2545, 1073744389, 2570, // NOLINT
+ 1073744399, 2576, 1073744403, 2600, 1073744426, 2608, 1073744434, 2611, // NOLINT
+ 1073744437, 2614, 1073744440, 2617, 1073744473, 2652, 2654, 1073744498, // NOLINT
+ 2676, 1073744517, 2701, 1073744527, 2705, 1073744531, 2728, 1073744554, // NOLINT
+ 2736, 1073744562, 2739, 1073744565, 2745, 2749, 2768, 1073744608, // NOLINT
+ 2785, 1073744645, 2828, 1073744655, 2832, 1073744659, 2856, 1073744682, // NOLINT
+ 2864, 1073744690, 2867, 1073744693, 2873, 2877, 1073744732, 2909, // NOLINT
+ 1073744735, 2913, 2929, 2947, 1073744773, 2954, 1073744782, 2960, // NOLINT
+ 1073744786, 2965, 1073744793, 2970, 2972, 1073744798, 2975, 1073744803, // NOLINT
+ 2980, 1073744808, 2986, 1073744814, 3001, 3024, 1073744901, 3084, // NOLINT
+ 1073744910, 3088, 1073744914, 3112, 1073744938, 3123, 1073744949, 3129, // NOLINT
+ 3133, 1073744984, 3161, 1073744992, 3169, 1073745029, 3212, 1073745038, // NOLINT
+ 3216, 1073745042, 3240, 1073745066, 3251, 1073745077, 3257, 3261, // NOLINT
+ 3294, 1073745120, 3297, 1073745137, 3314, 1073745157, 3340, 1073745166, // NOLINT
+ 3344, 1073745170, 3386, 3389, 3406, 1073745248, 3425, 1073745274, // NOLINT
+ 3455, 1073745285, 3478, 1073745306, 3505, 1073745331, 3515, 3517, // NOLINT
+ 1073745344, 3526, 1073745409, 3632, 1073745458, 3635, 1073745472, 3654, // NOLINT
+ 1073745537, 3714, 3716, 1073745543, 3720, 3722, 3725, 1073745556, // NOLINT
+ 3735, 1073745561, 3743, 1073745569, 3747, 3749, 3751, 1073745578, // NOLINT
+ 3755, 1073745581, 3760, 1073745586, 3763, 3773, 1073745600, 3780, // NOLINT
+ 3782, 1073745628, 3807, 3840, 1073745728, 3911, 1073745737, 3948, // NOLINT
+ 1073745800, 3980, 1073745920, 4138, 4159, 1073746000, 4181, 1073746010, // NOLINT
+ 4189, 4193, 1073746021, 4198, 1073746030, 4208, 1073746037, 4225, // NOLINT
+ 4238, 1073746080, 4293, 4295, 4301, 1073746128, 4346, 1073746172, // NOLINT
+ 4680, 1073746506, 4685, 1073746512, 4694, 4696, 1073746522, 4701, // NOLINT
+ 1073746528, 4744, 1073746570, 4749, 1073746576, 4784, 1073746610, 4789, // NOLINT
+ 1073746616, 4798, 4800, 1073746626, 4805, 1073746632, 4822, 1073746648, // NOLINT
+ 4880, 1073746706, 4885, 1073746712, 4954, 1073746816, 5007, 1073746848, // NOLINT
+ 5108, 1073746945, 5740, 1073747567, 5759, 1073747585, 5786, 1073747616, // NOLINT
+ 5866, 1073747694, 5872, 1073747712, 5900, 1073747726, 5905, 1073747744, // NOLINT
+ 5937, 1073747776, 5969, 1073747808, 5996, 1073747822, 6000, 1073747840, // NOLINT
+ 6067, 6103, 6108, 1073748000, 6263, 1073748096, 6312, 6314, // NOLINT
+ 1073748144, 6389, 1073748224, 6428, 1073748304, 6509, 1073748336, 6516, // NOLINT
+ 1073748352, 6571, 1073748417, 6599, 1073748480, 6678, 1073748512, 6740, // NOLINT
+ 6823, 1073748741, 6963, 1073748805, 6987, 1073748867, 7072, 1073748910, // NOLINT
+ 7087, 1073748922, 7141, 1073748992, 7203, 1073749069, 7247, 1073749082, // NOLINT
+ 7293, 1073749225, 7404, 1073749230, 7409, 1073749237, 7414, 1073749248, // NOLINT
+ 7615, 1073749504, 7957, 1073749784, 7965, 1073749792, 8005, 1073749832, // NOLINT
+ 8013, 1073749840, 8023, 8025, 8027, 8029, 1073749855, 8061, // NOLINT
+ 1073749888, 8116, 1073749942, 8124, 8126, 1073749954, 8132, 1073749958, // NOLINT
+ 8140, 1073749968, 8147, 1073749974, 8155, 1073749984, 8172, 1073750002, // NOLINT
+ 8180, 1073750006, 8188 }; // NOLINT
+static const uint16_t kLetterTable1Size = 87;
+static const int32_t kLetterTable1[87] = {
+ 113, 127, 1073741968, 156, 258, 263, 1073742090, 275, // NOLINT
277, 1073742105, 285, 292, 294, 296, 1073742122, 301, // NOLINT
1073742127, 313, 1073742140, 319, 1073742149, 329, 334, 1073742176, // NOLINT
- 388, 1073744896, 3118, 1073744944, 3166, 1073744992, 3180, 1073745012, // NOLINT
- 3191, 1073745024, 3300, 1073745152, 3365, 1073745200, 3429, 3439, // NOLINT
- 1073745280, 3478, 1073745312, 3494, 1073745320, 3502, 1073745328, 3510, // NOLINT
- 1073745336, 3518, 1073745344, 3526, 1073745352, 3534, 1073745360, 3542, // NOLINT
- 1073745368, 3550, 1073745925, 4103, 1073745953, 4137, 1073745969, 4149, // NOLINT
- 1073745976, 4156, 1073745985, 4246, 1073746077, 4255, 1073746081, 4346, // NOLINT
- 1073746172, 4351, 1073746181, 4396, 1073746225, 4494, 1073746336, 4535, // NOLINT
- 1073746416, 4607, 1073746944, 8191 }; // NOLINT
+ 392, 1073744896, 3118, 1073744944, 3166, 1073744992, 3300, 1073745131, // NOLINT
+ 3310, 1073745138, 3315, 1073745152, 3365, 3367, 3373, 1073745200, // NOLINT
+ 3431, 3439, 1073745280, 3478, 1073745312, 3494, 1073745320, 3502, // NOLINT
+ 1073745328, 3510, 1073745336, 3518, 1073745344, 3526, 1073745352, 3534, // NOLINT
+ 1073745360, 3542, 1073745368, 3550, 3631, 1073745925, 4103, 1073745953, // NOLINT
+ 4137, 1073745969, 4149, 1073745976, 4156, 1073745985, 4246, 1073746077, // NOLINT
+ 4255, 1073746081, 4346, 1073746172, 4351, 1073746181, 4397, 1073746225, // NOLINT
+ 4494, 1073746336, 4538, 1073746416, 4607, 1073746944, 8191 }; // NOLINT
static const uint16_t kLetterTable2Size = 4;
static const int32_t kLetterTable2[4] = {
1073741824, 3509, 1073745408, 8191 }; // NOLINT
@@ -601,23 +657,31 @@ static const int32_t kLetterTable3[2] = {
1073741824, 8191 }; // NOLINT
static const uint16_t kLetterTable4Size = 2;
static const int32_t kLetterTable4[2] = {
- 1073741824, 8123 }; // NOLINT
-static const uint16_t kLetterTable5Size = 16;
-static const int32_t kLetterTable5[16] = {
- 1073741824, 1164, 1073743639, 1818, 1073743872, 2049, 1073743875, 2053, // NOLINT
- 1073743879, 2058, 1073743884, 2082, 1073743936, 2163, 1073744896, 8191 }; // NOLINT
-static const uint16_t kLetterTable6Size = 2;
-static const int32_t kLetterTable6[2] = {
- 1073741824, 6051 }; // NOLINT
-static const uint16_t kLetterTable7Size = 50;
-static const int32_t kLetterTable7[50] = {
- 1073748224, 6701, 1073748528, 6762, 1073748592, 6873, 1073748736, 6918, // NOLINT
- 1073748755, 6935, 6941, 1073748767, 6952, 1073748778, 6966, 1073748792, // NOLINT
- 6972, 6974, 1073748800, 6977, 1073748803, 6980, 1073748806, 7089, // NOLINT
- 1073748947, 7485, 1073749328, 7567, 1073749394, 7623, 1073749488, 7675, // NOLINT
- 1073749616, 7796, 1073749622, 7932, 1073749793, 7994, 1073749825, 8026, // NOLINT
- 1073749862, 8126, 1073749954, 8135, 1073749962, 8143, 1073749970, 8151, // NOLINT
- 1073749978, 8156 }; // NOLINT
+ 1073741824, 8140 }; // NOLINT
+static const uint16_t kLetterTable5Size = 88;
+static const int32_t kLetterTable5[88] = {
+ 1073741824, 1164, 1073743056, 1277, 1073743104, 1548, 1073743376, 1567, // NOLINT
+ 1073743402, 1579, 1073743424, 1646, 1073743487, 1687, 1073743520, 1775, // NOLINT
+ 1073743639, 1823, 1073743650, 1928, 1073743755, 1934, 1073743760, 1939, // NOLINT
+ 1073743776, 1962, 1073743864, 2049, 1073743875, 2053, 1073743879, 2058, // NOLINT
+ 1073743884, 2082, 1073743936, 2163, 1073744002, 2227, 1073744114, 2295, // NOLINT
+ 2299, 1073744138, 2341, 1073744176, 2374, 1073744224, 2428, 1073744260, // NOLINT
+ 2482, 2511, 1073744384, 2600, 1073744448, 2626, 1073744452, 2635, // NOLINT
+ 1073744480, 2678, 2682, 1073744512, 2735, 2737, 1073744565, 2742, // NOLINT
+ 1073744569, 2749, 2752, 2754, 1073744603, 2781, 1073744608, 2794, // NOLINT
+ 1073744626, 2804, 1073744641, 2822, 1073744649, 2830, 1073744657, 2838, // NOLINT
+ 1073744672, 2854, 1073744680, 2862, 1073744832, 3042, 1073744896, 8191 }; // NOLINT
+static const uint16_t kLetterTable6Size = 6;
+static const int32_t kLetterTable6[6] = {
+ 1073741824, 6051, 1073747888, 6086, 1073747915, 6139 }; // NOLINT
+static const uint16_t kLetterTable7Size = 48;
+static const int32_t kLetterTable7[48] = {
+ 1073748224, 6765, 1073748592, 6873, 1073748736, 6918, 1073748755, 6935, // NOLINT
+ 6941, 1073748767, 6952, 1073748778, 6966, 1073748792, 6972, 6974, // NOLINT
+ 1073748800, 6977, 1073748803, 6980, 1073748806, 7089, 1073748947, 7485, // NOLINT
+ 1073749328, 7567, 1073749394, 7623, 1073749488, 7675, 1073749616, 7796, // NOLINT
+ 1073749622, 7932, 1073749793, 7994, 1073749825, 8026, 1073749862, 8126, // NOLINT
+ 1073749954, 8135, 1073749962, 8143, 1073749970, 8151, 1073749978, 8156 }; // NOLINT
bool Letter::Is(uchar c) {
int chunk_index = c >> 13;
switch (chunk_index) {
@@ -672,14 +736,19 @@ bool Space::Is(uchar c) {
// Number: point.category == 'Nd'
-static const uint16_t kNumberTable0Size = 44;
-static const int32_t kNumberTable0[44] = {
+static const uint16_t kNumberTable0Size = 56;
+static const int32_t kNumberTable0[56] = {
1073741872, 57, 1073743456, 1641, 1073743600, 1785, 1073743808, 1993, // NOLINT
1073744230, 2415, 1073744358, 2543, 1073744486, 2671, 1073744614, 2799, // NOLINT
1073744742, 2927, 1073744870, 3055, 1073744998, 3183, 1073745126, 3311, // NOLINT
1073745254, 3439, 1073745488, 3673, 1073745616, 3801, 1073745696, 3881, // NOLINT
- 1073745984, 4169, 1073747936, 6121, 1073747984, 6169, 1073748294, 6479, // NOLINT
- 1073748432, 6617, 1073748816, 7001 }; // NOLINT
+ 1073745984, 4169, 1073746064, 4249, 1073747936, 6121, 1073747984, 6169, // NOLINT
+ 1073748294, 6479, 1073748432, 6617, 1073748608, 6793, 1073748624, 6809, // NOLINT
+ 1073748816, 7001, 1073748912, 7097, 1073749056, 7241, 1073749072, 7257 }; // NOLINT
+static const uint16_t kNumberTable5Size = 12;
+static const int32_t kNumberTable5[12] = {
+ 1073743392, 1577, 1073744080, 2265, 1073744128, 2313, 1073744336, 2521, // NOLINT
+ 1073744464, 2649, 1073744880, 3065 }; // NOLINT
static const uint16_t kNumberTable7Size = 2;
static const int32_t kNumberTable7[2] = {
1073749776, 7961 }; // NOLINT
@@ -689,6 +758,9 @@ bool Number::Is(uchar c) {
case 0: return LookupPredicate(kNumberTable0,
kNumberTable0Size,
c);
+ case 5: return LookupPredicate(kNumberTable5,
+ kNumberTable5Size,
+ c);
case 7: return LookupPredicate(kNumberTable7,
kNumberTable7Size,
c);
@@ -740,44 +812,56 @@ bool LineTerminator::Is(uchar c) {
// CombiningMark: point.category in ['Mn', 'Mc']
-static const uint16_t kCombiningMarkTable0Size = 205;
-static const int32_t kCombiningMarkTable0[205] = {
- 1073742592, 879, 1073742979, 1158, 1073743249, 1469, 1471, 1073743297, // NOLINT
- 1474, 1073743300, 1477, 1479, 1073743376, 1557, 1073743435, 1630, // NOLINT
+static const uint16_t kCombiningMarkTable0Size = 258;
+static const int32_t kCombiningMarkTable0[258] = {
+ 1073742592, 879, 1073742979, 1159, 1073743249, 1469, 1471, 1073743297, // NOLINT
+ 1474, 1073743300, 1477, 1479, 1073743376, 1562, 1073743435, 1631, // NOLINT
1648, 1073743574, 1756, 1073743583, 1764, 1073743591, 1768, 1073743594, // NOLINT
1773, 1809, 1073743664, 1866, 1073743782, 1968, 1073743851, 2035, // NOLINT
- 1073744129, 2307, 2364, 1073744190, 2381, 1073744209, 2388, 1073744226, // NOLINT
- 2403, 1073744257, 2435, 2492, 1073744318, 2500, 1073744327, 2504, // NOLINT
- 1073744331, 2509, 2519, 1073744354, 2531, 1073744385, 2563, 2620, // NOLINT
- 1073744446, 2626, 1073744455, 2632, 1073744459, 2637, 1073744496, 2673, // NOLINT
- 1073744513, 2691, 2748, 1073744574, 2757, 1073744583, 2761, 1073744587, // NOLINT
- 2765, 1073744610, 2787, 1073744641, 2819, 2876, 1073744702, 2883, // NOLINT
- 1073744711, 2888, 1073744715, 2893, 1073744726, 2903, 2946, 1073744830, // NOLINT
- 3010, 1073744838, 3016, 1073744842, 3021, 3031, 1073744897, 3075, // NOLINT
- 1073744958, 3140, 1073744966, 3144, 1073744970, 3149, 1073744981, 3158, // NOLINT
- 1073745026, 3203, 3260, 1073745086, 3268, 1073745094, 3272, 1073745098, // NOLINT
- 3277, 1073745109, 3286, 1073745122, 3299, 1073745154, 3331, 1073745214, // NOLINT
- 3395, 1073745222, 3400, 1073745226, 3405, 3415, 1073745282, 3459, // NOLINT
- 3530, 1073745359, 3540, 3542, 1073745368, 3551, 1073745394, 3571, // NOLINT
- 3633, 1073745460, 3642, 1073745479, 3662, 3761, 1073745588, 3769, // NOLINT
- 1073745595, 3772, 1073745608, 3789, 1073745688, 3865, 3893, 3895, // NOLINT
- 3897, 1073745726, 3903, 1073745777, 3972, 1073745798, 3975, 1073745808, // NOLINT
- 3991, 1073745817, 4028, 4038, 1073745964, 4146, 1073745974, 4153, // NOLINT
- 1073746006, 4185, 4959, 1073747730, 5908, 1073747762, 5940, 1073747794, // NOLINT
- 5971, 1073747826, 6003, 1073747894, 6099, 6109, 1073747979, 6157, // NOLINT
- 6313, 1073748256, 6443, 1073748272, 6459, 1073748400, 6592, 1073748424, // NOLINT
- 6601, 1073748503, 6683, 1073748736, 6916, 1073748788, 6980, 1073748843, // NOLINT
- 7027, 1073749440, 7626, 1073749502, 7679 }; // NOLINT
-static const uint16_t kCombiningMarkTable1Size = 9;
-static const int32_t kCombiningMarkTable1[9] = {
- 1073742032, 220, 225, 1073742053, 239, 1073745962, 4143, 1073746073, // NOLINT
- 4250 }; // NOLINT
-static const uint16_t kCombiningMarkTable5Size = 5;
-static const int32_t kCombiningMarkTable5[5] = {
- 2050, 2054, 2059, 1073743907, 2087 }; // NOLINT
+ 1073743894, 2073, 1073743899, 2083, 1073743909, 2087, 1073743913, 2093, // NOLINT
+ 1073743961, 2139, 1073744100, 2302, 1073744128, 2307, 1073744186, 2364, // NOLINT
+ 1073744190, 2383, 1073744209, 2391, 1073744226, 2403, 1073744257, 2435, // NOLINT
+ 2492, 1073744318, 2500, 1073744327, 2504, 1073744331, 2509, 2519, // NOLINT
+ 1073744354, 2531, 1073744385, 2563, 2620, 1073744446, 2626, 1073744455, // NOLINT
+ 2632, 1073744459, 2637, 2641, 1073744496, 2673, 2677, 1073744513, // NOLINT
+ 2691, 2748, 1073744574, 2757, 1073744583, 2761, 1073744587, 2765, // NOLINT
+ 1073744610, 2787, 1073744641, 2819, 2876, 1073744702, 2884, 1073744711, // NOLINT
+ 2888, 1073744715, 2893, 1073744726, 2903, 1073744738, 2915, 2946, // NOLINT
+ 1073744830, 3010, 1073744838, 3016, 1073744842, 3021, 3031, 1073744897, // NOLINT
+ 3075, 1073744958, 3140, 1073744966, 3144, 1073744970, 3149, 1073744981, // NOLINT
+ 3158, 1073744994, 3171, 1073745026, 3203, 3260, 1073745086, 3268, // NOLINT
+ 1073745094, 3272, 1073745098, 3277, 1073745109, 3286, 1073745122, 3299, // NOLINT
+ 1073745154, 3331, 1073745214, 3396, 1073745222, 3400, 1073745226, 3405, // NOLINT
+ 3415, 1073745250, 3427, 1073745282, 3459, 3530, 1073745359, 3540, // NOLINT
+ 3542, 1073745368, 3551, 1073745394, 3571, 3633, 1073745460, 3642, // NOLINT
+ 1073745479, 3662, 3761, 1073745588, 3769, 1073745595, 3772, 1073745608, // NOLINT
+ 3789, 1073745688, 3865, 3893, 3895, 3897, 1073745726, 3903, // NOLINT
+ 1073745777, 3972, 1073745798, 3975, 1073745805, 3991, 1073745817, 4028, // NOLINT
+ 4038, 1073745963, 4158, 1073746006, 4185, 1073746014, 4192, 1073746018, // NOLINT
+ 4196, 1073746023, 4205, 1073746033, 4212, 1073746050, 4237, 4239, // NOLINT
+ 1073746074, 4253, 1073746781, 4959, 1073747730, 5908, 1073747762, 5940, // NOLINT
+ 1073747794, 5971, 1073747826, 6003, 1073747892, 6099, 6109, 1073747979, // NOLINT
+ 6157, 6313, 1073748256, 6443, 1073748272, 6459, 1073748400, 6592, // NOLINT
+ 1073748424, 6601, 1073748503, 6683, 1073748565, 6750, 1073748576, 6780, // NOLINT
+ 6783, 1073748736, 6916, 1073748788, 6980, 1073748843, 7027, 1073748864, // NOLINT
+ 7042, 1073748897, 7085, 1073748966, 7155, 1073749028, 7223, 1073749200, // NOLINT
+ 7378, 1073749204, 7400, 7405, 1073749234, 7412, 1073749440, 7654, // NOLINT
+ 1073749500, 7679 }; // NOLINT
+static const uint16_t kCombiningMarkTable1Size = 14;
+static const int32_t kCombiningMarkTable1[14] = {
+ 1073742032, 220, 225, 1073742053, 240, 1073745135, 3313, 3455, // NOLINT
+ 1073745376, 3583, 1073745962, 4143, 1073746073, 4250 }; // NOLINT
+static const uint16_t kCombiningMarkTable5Size = 47;
+static const int32_t kCombiningMarkTable5[47] = {
+ 1647, 1073743476, 1661, 1695, 1073743600, 1777, 2050, 2054, // NOLINT
+ 2059, 1073743907, 2087, 1073744000, 2177, 1073744052, 2244, 1073744096, // NOLINT
+ 2289, 1073744166, 2349, 1073744199, 2387, 1073744256, 2435, 1073744307, // NOLINT
+ 2496, 1073744425, 2614, 2627, 1073744460, 2637, 2683, 2736, // NOLINT
+ 1073744562, 2740, 1073744567, 2744, 1073744574, 2751, 2753, 1073744619, // NOLINT
+ 2799, 1073744629, 2806, 1073744867, 3050, 1073744876, 3053 }; // NOLINT
static const uint16_t kCombiningMarkTable7Size = 5;
static const int32_t kCombiningMarkTable7[5] = {
- 6942, 1073749504, 7695, 1073749536, 7715 }; // NOLINT
+ 6942, 1073749504, 7695, 1073749536, 7718 }; // NOLINT
bool CombiningMark::Is(uchar c) {
int chunk_index = c >> 13;
switch (chunk_index) {
@@ -826,8 +910,8 @@ bool ConnectorPunctuation::Is(uchar c) {
static const MultiCharacterSpecialCase<2> kToLowercaseMultiStrings0[2] = { // NOLINT
{{105, 775}}, {{kSentinel}} }; // NOLINT
-static const uint16_t kToLowercaseTable0Size = 463; // NOLINT
-static const int32_t kToLowercaseTable0[926] = {
+static const uint16_t kToLowercaseTable0Size = 483; // NOLINT
+static const int32_t kToLowercaseTable0[966] = {
1073741889, 128, 90, 128, 1073742016, 128, 214, 128, 1073742040, 128, 222, 128, 256, 4, 258, 4, // NOLINT
260, 4, 262, 4, 264, 4, 266, 4, 268, 4, 270, 4, 272, 4, 274, 4, // NOLINT
276, 4, 278, 4, 280, 4, 282, 4, 284, 4, 286, 4, 288, 4, 290, 4, // NOLINT
@@ -850,22 +934,24 @@ static const int32_t kToLowercaseTable0[926] = {
542, 4, 544, -520, 546, 4, 548, 4, 550, 4, 552, 4, 554, 4, 556, 4, // NOLINT
558, 4, 560, 4, 562, 4, 570, 43180, 571, 4, 573, -652, 574, 43168, 577, 4, // NOLINT
579, -780, 580, 276, 581, 284, 582, 4, 584, 4, 586, 4, 588, 4, 590, 4, // NOLINT
- 902, 152, 1073742728, 148, 906, 148, 908, 256, 1073742734, 252, 911, 252, 1073742737, 128, 929, 128, // NOLINT
- 931, 6, 1073742756, 128, 939, 128, 984, 4, 986, 4, 988, 4, 990, 4, 992, 4, // NOLINT
- 994, 4, 996, 4, 998, 4, 1000, 4, 1002, 4, 1004, 4, 1006, 4, 1012, -240, // NOLINT
- 1015, 4, 1017, -28, 1018, 4, 1073742845, -520, 1023, -520, 1073742848, 320, 1039, 320, 1073742864, 128, // NOLINT
- 1071, 128, 1120, 4, 1122, 4, 1124, 4, 1126, 4, 1128, 4, 1130, 4, 1132, 4, // NOLINT
- 1134, 4, 1136, 4, 1138, 4, 1140, 4, 1142, 4, 1144, 4, 1146, 4, 1148, 4, // NOLINT
- 1150, 4, 1152, 4, 1162, 4, 1164, 4, 1166, 4, 1168, 4, 1170, 4, 1172, 4, // NOLINT
- 1174, 4, 1176, 4, 1178, 4, 1180, 4, 1182, 4, 1184, 4, 1186, 4, 1188, 4, // NOLINT
- 1190, 4, 1192, 4, 1194, 4, 1196, 4, 1198, 4, 1200, 4, 1202, 4, 1204, 4, // NOLINT
- 1206, 4, 1208, 4, 1210, 4, 1212, 4, 1214, 4, 1216, 60, 1217, 4, 1219, 4, // NOLINT
- 1221, 4, 1223, 4, 1225, 4, 1227, 4, 1229, 4, 1232, 4, 1234, 4, 1236, 4, // NOLINT
- 1238, 4, 1240, 4, 1242, 4, 1244, 4, 1246, 4, 1248, 4, 1250, 4, 1252, 4, // NOLINT
- 1254, 4, 1256, 4, 1258, 4, 1260, 4, 1262, 4, 1264, 4, 1266, 4, 1268, 4, // NOLINT
- 1270, 4, 1272, 4, 1274, 4, 1276, 4, 1278, 4, 1280, 4, 1282, 4, 1284, 4, // NOLINT
- 1286, 4, 1288, 4, 1290, 4, 1292, 4, 1294, 4, 1296, 4, 1298, 4, 1073743153, 192, // NOLINT
- 1366, 192, 1073746080, 29056, 4293, 29056, 7680, 4, 7682, 4, 7684, 4, 7686, 4, 7688, 4, // NOLINT
+ 880, 4, 882, 4, 886, 4, 902, 152, 1073742728, 148, 906, 148, 908, 256, 1073742734, 252, // NOLINT
+ 911, 252, 1073742737, 128, 929, 128, 931, 6, 1073742756, 128, 939, 128, 975, 32, 984, 4, // NOLINT
+ 986, 4, 988, 4, 990, 4, 992, 4, 994, 4, 996, 4, 998, 4, 1000, 4, // NOLINT
+ 1002, 4, 1004, 4, 1006, 4, 1012, -240, 1015, 4, 1017, -28, 1018, 4, 1073742845, -520, // NOLINT
+ 1023, -520, 1073742848, 320, 1039, 320, 1073742864, 128, 1071, 128, 1120, 4, 1122, 4, 1124, 4, // NOLINT
+ 1126, 4, 1128, 4, 1130, 4, 1132, 4, 1134, 4, 1136, 4, 1138, 4, 1140, 4, // NOLINT
+ 1142, 4, 1144, 4, 1146, 4, 1148, 4, 1150, 4, 1152, 4, 1162, 4, 1164, 4, // NOLINT
+ 1166, 4, 1168, 4, 1170, 4, 1172, 4, 1174, 4, 1176, 4, 1178, 4, 1180, 4, // NOLINT
+ 1182, 4, 1184, 4, 1186, 4, 1188, 4, 1190, 4, 1192, 4, 1194, 4, 1196, 4, // NOLINT
+ 1198, 4, 1200, 4, 1202, 4, 1204, 4, 1206, 4, 1208, 4, 1210, 4, 1212, 4, // NOLINT
+ 1214, 4, 1216, 60, 1217, 4, 1219, 4, 1221, 4, 1223, 4, 1225, 4, 1227, 4, // NOLINT
+ 1229, 4, 1232, 4, 1234, 4, 1236, 4, 1238, 4, 1240, 4, 1242, 4, 1244, 4, // NOLINT
+ 1246, 4, 1248, 4, 1250, 4, 1252, 4, 1254, 4, 1256, 4, 1258, 4, 1260, 4, // NOLINT
+ 1262, 4, 1264, 4, 1266, 4, 1268, 4, 1270, 4, 1272, 4, 1274, 4, 1276, 4, // NOLINT
+ 1278, 4, 1280, 4, 1282, 4, 1284, 4, 1286, 4, 1288, 4, 1290, 4, 1292, 4, // NOLINT
+ 1294, 4, 1296, 4, 1298, 4, 1300, 4, 1302, 4, 1304, 4, 1306, 4, 1308, 4, // NOLINT
+ 1310, 4, 1312, 4, 1314, 4, 1316, 4, 1318, 4, 1073743153, 192, 1366, 192, 1073746080, 29056, // NOLINT
+ 4293, 29056, 4295, 29056, 4301, 29056, 7680, 4, 7682, 4, 7684, 4, 7686, 4, 7688, 4, // NOLINT
7690, 4, 7692, 4, 7694, 4, 7696, 4, 7698, 4, 7700, 4, 7702, 4, 7704, 4, // NOLINT
7706, 4, 7708, 4, 7710, 4, 7712, 4, 7714, 4, 7716, 4, 7718, 4, 7720, 4, // NOLINT
7722, 4, 7724, 4, 7726, 4, 7728, 4, 7730, 4, 7732, 4, 7734, 4, 7736, 4, // NOLINT
@@ -874,33 +960,52 @@ static const int32_t kToLowercaseTable0[926] = {
7770, 4, 7772, 4, 7774, 4, 7776, 4, 7778, 4, 7780, 4, 7782, 4, 7784, 4, // NOLINT
7786, 4, 7788, 4, 7790, 4, 7792, 4, 7794, 4, 7796, 4, 7798, 4, 7800, 4, // NOLINT
7802, 4, 7804, 4, 7806, 4, 7808, 4, 7810, 4, 7812, 4, 7814, 4, 7816, 4, // NOLINT
- 7818, 4, 7820, 4, 7822, 4, 7824, 4, 7826, 4, 7828, 4, 7840, 4, 7842, 4, // NOLINT
- 7844, 4, 7846, 4, 7848, 4, 7850, 4, 7852, 4, 7854, 4, 7856, 4, 7858, 4, // NOLINT
- 7860, 4, 7862, 4, 7864, 4, 7866, 4, 7868, 4, 7870, 4, 7872, 4, 7874, 4, // NOLINT
- 7876, 4, 7878, 4, 7880, 4, 7882, 4, 7884, 4, 7886, 4, 7888, 4, 7890, 4, // NOLINT
- 7892, 4, 7894, 4, 7896, 4, 7898, 4, 7900, 4, 7902, 4, 7904, 4, 7906, 4, // NOLINT
- 7908, 4, 7910, 4, 7912, 4, 7914, 4, 7916, 4, 7918, 4, 7920, 4, 7922, 4, // NOLINT
- 7924, 4, 7926, 4, 7928, 4, 1073749768, -32, 7951, -32, 1073749784, -32, 7965, -32, 1073749800, -32, // NOLINT
- 7983, -32, 1073749816, -32, 7999, -32, 1073749832, -32, 8013, -32, 8025, -32, 8027, -32, 8029, -32, // NOLINT
- 8031, -32, 1073749864, -32, 8047, -32, 1073749896, -32, 8079, -32, 1073749912, -32, 8095, -32, 1073749928, -32, // NOLINT
- 8111, -32, 1073749944, -32, 8121, -32, 1073749946, -296, 8123, -296, 8124, -36, 1073749960, -344, 8139, -344, // NOLINT
- 8140, -36, 1073749976, -32, 8153, -32, 1073749978, -400, 8155, -400, 1073749992, -32, 8169, -32, 1073749994, -448, // NOLINT
- 8171, -448, 8172, -28, 1073750008, -512, 8185, -512, 1073750010, -504, 8187, -504, 8188, -36 }; // NOLINT
+ 7818, 4, 7820, 4, 7822, 4, 7824, 4, 7826, 4, 7828, 4, 7838, -30460, 7840, 4, // NOLINT
+ 7842, 4, 7844, 4, 7846, 4, 7848, 4, 7850, 4, 7852, 4, 7854, 4, 7856, 4, // NOLINT
+ 7858, 4, 7860, 4, 7862, 4, 7864, 4, 7866, 4, 7868, 4, 7870, 4, 7872, 4, // NOLINT
+ 7874, 4, 7876, 4, 7878, 4, 7880, 4, 7882, 4, 7884, 4, 7886, 4, 7888, 4, // NOLINT
+ 7890, 4, 7892, 4, 7894, 4, 7896, 4, 7898, 4, 7900, 4, 7902, 4, 7904, 4, // NOLINT
+ 7906, 4, 7908, 4, 7910, 4, 7912, 4, 7914, 4, 7916, 4, 7918, 4, 7920, 4, // NOLINT
+ 7922, 4, 7924, 4, 7926, 4, 7928, 4, 7930, 4, 7932, 4, 7934, 4, 1073749768, -32, // NOLINT
+ 7951, -32, 1073749784, -32, 7965, -32, 1073749800, -32, 7983, -32, 1073749816, -32, 7999, -32, 1073749832, -32, // NOLINT
+ 8013, -32, 8025, -32, 8027, -32, 8029, -32, 8031, -32, 1073749864, -32, 8047, -32, 1073749896, -32, // NOLINT
+ 8079, -32, 1073749912, -32, 8095, -32, 1073749928, -32, 8111, -32, 1073749944, -32, 8121, -32, 1073749946, -296, // NOLINT
+ 8123, -296, 8124, -36, 1073749960, -344, 8139, -344, 8140, -36, 1073749976, -32, 8153, -32, 1073749978, -400, // NOLINT
+ 8155, -400, 1073749992, -32, 8169, -32, 1073749994, -448, 8171, -448, 8172, -28, 1073750008, -512, 8185, -512, // NOLINT
+ 1073750010, -504, 8187, -504, 8188, -36 }; // NOLINT
static const uint16_t kToLowercaseMultiStrings0Size = 2; // NOLINT
static const MultiCharacterSpecialCase<1> kToLowercaseMultiStrings1[1] = { // NOLINT
{{kSentinel}} }; // NOLINT
-static const uint16_t kToLowercaseTable1Size = 69; // NOLINT
-static const int32_t kToLowercaseTable1[138] = {
+static const uint16_t kToLowercaseTable1Size = 79; // NOLINT
+static const int32_t kToLowercaseTable1[158] = {
294, -30068, 298, -33532, 299, -33048, 306, 112, 1073742176, 64, 367, 64, 387, 4, 1073743030, 104, // NOLINT
1231, 104, 1073744896, 192, 3118, 192, 3168, 4, 3170, -42972, 3171, -15256, 3172, -42908, 3175, 4, // NOLINT
- 3177, 4, 3179, 4, 3189, 4, 3200, 4, 3202, 4, 3204, 4, 3206, 4, 3208, 4, // NOLINT
- 3210, 4, 3212, 4, 3214, 4, 3216, 4, 3218, 4, 3220, 4, 3222, 4, 3224, 4, // NOLINT
- 3226, 4, 3228, 4, 3230, 4, 3232, 4, 3234, 4, 3236, 4, 3238, 4, 3240, 4, // NOLINT
- 3242, 4, 3244, 4, 3246, 4, 3248, 4, 3250, 4, 3252, 4, 3254, 4, 3256, 4, // NOLINT
- 3258, 4, 3260, 4, 3262, 4, 3264, 4, 3266, 4, 3268, 4, 3270, 4, 3272, 4, // NOLINT
- 3274, 4, 3276, 4, 3278, 4, 3280, 4, 3282, 4, 3284, 4, 3286, 4, 3288, 4, // NOLINT
- 3290, 4, 3292, 4, 3294, 4, 3296, 4, 3298, 4 }; // NOLINT
+ 3177, 4, 3179, 4, 3181, -43120, 3182, -42996, 3183, -43132, 3184, -43128, 3186, 4, 3189, 4, // NOLINT
+ 1073745022, -43260, 3199, -43260, 3200, 4, 3202, 4, 3204, 4, 3206, 4, 3208, 4, 3210, 4, // NOLINT
+ 3212, 4, 3214, 4, 3216, 4, 3218, 4, 3220, 4, 3222, 4, 3224, 4, 3226, 4, // NOLINT
+ 3228, 4, 3230, 4, 3232, 4, 3234, 4, 3236, 4, 3238, 4, 3240, 4, 3242, 4, // NOLINT
+ 3244, 4, 3246, 4, 3248, 4, 3250, 4, 3252, 4, 3254, 4, 3256, 4, 3258, 4, // NOLINT
+ 3260, 4, 3262, 4, 3264, 4, 3266, 4, 3268, 4, 3270, 4, 3272, 4, 3274, 4, // NOLINT
+ 3276, 4, 3278, 4, 3280, 4, 3282, 4, 3284, 4, 3286, 4, 3288, 4, 3290, 4, // NOLINT
+ 3292, 4, 3294, 4, 3296, 4, 3298, 4, 3307, 4, 3309, 4, 3314, 4 }; // NOLINT
static const uint16_t kToLowercaseMultiStrings1Size = 1; // NOLINT
+static const MultiCharacterSpecialCase<1> kToLowercaseMultiStrings5[1] = { // NOLINT
+ {{kSentinel}} }; // NOLINT
+static const uint16_t kToLowercaseTable5Size = 91; // NOLINT
+static const int32_t kToLowercaseTable5[182] = {
+ 1600, 4, 1602, 4, 1604, 4, 1606, 4, 1608, 4, 1610, 4, 1612, 4, 1614, 4, // NOLINT
+ 1616, 4, 1618, 4, 1620, 4, 1622, 4, 1624, 4, 1626, 4, 1628, 4, 1630, 4, // NOLINT
+ 1632, 4, 1634, 4, 1636, 4, 1638, 4, 1640, 4, 1642, 4, 1644, 4, 1664, 4, // NOLINT
+ 1666, 4, 1668, 4, 1670, 4, 1672, 4, 1674, 4, 1676, 4, 1678, 4, 1680, 4, // NOLINT
+ 1682, 4, 1684, 4, 1686, 4, 1826, 4, 1828, 4, 1830, 4, 1832, 4, 1834, 4, // NOLINT
+ 1836, 4, 1838, 4, 1842, 4, 1844, 4, 1846, 4, 1848, 4, 1850, 4, 1852, 4, // NOLINT
+ 1854, 4, 1856, 4, 1858, 4, 1860, 4, 1862, 4, 1864, 4, 1866, 4, 1868, 4, // NOLINT
+ 1870, 4, 1872, 4, 1874, 4, 1876, 4, 1878, 4, 1880, 4, 1882, 4, 1884, 4, // NOLINT
+ 1886, 4, 1888, 4, 1890, 4, 1892, 4, 1894, 4, 1896, 4, 1898, 4, 1900, 4, // NOLINT
+ 1902, 4, 1913, 4, 1915, 4, 1917, -141328, 1918, 4, 1920, 4, 1922, 4, 1924, 4, // NOLINT
+ 1926, 4, 1931, 4, 1933, -169120, 1936, 4, 1938, 4, 1952, 4, 1954, 4, 1956, 4, // NOLINT
+ 1958, 4, 1960, 4, 1962, -169232 }; // NOLINT
+static const uint16_t kToLowercaseMultiStrings5Size = 1; // NOLINT
static const MultiCharacterSpecialCase<1> kToLowercaseMultiStrings7[1] = { // NOLINT
{{kSentinel}} }; // NOLINT
static const uint16_t kToLowercaseTable7Size = 2; // NOLINT
@@ -927,6 +1032,13 @@ int ToLowercase::Convert(uchar c,
n,
result,
allow_caching_ptr);
+ case 5: return LookupMapping<true>(kToLowercaseTable5,
+ kToLowercaseTable5Size,
+ kToLowercaseMultiStrings5,
+ c,
+ n,
+ result,
+ allow_caching_ptr);
case 7: return LookupMapping<true>(kToLowercaseTable7,
kToLowercaseTable7Size,
kToLowercaseMultiStrings7,
@@ -955,8 +1067,8 @@ static const MultiCharacterSpecialCase<3> kToUppercaseMultiStrings0[62] = { //
{{933, 776, 768}}, {{929, 787, kSentinel}}, {{933, 834, kSentinel}}, {{933, 776, 834}}, // NOLINT
{{8186, 921, kSentinel}}, {{937, 921, kSentinel}}, {{911, 921, kSentinel}}, {{937, 834, kSentinel}}, // NOLINT
{{937, 834, 921}}, {{kSentinel}} }; // NOLINT
-static const uint16_t kToUppercaseTable0Size = 554; // NOLINT
-static const int32_t kToUppercaseTable0[1108] = {
+static const uint16_t kToUppercaseTable0Size = 580; // NOLINT
+static const int32_t kToUppercaseTable0[1160] = {
1073741921, -128, 122, -128, 181, 2972, 223, 1, 1073742048, -128, 246, -128, 1073742072, -128, 254, -128, // NOLINT
255, 484, 257, -4, 259, -4, 261, -4, 263, -4, 265, -4, 267, -4, 269, -4, // NOLINT
271, -4, 273, -4, 275, -4, 277, -4, 279, -4, 281, -4, 283, -4, 285, -4, // NOLINT
@@ -976,72 +1088,92 @@ static const int32_t kToUppercaseTable0[1108] = {
517, -4, 519, -4, 521, -4, 523, -4, 525, -4, 527, -4, 529, -4, 531, -4, // NOLINT
533, -4, 535, -4, 537, -4, 539, -4, 541, -4, 543, -4, 547, -4, 549, -4, // NOLINT
551, -4, 553, -4, 555, -4, 557, -4, 559, -4, 561, -4, 563, -4, 572, -4, // NOLINT
- 578, -4, 583, -4, 585, -4, 587, -4, 589, -4, 591, -4, 595, -840, 596, -824, // NOLINT
- 1073742422, -820, 599, -820, 601, -808, 603, -812, 608, -820, 611, -828, 616, -836, 617, -844, // NOLINT
- 619, 42972, 623, -844, 626, -852, 629, -856, 637, 42908, 640, -872, 643, -872, 648, -872, // NOLINT
- 649, -276, 1073742474, -868, 651, -868, 652, -284, 658, -876, 837, 336, 1073742715, 520, 893, 520, // NOLINT
- 912, 13, 940, -152, 1073742765, -148, 943, -148, 944, 17, 1073742769, -128, 961, -128, 962, -124, // NOLINT
- 1073742787, -128, 971, -128, 972, -256, 1073742797, -252, 974, -252, 976, -248, 977, -228, 981, -188, // NOLINT
- 982, -216, 985, -4, 987, -4, 989, -4, 991, -4, 993, -4, 995, -4, 997, -4, // NOLINT
- 999, -4, 1001, -4, 1003, -4, 1005, -4, 1007, -4, 1008, -344, 1009, -320, 1010, 28, // NOLINT
- 1013, -384, 1016, -4, 1019, -4, 1073742896, -128, 1103, -128, 1073742928, -320, 1119, -320, 1121, -4, // NOLINT
- 1123, -4, 1125, -4, 1127, -4, 1129, -4, 1131, -4, 1133, -4, 1135, -4, 1137, -4, // NOLINT
- 1139, -4, 1141, -4, 1143, -4, 1145, -4, 1147, -4, 1149, -4, 1151, -4, 1153, -4, // NOLINT
- 1163, -4, 1165, -4, 1167, -4, 1169, -4, 1171, -4, 1173, -4, 1175, -4, 1177, -4, // NOLINT
- 1179, -4, 1181, -4, 1183, -4, 1185, -4, 1187, -4, 1189, -4, 1191, -4, 1193, -4, // NOLINT
- 1195, -4, 1197, -4, 1199, -4, 1201, -4, 1203, -4, 1205, -4, 1207, -4, 1209, -4, // NOLINT
- 1211, -4, 1213, -4, 1215, -4, 1218, -4, 1220, -4, 1222, -4, 1224, -4, 1226, -4, // NOLINT
- 1228, -4, 1230, -4, 1231, -60, 1233, -4, 1235, -4, 1237, -4, 1239, -4, 1241, -4, // NOLINT
- 1243, -4, 1245, -4, 1247, -4, 1249, -4, 1251, -4, 1253, -4, 1255, -4, 1257, -4, // NOLINT
- 1259, -4, 1261, -4, 1263, -4, 1265, -4, 1267, -4, 1269, -4, 1271, -4, 1273, -4, // NOLINT
- 1275, -4, 1277, -4, 1279, -4, 1281, -4, 1283, -4, 1285, -4, 1287, -4, 1289, -4, // NOLINT
- 1291, -4, 1293, -4, 1295, -4, 1297, -4, 1299, -4, 1073743201, -192, 1414, -192, 1415, 21, // NOLINT
- 7549, 15256, 7681, -4, 7683, -4, 7685, -4, 7687, -4, 7689, -4, 7691, -4, 7693, -4, // NOLINT
- 7695, -4, 7697, -4, 7699, -4, 7701, -4, 7703, -4, 7705, -4, 7707, -4, 7709, -4, // NOLINT
- 7711, -4, 7713, -4, 7715, -4, 7717, -4, 7719, -4, 7721, -4, 7723, -4, 7725, -4, // NOLINT
- 7727, -4, 7729, -4, 7731, -4, 7733, -4, 7735, -4, 7737, -4, 7739, -4, 7741, -4, // NOLINT
- 7743, -4, 7745, -4, 7747, -4, 7749, -4, 7751, -4, 7753, -4, 7755, -4, 7757, -4, // NOLINT
- 7759, -4, 7761, -4, 7763, -4, 7765, -4, 7767, -4, 7769, -4, 7771, -4, 7773, -4, // NOLINT
- 7775, -4, 7777, -4, 7779, -4, 7781, -4, 7783, -4, 7785, -4, 7787, -4, 7789, -4, // NOLINT
- 7791, -4, 7793, -4, 7795, -4, 7797, -4, 7799, -4, 7801, -4, 7803, -4, 7805, -4, // NOLINT
- 7807, -4, 7809, -4, 7811, -4, 7813, -4, 7815, -4, 7817, -4, 7819, -4, 7821, -4, // NOLINT
- 7823, -4, 7825, -4, 7827, -4, 7829, -4, 7830, 25, 7831, 29, 7832, 33, 7833, 37, // NOLINT
- 7834, 41, 7835, -236, 7841, -4, 7843, -4, 7845, -4, 7847, -4, 7849, -4, 7851, -4, // NOLINT
- 7853, -4, 7855, -4, 7857, -4, 7859, -4, 7861, -4, 7863, -4, 7865, -4, 7867, -4, // NOLINT
- 7869, -4, 7871, -4, 7873, -4, 7875, -4, 7877, -4, 7879, -4, 7881, -4, 7883, -4, // NOLINT
- 7885, -4, 7887, -4, 7889, -4, 7891, -4, 7893, -4, 7895, -4, 7897, -4, 7899, -4, // NOLINT
- 7901, -4, 7903, -4, 7905, -4, 7907, -4, 7909, -4, 7911, -4, 7913, -4, 7915, -4, // NOLINT
- 7917, -4, 7919, -4, 7921, -4, 7923, -4, 7925, -4, 7927, -4, 7929, -4, 1073749760, 32, // NOLINT
- 7943, 32, 1073749776, 32, 7957, 32, 1073749792, 32, 7975, 32, 1073749808, 32, 7991, 32, 1073749824, 32, // NOLINT
- 8005, 32, 8016, 45, 8017, 32, 8018, 49, 8019, 32, 8020, 53, 8021, 32, 8022, 57, // NOLINT
- 8023, 32, 1073749856, 32, 8039, 32, 1073749872, 296, 8049, 296, 1073749874, 344, 8053, 344, 1073749878, 400, // NOLINT
- 8055, 400, 1073749880, 512, 8057, 512, 1073749882, 448, 8059, 448, 1073749884, 504, 8061, 504, 8064, 61, // NOLINT
- 8065, 65, 8066, 69, 8067, 73, 8068, 77, 8069, 81, 8070, 85, 8071, 89, 8072, 61, // NOLINT
- 8073, 65, 8074, 69, 8075, 73, 8076, 77, 8077, 81, 8078, 85, 8079, 89, 8080, 93, // NOLINT
- 8081, 97, 8082, 101, 8083, 105, 8084, 109, 8085, 113, 8086, 117, 8087, 121, 8088, 93, // NOLINT
- 8089, 97, 8090, 101, 8091, 105, 8092, 109, 8093, 113, 8094, 117, 8095, 121, 8096, 125, // NOLINT
- 8097, 129, 8098, 133, 8099, 137, 8100, 141, 8101, 145, 8102, 149, 8103, 153, 8104, 125, // NOLINT
- 8105, 129, 8106, 133, 8107, 137, 8108, 141, 8109, 145, 8110, 149, 8111, 153, 1073749936, 32, // NOLINT
- 8113, 32, 8114, 157, 8115, 161, 8116, 165, 8118, 169, 8119, 173, 8124, 161, 8126, -28820, // NOLINT
- 8130, 177, 8131, 181, 8132, 185, 8134, 189, 8135, 193, 8140, 181, 1073749968, 32, 8145, 32, // NOLINT
- 8146, 197, 8147, 13, 8150, 201, 8151, 205, 1073749984, 32, 8161, 32, 8162, 209, 8163, 17, // NOLINT
- 8164, 213, 8165, 28, 8166, 217, 8167, 221, 8178, 225, 8179, 229, 8180, 233, 8182, 237, // NOLINT
- 8183, 241, 8188, 229 }; // NOLINT
+ 1073742399, 43260, 576, 43260, 578, -4, 583, -4, 585, -4, 587, -4, 589, -4, 591, -4, // NOLINT
+ 592, 43132, 593, 43120, 594, 43128, 595, -840, 596, -824, 1073742422, -820, 599, -820, 601, -808, // NOLINT
+ 603, -812, 608, -820, 611, -828, 613, 169120, 614, 169232, 616, -836, 617, -844, 619, 42972, // NOLINT
+ 623, -844, 625, 42996, 626, -852, 629, -856, 637, 42908, 640, -872, 643, -872, 648, -872, // NOLINT
+ 649, -276, 1073742474, -868, 651, -868, 652, -284, 658, -876, 837, 336, 881, -4, 883, -4, // NOLINT
+ 887, -4, 1073742715, 520, 893, 520, 912, 13, 940, -152, 1073742765, -148, 943, -148, 944, 17, // NOLINT
+ 1073742769, -128, 961, -128, 962, -124, 1073742787, -128, 971, -128, 972, -256, 1073742797, -252, 974, -252, // NOLINT
+ 976, -248, 977, -228, 981, -188, 982, -216, 983, -32, 985, -4, 987, -4, 989, -4, // NOLINT
+ 991, -4, 993, -4, 995, -4, 997, -4, 999, -4, 1001, -4, 1003, -4, 1005, -4, // NOLINT
+ 1007, -4, 1008, -344, 1009, -320, 1010, 28, 1013, -384, 1016, -4, 1019, -4, 1073742896, -128, // NOLINT
+ 1103, -128, 1073742928, -320, 1119, -320, 1121, -4, 1123, -4, 1125, -4, 1127, -4, 1129, -4, // NOLINT
+ 1131, -4, 1133, -4, 1135, -4, 1137, -4, 1139, -4, 1141, -4, 1143, -4, 1145, -4, // NOLINT
+ 1147, -4, 1149, -4, 1151, -4, 1153, -4, 1163, -4, 1165, -4, 1167, -4, 1169, -4, // NOLINT
+ 1171, -4, 1173, -4, 1175, -4, 1177, -4, 1179, -4, 1181, -4, 1183, -4, 1185, -4, // NOLINT
+ 1187, -4, 1189, -4, 1191, -4, 1193, -4, 1195, -4, 1197, -4, 1199, -4, 1201, -4, // NOLINT
+ 1203, -4, 1205, -4, 1207, -4, 1209, -4, 1211, -4, 1213, -4, 1215, -4, 1218, -4, // NOLINT
+ 1220, -4, 1222, -4, 1224, -4, 1226, -4, 1228, -4, 1230, -4, 1231, -60, 1233, -4, // NOLINT
+ 1235, -4, 1237, -4, 1239, -4, 1241, -4, 1243, -4, 1245, -4, 1247, -4, 1249, -4, // NOLINT
+ 1251, -4, 1253, -4, 1255, -4, 1257, -4, 1259, -4, 1261, -4, 1263, -4, 1265, -4, // NOLINT
+ 1267, -4, 1269, -4, 1271, -4, 1273, -4, 1275, -4, 1277, -4, 1279, -4, 1281, -4, // NOLINT
+ 1283, -4, 1285, -4, 1287, -4, 1289, -4, 1291, -4, 1293, -4, 1295, -4, 1297, -4, // NOLINT
+ 1299, -4, 1301, -4, 1303, -4, 1305, -4, 1307, -4, 1309, -4, 1311, -4, 1313, -4, // NOLINT
+ 1315, -4, 1317, -4, 1319, -4, 1073743201, -192, 1414, -192, 1415, 21, 7545, 141328, 7549, 15256, // NOLINT
+ 7681, -4, 7683, -4, 7685, -4, 7687, -4, 7689, -4, 7691, -4, 7693, -4, 7695, -4, // NOLINT
+ 7697, -4, 7699, -4, 7701, -4, 7703, -4, 7705, -4, 7707, -4, 7709, -4, 7711, -4, // NOLINT
+ 7713, -4, 7715, -4, 7717, -4, 7719, -4, 7721, -4, 7723, -4, 7725, -4, 7727, -4, // NOLINT
+ 7729, -4, 7731, -4, 7733, -4, 7735, -4, 7737, -4, 7739, -4, 7741, -4, 7743, -4, // NOLINT
+ 7745, -4, 7747, -4, 7749, -4, 7751, -4, 7753, -4, 7755, -4, 7757, -4, 7759, -4, // NOLINT
+ 7761, -4, 7763, -4, 7765, -4, 7767, -4, 7769, -4, 7771, -4, 7773, -4, 7775, -4, // NOLINT
+ 7777, -4, 7779, -4, 7781, -4, 7783, -4, 7785, -4, 7787, -4, 7789, -4, 7791, -4, // NOLINT
+ 7793, -4, 7795, -4, 7797, -4, 7799, -4, 7801, -4, 7803, -4, 7805, -4, 7807, -4, // NOLINT
+ 7809, -4, 7811, -4, 7813, -4, 7815, -4, 7817, -4, 7819, -4, 7821, -4, 7823, -4, // NOLINT
+ 7825, -4, 7827, -4, 7829, -4, 7830, 25, 7831, 29, 7832, 33, 7833, 37, 7834, 41, // NOLINT
+ 7835, -236, 7841, -4, 7843, -4, 7845, -4, 7847, -4, 7849, -4, 7851, -4, 7853, -4, // NOLINT
+ 7855, -4, 7857, -4, 7859, -4, 7861, -4, 7863, -4, 7865, -4, 7867, -4, 7869, -4, // NOLINT
+ 7871, -4, 7873, -4, 7875, -4, 7877, -4, 7879, -4, 7881, -4, 7883, -4, 7885, -4, // NOLINT
+ 7887, -4, 7889, -4, 7891, -4, 7893, -4, 7895, -4, 7897, -4, 7899, -4, 7901, -4, // NOLINT
+ 7903, -4, 7905, -4, 7907, -4, 7909, -4, 7911, -4, 7913, -4, 7915, -4, 7917, -4, // NOLINT
+ 7919, -4, 7921, -4, 7923, -4, 7925, -4, 7927, -4, 7929, -4, 7931, -4, 7933, -4, // NOLINT
+ 7935, -4, 1073749760, 32, 7943, 32, 1073749776, 32, 7957, 32, 1073749792, 32, 7975, 32, 1073749808, 32, // NOLINT
+ 7991, 32, 1073749824, 32, 8005, 32, 8016, 45, 8017, 32, 8018, 49, 8019, 32, 8020, 53, // NOLINT
+ 8021, 32, 8022, 57, 8023, 32, 1073749856, 32, 8039, 32, 1073749872, 296, 8049, 296, 1073749874, 344, // NOLINT
+ 8053, 344, 1073749878, 400, 8055, 400, 1073749880, 512, 8057, 512, 1073749882, 448, 8059, 448, 1073749884, 504, // NOLINT
+ 8061, 504, 8064, 61, 8065, 65, 8066, 69, 8067, 73, 8068, 77, 8069, 81, 8070, 85, // NOLINT
+ 8071, 89, 8072, 61, 8073, 65, 8074, 69, 8075, 73, 8076, 77, 8077, 81, 8078, 85, // NOLINT
+ 8079, 89, 8080, 93, 8081, 97, 8082, 101, 8083, 105, 8084, 109, 8085, 113, 8086, 117, // NOLINT
+ 8087, 121, 8088, 93, 8089, 97, 8090, 101, 8091, 105, 8092, 109, 8093, 113, 8094, 117, // NOLINT
+ 8095, 121, 8096, 125, 8097, 129, 8098, 133, 8099, 137, 8100, 141, 8101, 145, 8102, 149, // NOLINT
+ 8103, 153, 8104, 125, 8105, 129, 8106, 133, 8107, 137, 8108, 141, 8109, 145, 8110, 149, // NOLINT
+ 8111, 153, 1073749936, 32, 8113, 32, 8114, 157, 8115, 161, 8116, 165, 8118, 169, 8119, 173, // NOLINT
+ 8124, 161, 8126, -28820, 8130, 177, 8131, 181, 8132, 185, 8134, 189, 8135, 193, 8140, 181, // NOLINT
+ 1073749968, 32, 8145, 32, 8146, 197, 8147, 13, 8150, 201, 8151, 205, 1073749984, 32, 8161, 32, // NOLINT
+ 8162, 209, 8163, 17, 8164, 213, 8165, 28, 8166, 217, 8167, 221, 8178, 225, 8179, 229, // NOLINT
+ 8180, 233, 8182, 237, 8183, 241, 8188, 229 }; // NOLINT
static const uint16_t kToUppercaseMultiStrings0Size = 62; // NOLINT
static const MultiCharacterSpecialCase<1> kToUppercaseMultiStrings1[1] = { // NOLINT
{{kSentinel}} }; // NOLINT
-static const uint16_t kToUppercaseTable1Size = 67; // NOLINT
-static const int32_t kToUppercaseTable1[134] = {
+static const uint16_t kToUppercaseTable1Size = 73; // NOLINT
+static const int32_t kToUppercaseTable1[146] = {
334, -112, 1073742192, -64, 383, -64, 388, -4, 1073743056, -104, 1257, -104, 1073744944, -192, 3166, -192, // NOLINT
- 3169, -4, 3173, -43180, 3174, -43168, 3176, -4, 3178, -4, 3180, -4, 3190, -4, 3201, -4, // NOLINT
- 3203, -4, 3205, -4, 3207, -4, 3209, -4, 3211, -4, 3213, -4, 3215, -4, 3217, -4, // NOLINT
- 3219, -4, 3221, -4, 3223, -4, 3225, -4, 3227, -4, 3229, -4, 3231, -4, 3233, -4, // NOLINT
- 3235, -4, 3237, -4, 3239, -4, 3241, -4, 3243, -4, 3245, -4, 3247, -4, 3249, -4, // NOLINT
- 3251, -4, 3253, -4, 3255, -4, 3257, -4, 3259, -4, 3261, -4, 3263, -4, 3265, -4, // NOLINT
- 3267, -4, 3269, -4, 3271, -4, 3273, -4, 3275, -4, 3277, -4, 3279, -4, 3281, -4, // NOLINT
- 3283, -4, 3285, -4, 3287, -4, 3289, -4, 3291, -4, 3293, -4, 3295, -4, 3297, -4, // NOLINT
- 3299, -4, 1073745152, -29056, 3365, -29056 }; // NOLINT
+ 3169, -4, 3173, -43180, 3174, -43168, 3176, -4, 3178, -4, 3180, -4, 3187, -4, 3190, -4, // NOLINT
+ 3201, -4, 3203, -4, 3205, -4, 3207, -4, 3209, -4, 3211, -4, 3213, -4, 3215, -4, // NOLINT
+ 3217, -4, 3219, -4, 3221, -4, 3223, -4, 3225, -4, 3227, -4, 3229, -4, 3231, -4, // NOLINT
+ 3233, -4, 3235, -4, 3237, -4, 3239, -4, 3241, -4, 3243, -4, 3245, -4, 3247, -4, // NOLINT
+ 3249, -4, 3251, -4, 3253, -4, 3255, -4, 3257, -4, 3259, -4, 3261, -4, 3263, -4, // NOLINT
+ 3265, -4, 3267, -4, 3269, -4, 3271, -4, 3273, -4, 3275, -4, 3277, -4, 3279, -4, // NOLINT
+ 3281, -4, 3283, -4, 3285, -4, 3287, -4, 3289, -4, 3291, -4, 3293, -4, 3295, -4, // NOLINT
+ 3297, -4, 3299, -4, 3308, -4, 3310, -4, 3315, -4, 1073745152, -29056, 3365, -29056, 3367, -29056, // NOLINT
+ 3373, -29056 }; // NOLINT
static const uint16_t kToUppercaseMultiStrings1Size = 1; // NOLINT
+static const MultiCharacterSpecialCase<1> kToUppercaseMultiStrings5[1] = { // NOLINT
+ {{kSentinel}} }; // NOLINT
+static const uint16_t kToUppercaseTable5Size = 88; // NOLINT
+static const int32_t kToUppercaseTable5[176] = {
+ 1601, -4, 1603, -4, 1605, -4, 1607, -4, 1609, -4, 1611, -4, 1613, -4, 1615, -4, // NOLINT
+ 1617, -4, 1619, -4, 1621, -4, 1623, -4, 1625, -4, 1627, -4, 1629, -4, 1631, -4, // NOLINT
+ 1633, -4, 1635, -4, 1637, -4, 1639, -4, 1641, -4, 1643, -4, 1645, -4, 1665, -4, // NOLINT
+ 1667, -4, 1669, -4, 1671, -4, 1673, -4, 1675, -4, 1677, -4, 1679, -4, 1681, -4, // NOLINT
+ 1683, -4, 1685, -4, 1687, -4, 1827, -4, 1829, -4, 1831, -4, 1833, -4, 1835, -4, // NOLINT
+ 1837, -4, 1839, -4, 1843, -4, 1845, -4, 1847, -4, 1849, -4, 1851, -4, 1853, -4, // NOLINT
+ 1855, -4, 1857, -4, 1859, -4, 1861, -4, 1863, -4, 1865, -4, 1867, -4, 1869, -4, // NOLINT
+ 1871, -4, 1873, -4, 1875, -4, 1877, -4, 1879, -4, 1881, -4, 1883, -4, 1885, -4, // NOLINT
+ 1887, -4, 1889, -4, 1891, -4, 1893, -4, 1895, -4, 1897, -4, 1899, -4, 1901, -4, // NOLINT
+ 1903, -4, 1914, -4, 1916, -4, 1919, -4, 1921, -4, 1923, -4, 1925, -4, 1927, -4, // NOLINT
+ 1932, -4, 1937, -4, 1939, -4, 1953, -4, 1955, -4, 1957, -4, 1959, -4, 1961, -4 }; // NOLINT
+static const uint16_t kToUppercaseMultiStrings5Size = 1; // NOLINT
static const MultiCharacterSpecialCase<3> kToUppercaseMultiStrings7[12] = { // NOLINT
{{70, 70, kSentinel}}, {{70, 73, kSentinel}}, {{70, 76, kSentinel}}, {{70, 70, 73}}, // NOLINT
{{70, 70, 76}}, {{83, 84, kSentinel}}, {{1348, 1350, kSentinel}}, {{1348, 1333, kSentinel}}, // NOLINT
@@ -1071,6 +1203,13 @@ int ToUppercase::Convert(uchar c,
n,
result,
allow_caching_ptr);
+ case 5: return LookupMapping<true>(kToUppercaseTable5,
+ kToUppercaseTable5Size,
+ kToUppercaseMultiStrings5,
+ c,
+ n,
+ result,
+ allow_caching_ptr);
case 7: return LookupMapping<true>(kToUppercaseTable7,
kToUppercaseTable7Size,
kToUppercaseMultiStrings7,
@@ -1084,8 +1223,8 @@ int ToUppercase::Convert(uchar c,
static const MultiCharacterSpecialCase<1> kEcma262CanonicalizeMultiStrings0[1] = { // NOLINT
{{kSentinel}} }; // NOLINT
-static const uint16_t kEcma262CanonicalizeTable0Size = 462; // NOLINT
-static const int32_t kEcma262CanonicalizeTable0[924] = {
+static const uint16_t kEcma262CanonicalizeTable0Size = 488; // NOLINT
+static const int32_t kEcma262CanonicalizeTable0[976] = {
1073741921, -128, 122, -128, 181, 2972, 1073742048, -128, 246, -128, 1073742072, -128, 254, -128, 255, 484, // NOLINT
257, -4, 259, -4, 261, -4, 263, -4, 265, -4, 267, -4, 269, -4, 271, -4, // NOLINT
273, -4, 275, -4, 277, -4, 279, -4, 281, -4, 283, -4, 285, -4, 287, -4, // NOLINT
@@ -1104,61 +1243,81 @@ static const int32_t kEcma262CanonicalizeTable0[924] = {
511, -4, 513, -4, 515, -4, 517, -4, 519, -4, 521, -4, 523, -4, 525, -4, // NOLINT
527, -4, 529, -4, 531, -4, 533, -4, 535, -4, 537, -4, 539, -4, 541, -4, // NOLINT
543, -4, 547, -4, 549, -4, 551, -4, 553, -4, 555, -4, 557, -4, 559, -4, // NOLINT
- 561, -4, 563, -4, 572, -4, 578, -4, 583, -4, 585, -4, 587, -4, 589, -4, // NOLINT
- 591, -4, 595, -840, 596, -824, 1073742422, -820, 599, -820, 601, -808, 603, -812, 608, -820, // NOLINT
- 611, -828, 616, -836, 617, -844, 619, 42972, 623, -844, 626, -852, 629, -856, 637, 42908, // NOLINT
+ 561, -4, 563, -4, 572, -4, 1073742399, 43260, 576, 43260, 578, -4, 583, -4, 585, -4, // NOLINT
+ 587, -4, 589, -4, 591, -4, 592, 43132, 593, 43120, 594, 43128, 595, -840, 596, -824, // NOLINT
+ 1073742422, -820, 599, -820, 601, -808, 603, -812, 608, -820, 611, -828, 613, 169120, 614, 169232, // NOLINT
+ 616, -836, 617, -844, 619, 42972, 623, -844, 625, 42996, 626, -852, 629, -856, 637, 42908, // NOLINT
640, -872, 643, -872, 648, -872, 649, -276, 1073742474, -868, 651, -868, 652, -284, 658, -876, // NOLINT
- 837, 336, 1073742715, 520, 893, 520, 940, -152, 1073742765, -148, 943, -148, 1073742769, -128, 961, -128, // NOLINT
- 962, -124, 1073742787, -128, 971, -128, 972, -256, 1073742797, -252, 974, -252, 976, -248, 977, -228, // NOLINT
- 981, -188, 982, -216, 985, -4, 987, -4, 989, -4, 991, -4, 993, -4, 995, -4, // NOLINT
- 997, -4, 999, -4, 1001, -4, 1003, -4, 1005, -4, 1007, -4, 1008, -344, 1009, -320, // NOLINT
- 1010, 28, 1013, -384, 1016, -4, 1019, -4, 1073742896, -128, 1103, -128, 1073742928, -320, 1119, -320, // NOLINT
- 1121, -4, 1123, -4, 1125, -4, 1127, -4, 1129, -4, 1131, -4, 1133, -4, 1135, -4, // NOLINT
- 1137, -4, 1139, -4, 1141, -4, 1143, -4, 1145, -4, 1147, -4, 1149, -4, 1151, -4, // NOLINT
- 1153, -4, 1163, -4, 1165, -4, 1167, -4, 1169, -4, 1171, -4, 1173, -4, 1175, -4, // NOLINT
- 1177, -4, 1179, -4, 1181, -4, 1183, -4, 1185, -4, 1187, -4, 1189, -4, 1191, -4, // NOLINT
- 1193, -4, 1195, -4, 1197, -4, 1199, -4, 1201, -4, 1203, -4, 1205, -4, 1207, -4, // NOLINT
- 1209, -4, 1211, -4, 1213, -4, 1215, -4, 1218, -4, 1220, -4, 1222, -4, 1224, -4, // NOLINT
- 1226, -4, 1228, -4, 1230, -4, 1231, -60, 1233, -4, 1235, -4, 1237, -4, 1239, -4, // NOLINT
- 1241, -4, 1243, -4, 1245, -4, 1247, -4, 1249, -4, 1251, -4, 1253, -4, 1255, -4, // NOLINT
- 1257, -4, 1259, -4, 1261, -4, 1263, -4, 1265, -4, 1267, -4, 1269, -4, 1271, -4, // NOLINT
- 1273, -4, 1275, -4, 1277, -4, 1279, -4, 1281, -4, 1283, -4, 1285, -4, 1287, -4, // NOLINT
- 1289, -4, 1291, -4, 1293, -4, 1295, -4, 1297, -4, 1299, -4, 1073743201, -192, 1414, -192, // NOLINT
- 7549, 15256, 7681, -4, 7683, -4, 7685, -4, 7687, -4, 7689, -4, 7691, -4, 7693, -4, // NOLINT
- 7695, -4, 7697, -4, 7699, -4, 7701, -4, 7703, -4, 7705, -4, 7707, -4, 7709, -4, // NOLINT
- 7711, -4, 7713, -4, 7715, -4, 7717, -4, 7719, -4, 7721, -4, 7723, -4, 7725, -4, // NOLINT
- 7727, -4, 7729, -4, 7731, -4, 7733, -4, 7735, -4, 7737, -4, 7739, -4, 7741, -4, // NOLINT
- 7743, -4, 7745, -4, 7747, -4, 7749, -4, 7751, -4, 7753, -4, 7755, -4, 7757, -4, // NOLINT
- 7759, -4, 7761, -4, 7763, -4, 7765, -4, 7767, -4, 7769, -4, 7771, -4, 7773, -4, // NOLINT
- 7775, -4, 7777, -4, 7779, -4, 7781, -4, 7783, -4, 7785, -4, 7787, -4, 7789, -4, // NOLINT
- 7791, -4, 7793, -4, 7795, -4, 7797, -4, 7799, -4, 7801, -4, 7803, -4, 7805, -4, // NOLINT
- 7807, -4, 7809, -4, 7811, -4, 7813, -4, 7815, -4, 7817, -4, 7819, -4, 7821, -4, // NOLINT
- 7823, -4, 7825, -4, 7827, -4, 7829, -4, 7835, -236, 7841, -4, 7843, -4, 7845, -4, // NOLINT
- 7847, -4, 7849, -4, 7851, -4, 7853, -4, 7855, -4, 7857, -4, 7859, -4, 7861, -4, // NOLINT
- 7863, -4, 7865, -4, 7867, -4, 7869, -4, 7871, -4, 7873, -4, 7875, -4, 7877, -4, // NOLINT
- 7879, -4, 7881, -4, 7883, -4, 7885, -4, 7887, -4, 7889, -4, 7891, -4, 7893, -4, // NOLINT
- 7895, -4, 7897, -4, 7899, -4, 7901, -4, 7903, -4, 7905, -4, 7907, -4, 7909, -4, // NOLINT
- 7911, -4, 7913, -4, 7915, -4, 7917, -4, 7919, -4, 7921, -4, 7923, -4, 7925, -4, // NOLINT
- 7927, -4, 7929, -4, 1073749760, 32, 7943, 32, 1073749776, 32, 7957, 32, 1073749792, 32, 7975, 32, // NOLINT
- 1073749808, 32, 7991, 32, 1073749824, 32, 8005, 32, 8017, 32, 8019, 32, 8021, 32, 8023, 32, // NOLINT
- 1073749856, 32, 8039, 32, 1073749872, 296, 8049, 296, 1073749874, 344, 8053, 344, 1073749878, 400, 8055, 400, // NOLINT
- 1073749880, 512, 8057, 512, 1073749882, 448, 8059, 448, 1073749884, 504, 8061, 504, 1073749936, 32, 8113, 32, // NOLINT
- 8126, -28820, 1073749968, 32, 8145, 32, 1073749984, 32, 8161, 32, 8165, 28 }; // NOLINT
+ 837, 336, 881, -4, 883, -4, 887, -4, 1073742715, 520, 893, 520, 940, -152, 1073742765, -148, // NOLINT
+ 943, -148, 1073742769, -128, 961, -128, 962, -124, 1073742787, -128, 971, -128, 972, -256, 1073742797, -252, // NOLINT
+ 974, -252, 976, -248, 977, -228, 981, -188, 982, -216, 983, -32, 985, -4, 987, -4, // NOLINT
+ 989, -4, 991, -4, 993, -4, 995, -4, 997, -4, 999, -4, 1001, -4, 1003, -4, // NOLINT
+ 1005, -4, 1007, -4, 1008, -344, 1009, -320, 1010, 28, 1013, -384, 1016, -4, 1019, -4, // NOLINT
+ 1073742896, -128, 1103, -128, 1073742928, -320, 1119, -320, 1121, -4, 1123, -4, 1125, -4, 1127, -4, // NOLINT
+ 1129, -4, 1131, -4, 1133, -4, 1135, -4, 1137, -4, 1139, -4, 1141, -4, 1143, -4, // NOLINT
+ 1145, -4, 1147, -4, 1149, -4, 1151, -4, 1153, -4, 1163, -4, 1165, -4, 1167, -4, // NOLINT
+ 1169, -4, 1171, -4, 1173, -4, 1175, -4, 1177, -4, 1179, -4, 1181, -4, 1183, -4, // NOLINT
+ 1185, -4, 1187, -4, 1189, -4, 1191, -4, 1193, -4, 1195, -4, 1197, -4, 1199, -4, // NOLINT
+ 1201, -4, 1203, -4, 1205, -4, 1207, -4, 1209, -4, 1211, -4, 1213, -4, 1215, -4, // NOLINT
+ 1218, -4, 1220, -4, 1222, -4, 1224, -4, 1226, -4, 1228, -4, 1230, -4, 1231, -60, // NOLINT
+ 1233, -4, 1235, -4, 1237, -4, 1239, -4, 1241, -4, 1243, -4, 1245, -4, 1247, -4, // NOLINT
+ 1249, -4, 1251, -4, 1253, -4, 1255, -4, 1257, -4, 1259, -4, 1261, -4, 1263, -4, // NOLINT
+ 1265, -4, 1267, -4, 1269, -4, 1271, -4, 1273, -4, 1275, -4, 1277, -4, 1279, -4, // NOLINT
+ 1281, -4, 1283, -4, 1285, -4, 1287, -4, 1289, -4, 1291, -4, 1293, -4, 1295, -4, // NOLINT
+ 1297, -4, 1299, -4, 1301, -4, 1303, -4, 1305, -4, 1307, -4, 1309, -4, 1311, -4, // NOLINT
+ 1313, -4, 1315, -4, 1317, -4, 1319, -4, 1073743201, -192, 1414, -192, 7545, 141328, 7549, 15256, // NOLINT
+ 7681, -4, 7683, -4, 7685, -4, 7687, -4, 7689, -4, 7691, -4, 7693, -4, 7695, -4, // NOLINT
+ 7697, -4, 7699, -4, 7701, -4, 7703, -4, 7705, -4, 7707, -4, 7709, -4, 7711, -4, // NOLINT
+ 7713, -4, 7715, -4, 7717, -4, 7719, -4, 7721, -4, 7723, -4, 7725, -4, 7727, -4, // NOLINT
+ 7729, -4, 7731, -4, 7733, -4, 7735, -4, 7737, -4, 7739, -4, 7741, -4, 7743, -4, // NOLINT
+ 7745, -4, 7747, -4, 7749, -4, 7751, -4, 7753, -4, 7755, -4, 7757, -4, 7759, -4, // NOLINT
+ 7761, -4, 7763, -4, 7765, -4, 7767, -4, 7769, -4, 7771, -4, 7773, -4, 7775, -4, // NOLINT
+ 7777, -4, 7779, -4, 7781, -4, 7783, -4, 7785, -4, 7787, -4, 7789, -4, 7791, -4, // NOLINT
+ 7793, -4, 7795, -4, 7797, -4, 7799, -4, 7801, -4, 7803, -4, 7805, -4, 7807, -4, // NOLINT
+ 7809, -4, 7811, -4, 7813, -4, 7815, -4, 7817, -4, 7819, -4, 7821, -4, 7823, -4, // NOLINT
+ 7825, -4, 7827, -4, 7829, -4, 7835, -236, 7841, -4, 7843, -4, 7845, -4, 7847, -4, // NOLINT
+ 7849, -4, 7851, -4, 7853, -4, 7855, -4, 7857, -4, 7859, -4, 7861, -4, 7863, -4, // NOLINT
+ 7865, -4, 7867, -4, 7869, -4, 7871, -4, 7873, -4, 7875, -4, 7877, -4, 7879, -4, // NOLINT
+ 7881, -4, 7883, -4, 7885, -4, 7887, -4, 7889, -4, 7891, -4, 7893, -4, 7895, -4, // NOLINT
+ 7897, -4, 7899, -4, 7901, -4, 7903, -4, 7905, -4, 7907, -4, 7909, -4, 7911, -4, // NOLINT
+ 7913, -4, 7915, -4, 7917, -4, 7919, -4, 7921, -4, 7923, -4, 7925, -4, 7927, -4, // NOLINT
+ 7929, -4, 7931, -4, 7933, -4, 7935, -4, 1073749760, 32, 7943, 32, 1073749776, 32, 7957, 32, // NOLINT
+ 1073749792, 32, 7975, 32, 1073749808, 32, 7991, 32, 1073749824, 32, 8005, 32, 8017, 32, 8019, 32, // NOLINT
+ 8021, 32, 8023, 32, 1073749856, 32, 8039, 32, 1073749872, 296, 8049, 296, 1073749874, 344, 8053, 344, // NOLINT
+ 1073749878, 400, 8055, 400, 1073749880, 512, 8057, 512, 1073749882, 448, 8059, 448, 1073749884, 504, 8061, 504, // NOLINT
+ 1073749936, 32, 8113, 32, 8126, -28820, 1073749968, 32, 8145, 32, 1073749984, 32, 8161, 32, 8165, 28 }; // NOLINT
static const uint16_t kEcma262CanonicalizeMultiStrings0Size = 1; // NOLINT
static const MultiCharacterSpecialCase<1> kEcma262CanonicalizeMultiStrings1[1] = { // NOLINT
{{kSentinel}} }; // NOLINT
-static const uint16_t kEcma262CanonicalizeTable1Size = 67; // NOLINT
-static const int32_t kEcma262CanonicalizeTable1[134] = {
+static const uint16_t kEcma262CanonicalizeTable1Size = 73; // NOLINT
+static const int32_t kEcma262CanonicalizeTable1[146] = {
334, -112, 1073742192, -64, 383, -64, 388, -4, 1073743056, -104, 1257, -104, 1073744944, -192, 3166, -192, // NOLINT
- 3169, -4, 3173, -43180, 3174, -43168, 3176, -4, 3178, -4, 3180, -4, 3190, -4, 3201, -4, // NOLINT
- 3203, -4, 3205, -4, 3207, -4, 3209, -4, 3211, -4, 3213, -4, 3215, -4, 3217, -4, // NOLINT
- 3219, -4, 3221, -4, 3223, -4, 3225, -4, 3227, -4, 3229, -4, 3231, -4, 3233, -4, // NOLINT
- 3235, -4, 3237, -4, 3239, -4, 3241, -4, 3243, -4, 3245, -4, 3247, -4, 3249, -4, // NOLINT
- 3251, -4, 3253, -4, 3255, -4, 3257, -4, 3259, -4, 3261, -4, 3263, -4, 3265, -4, // NOLINT
- 3267, -4, 3269, -4, 3271, -4, 3273, -4, 3275, -4, 3277, -4, 3279, -4, 3281, -4, // NOLINT
- 3283, -4, 3285, -4, 3287, -4, 3289, -4, 3291, -4, 3293, -4, 3295, -4, 3297, -4, // NOLINT
- 3299, -4, 1073745152, -29056, 3365, -29056 }; // NOLINT
+ 3169, -4, 3173, -43180, 3174, -43168, 3176, -4, 3178, -4, 3180, -4, 3187, -4, 3190, -4, // NOLINT
+ 3201, -4, 3203, -4, 3205, -4, 3207, -4, 3209, -4, 3211, -4, 3213, -4, 3215, -4, // NOLINT
+ 3217, -4, 3219, -4, 3221, -4, 3223, -4, 3225, -4, 3227, -4, 3229, -4, 3231, -4, // NOLINT
+ 3233, -4, 3235, -4, 3237, -4, 3239, -4, 3241, -4, 3243, -4, 3245, -4, 3247, -4, // NOLINT
+ 3249, -4, 3251, -4, 3253, -4, 3255, -4, 3257, -4, 3259, -4, 3261, -4, 3263, -4, // NOLINT
+ 3265, -4, 3267, -4, 3269, -4, 3271, -4, 3273, -4, 3275, -4, 3277, -4, 3279, -4, // NOLINT
+ 3281, -4, 3283, -4, 3285, -4, 3287, -4, 3289, -4, 3291, -4, 3293, -4, 3295, -4, // NOLINT
+ 3297, -4, 3299, -4, 3308, -4, 3310, -4, 3315, -4, 1073745152, -29056, 3365, -29056, 3367, -29056, // NOLINT
+ 3373, -29056 }; // NOLINT
static const uint16_t kEcma262CanonicalizeMultiStrings1Size = 1; // NOLINT
+static const MultiCharacterSpecialCase<1> kEcma262CanonicalizeMultiStrings5[1] = { // NOLINT
+ {{kSentinel}} }; // NOLINT
+static const uint16_t kEcma262CanonicalizeTable5Size = 88; // NOLINT
+static const int32_t kEcma262CanonicalizeTable5[176] = {
+ 1601, -4, 1603, -4, 1605, -4, 1607, -4, 1609, -4, 1611, -4, 1613, -4, 1615, -4, // NOLINT
+ 1617, -4, 1619, -4, 1621, -4, 1623, -4, 1625, -4, 1627, -4, 1629, -4, 1631, -4, // NOLINT
+ 1633, -4, 1635, -4, 1637, -4, 1639, -4, 1641, -4, 1643, -4, 1645, -4, 1665, -4, // NOLINT
+ 1667, -4, 1669, -4, 1671, -4, 1673, -4, 1675, -4, 1677, -4, 1679, -4, 1681, -4, // NOLINT
+ 1683, -4, 1685, -4, 1687, -4, 1827, -4, 1829, -4, 1831, -4, 1833, -4, 1835, -4, // NOLINT
+ 1837, -4, 1839, -4, 1843, -4, 1845, -4, 1847, -4, 1849, -4, 1851, -4, 1853, -4, // NOLINT
+ 1855, -4, 1857, -4, 1859, -4, 1861, -4, 1863, -4, 1865, -4, 1867, -4, 1869, -4, // NOLINT
+ 1871, -4, 1873, -4, 1875, -4, 1877, -4, 1879, -4, 1881, -4, 1883, -4, 1885, -4, // NOLINT
+ 1887, -4, 1889, -4, 1891, -4, 1893, -4, 1895, -4, 1897, -4, 1899, -4, 1901, -4, // NOLINT
+ 1903, -4, 1914, -4, 1916, -4, 1919, -4, 1921, -4, 1923, -4, 1925, -4, 1927, -4, // NOLINT
+ 1932, -4, 1937, -4, 1939, -4, 1953, -4, 1955, -4, 1957, -4, 1959, -4, 1961, -4 }; // NOLINT
+static const uint16_t kEcma262CanonicalizeMultiStrings5Size = 1; // NOLINT
static const MultiCharacterSpecialCase<1> kEcma262CanonicalizeMultiStrings7[1] = { // NOLINT
{{kSentinel}} }; // NOLINT
static const uint16_t kEcma262CanonicalizeTable7Size = 2; // NOLINT
@@ -1185,6 +1344,13 @@ int Ecma262Canonicalize::Convert(uchar c,
n,
result,
allow_caching_ptr);
+ case 5: return LookupMapping<true>(kEcma262CanonicalizeTable5,
+ kEcma262CanonicalizeTable5Size,
+ kEcma262CanonicalizeMultiStrings5,
+ c,
+ n,
+ result,
+ allow_caching_ptr);
case 7: return LookupMapping<true>(kEcma262CanonicalizeTable7,
kEcma262CanonicalizeTable7Size,
kEcma262CanonicalizeMultiStrings7,
@@ -1196,7 +1362,7 @@ int Ecma262Canonicalize::Convert(uchar c,
}
}
-static const MultiCharacterSpecialCase<4> kEcma262UnCanonicalizeMultiStrings0[469] = { // NOLINT
+static const MultiCharacterSpecialCase<4> kEcma262UnCanonicalizeMultiStrings0[497] = { // NOLINT
{{65, 97, kSentinel}}, {{90, 122, kSentinel}}, {{181, 924, 956, kSentinel}}, {{192, 224, kSentinel}}, // NOLINT
{{214, 246, kSentinel}}, {{216, 248, kSentinel}}, {{222, 254, kSentinel}}, {{255, 376, kSentinel}}, // NOLINT
{{256, 257, kSentinel}}, {{258, 259, kSentinel}}, {{260, 261, kSentinel}}, {{262, 263, kSentinel}}, // NOLINT
@@ -1238,16 +1404,19 @@ static const MultiCharacterSpecialCase<4> kEcma262UnCanonicalizeMultiStrings0[46
{{546, 547, kSentinel}}, {{548, 549, kSentinel}}, {{550, 551, kSentinel}}, {{552, 553, kSentinel}}, // NOLINT
{{554, 555, kSentinel}}, {{556, 557, kSentinel}}, {{558, 559, kSentinel}}, {{560, 561, kSentinel}}, // NOLINT
{{562, 563, kSentinel}}, {{570, 11365, kSentinel}}, {{571, 572, kSentinel}}, {{574, 11366, kSentinel}}, // NOLINT
- {{577, 578, kSentinel}}, {{580, 649, kSentinel}}, {{581, 652, kSentinel}}, {{582, 583, kSentinel}}, // NOLINT
- {{584, 585, kSentinel}}, {{586, 587, kSentinel}}, {{588, 589, kSentinel}}, {{590, 591, kSentinel}}, // NOLINT
- {{619, 11362, kSentinel}}, {{637, 11364, kSentinel}}, {{837, 921, 953, 8126}}, {{891, 1021, kSentinel}}, // NOLINT
- {{893, 1023, kSentinel}}, {{902, 940, kSentinel}}, {{904, 941, kSentinel}}, {{906, 943, kSentinel}}, // NOLINT
- {{908, 972, kSentinel}}, {{910, 973, kSentinel}}, {{911, 974, kSentinel}}, {{913, 945, kSentinel}}, // NOLINT
- {{914, 946, 976, kSentinel}}, {{915, 947, kSentinel}}, {{916, 948, kSentinel}}, {{917, 949, 1013, kSentinel}}, // NOLINT
- {{918, 950, kSentinel}}, {{919, 951, kSentinel}}, {{920, 952, 977, kSentinel}}, {{922, 954, 1008, kSentinel}}, // NOLINT
- {{923, 955, kSentinel}}, {{925, 957, kSentinel}}, {{927, 959, kSentinel}}, {{928, 960, 982, kSentinel}}, // NOLINT
- {{929, 961, 1009, kSentinel}}, {{931, 962, 963, kSentinel}}, {{932, 964, kSentinel}}, {{933, 965, kSentinel}}, // NOLINT
- {{934, 966, 981, kSentinel}}, {{935, 967, kSentinel}}, {{939, 971, kSentinel}}, {{984, 985, kSentinel}}, // NOLINT
+ {{575, 11390, kSentinel}}, {{576, 11391, kSentinel}}, {{577, 578, kSentinel}}, {{580, 649, kSentinel}}, // NOLINT
+ {{581, 652, kSentinel}}, {{582, 583, kSentinel}}, {{584, 585, kSentinel}}, {{586, 587, kSentinel}}, // NOLINT
+ {{588, 589, kSentinel}}, {{590, 591, kSentinel}}, {{592, 11375, kSentinel}}, {{593, 11373, kSentinel}}, // NOLINT
+ {{594, 11376, kSentinel}}, {{613, 42893, kSentinel}}, {{614, 42922, kSentinel}}, {{619, 11362, kSentinel}}, // NOLINT
+ {{625, 11374, kSentinel}}, {{637, 11364, kSentinel}}, {{837, 921, 953, 8126}}, {{880, 881, kSentinel}}, // NOLINT
+ {{882, 883, kSentinel}}, {{886, 887, kSentinel}}, {{891, 1021, kSentinel}}, {{893, 1023, kSentinel}}, // NOLINT
+ {{902, 940, kSentinel}}, {{904, 941, kSentinel}}, {{906, 943, kSentinel}}, {{908, 972, kSentinel}}, // NOLINT
+ {{910, 973, kSentinel}}, {{911, 974, kSentinel}}, {{913, 945, kSentinel}}, {{914, 946, 976, kSentinel}}, // NOLINT
+ {{915, 947, kSentinel}}, {{916, 948, kSentinel}}, {{917, 949, 1013, kSentinel}}, {{918, 950, kSentinel}}, // NOLINT
+ {{919, 951, kSentinel}}, {{920, 952, 977, kSentinel}}, {{922, 954, 1008, kSentinel}}, {{923, 955, kSentinel}}, // NOLINT
+ {{925, 957, kSentinel}}, {{927, 959, kSentinel}}, {{928, 960, 982, kSentinel}}, {{929, 961, 1009, kSentinel}}, // NOLINT
+ {{931, 962, 963, kSentinel}}, {{932, 964, kSentinel}}, {{933, 965, kSentinel}}, {{934, 966, 981, kSentinel}}, // NOLINT
+ {{935, 967, kSentinel}}, {{939, 971, kSentinel}}, {{975, 983, kSentinel}}, {{984, 985, kSentinel}}, // NOLINT
{{986, 987, kSentinel}}, {{988, 989, kSentinel}}, {{990, 991, kSentinel}}, {{992, 993, kSentinel}}, // NOLINT
{{994, 995, kSentinel}}, {{996, 997, kSentinel}}, {{998, 999, kSentinel}}, {{1000, 1001, kSentinel}}, // NOLINT
{{1002, 1003, kSentinel}}, {{1004, 1005, kSentinel}}, {{1006, 1007, kSentinel}}, {{1010, 1017, kSentinel}}, // NOLINT
@@ -1274,38 +1443,42 @@ static const MultiCharacterSpecialCase<4> kEcma262UnCanonicalizeMultiStrings0[46
{{1276, 1277, kSentinel}}, {{1278, 1279, kSentinel}}, {{1280, 1281, kSentinel}}, {{1282, 1283, kSentinel}}, // NOLINT
{{1284, 1285, kSentinel}}, {{1286, 1287, kSentinel}}, {{1288, 1289, kSentinel}}, {{1290, 1291, kSentinel}}, // NOLINT
{{1292, 1293, kSentinel}}, {{1294, 1295, kSentinel}}, {{1296, 1297, kSentinel}}, {{1298, 1299, kSentinel}}, // NOLINT
- {{1329, 1377, kSentinel}}, {{1366, 1414, kSentinel}}, {{4256, 11520, kSentinel}}, {{4293, 11557, kSentinel}}, // NOLINT
- {{7549, 11363, kSentinel}}, {{7680, 7681, kSentinel}}, {{7682, 7683, kSentinel}}, {{7684, 7685, kSentinel}}, // NOLINT
- {{7686, 7687, kSentinel}}, {{7688, 7689, kSentinel}}, {{7690, 7691, kSentinel}}, {{7692, 7693, kSentinel}}, // NOLINT
- {{7694, 7695, kSentinel}}, {{7696, 7697, kSentinel}}, {{7698, 7699, kSentinel}}, {{7700, 7701, kSentinel}}, // NOLINT
- {{7702, 7703, kSentinel}}, {{7704, 7705, kSentinel}}, {{7706, 7707, kSentinel}}, {{7708, 7709, kSentinel}}, // NOLINT
- {{7710, 7711, kSentinel}}, {{7712, 7713, kSentinel}}, {{7714, 7715, kSentinel}}, {{7716, 7717, kSentinel}}, // NOLINT
- {{7718, 7719, kSentinel}}, {{7720, 7721, kSentinel}}, {{7722, 7723, kSentinel}}, {{7724, 7725, kSentinel}}, // NOLINT
- {{7726, 7727, kSentinel}}, {{7728, 7729, kSentinel}}, {{7730, 7731, kSentinel}}, {{7732, 7733, kSentinel}}, // NOLINT
- {{7734, 7735, kSentinel}}, {{7736, 7737, kSentinel}}, {{7738, 7739, kSentinel}}, {{7740, 7741, kSentinel}}, // NOLINT
- {{7742, 7743, kSentinel}}, {{7744, 7745, kSentinel}}, {{7746, 7747, kSentinel}}, {{7748, 7749, kSentinel}}, // NOLINT
- {{7750, 7751, kSentinel}}, {{7752, 7753, kSentinel}}, {{7754, 7755, kSentinel}}, {{7756, 7757, kSentinel}}, // NOLINT
- {{7758, 7759, kSentinel}}, {{7760, 7761, kSentinel}}, {{7762, 7763, kSentinel}}, {{7764, 7765, kSentinel}}, // NOLINT
- {{7766, 7767, kSentinel}}, {{7768, 7769, kSentinel}}, {{7770, 7771, kSentinel}}, {{7772, 7773, kSentinel}}, // NOLINT
- {{7774, 7775, kSentinel}}, {{7776, 7777, 7835, kSentinel}}, {{7778, 7779, kSentinel}}, {{7780, 7781, kSentinel}}, // NOLINT
- {{7782, 7783, kSentinel}}, {{7784, 7785, kSentinel}}, {{7786, 7787, kSentinel}}, {{7788, 7789, kSentinel}}, // NOLINT
- {{7790, 7791, kSentinel}}, {{7792, 7793, kSentinel}}, {{7794, 7795, kSentinel}}, {{7796, 7797, kSentinel}}, // NOLINT
- {{7798, 7799, kSentinel}}, {{7800, 7801, kSentinel}}, {{7802, 7803, kSentinel}}, {{7804, 7805, kSentinel}}, // NOLINT
- {{7806, 7807, kSentinel}}, {{7808, 7809, kSentinel}}, {{7810, 7811, kSentinel}}, {{7812, 7813, kSentinel}}, // NOLINT
- {{7814, 7815, kSentinel}}, {{7816, 7817, kSentinel}}, {{7818, 7819, kSentinel}}, {{7820, 7821, kSentinel}}, // NOLINT
- {{7822, 7823, kSentinel}}, {{7824, 7825, kSentinel}}, {{7826, 7827, kSentinel}}, {{7828, 7829, kSentinel}}, // NOLINT
- {{7840, 7841, kSentinel}}, {{7842, 7843, kSentinel}}, {{7844, 7845, kSentinel}}, {{7846, 7847, kSentinel}}, // NOLINT
- {{7848, 7849, kSentinel}}, {{7850, 7851, kSentinel}}, {{7852, 7853, kSentinel}}, {{7854, 7855, kSentinel}}, // NOLINT
- {{7856, 7857, kSentinel}}, {{7858, 7859, kSentinel}}, {{7860, 7861, kSentinel}}, {{7862, 7863, kSentinel}}, // NOLINT
- {{7864, 7865, kSentinel}}, {{7866, 7867, kSentinel}}, {{7868, 7869, kSentinel}}, {{7870, 7871, kSentinel}}, // NOLINT
- {{7872, 7873, kSentinel}}, {{7874, 7875, kSentinel}}, {{7876, 7877, kSentinel}}, {{7878, 7879, kSentinel}}, // NOLINT
- {{7880, 7881, kSentinel}}, {{7882, 7883, kSentinel}}, {{7884, 7885, kSentinel}}, {{7886, 7887, kSentinel}}, // NOLINT
- {{7888, 7889, kSentinel}}, {{7890, 7891, kSentinel}}, {{7892, 7893, kSentinel}}, {{7894, 7895, kSentinel}}, // NOLINT
- {{7896, 7897, kSentinel}}, {{7898, 7899, kSentinel}}, {{7900, 7901, kSentinel}}, {{7902, 7903, kSentinel}}, // NOLINT
- {{7904, 7905, kSentinel}}, {{7906, 7907, kSentinel}}, {{7908, 7909, kSentinel}}, {{7910, 7911, kSentinel}}, // NOLINT
- {{7912, 7913, kSentinel}}, {{7914, 7915, kSentinel}}, {{7916, 7917, kSentinel}}, {{7918, 7919, kSentinel}}, // NOLINT
- {{7920, 7921, kSentinel}}, {{7922, 7923, kSentinel}}, {{7924, 7925, kSentinel}}, {{7926, 7927, kSentinel}}, // NOLINT
- {{7928, 7929, kSentinel}}, {{7936, 7944, kSentinel}}, {{7943, 7951, kSentinel}}, {{7952, 7960, kSentinel}}, // NOLINT
+ {{1300, 1301, kSentinel}}, {{1302, 1303, kSentinel}}, {{1304, 1305, kSentinel}}, {{1306, 1307, kSentinel}}, // NOLINT
+ {{1308, 1309, kSentinel}}, {{1310, 1311, kSentinel}}, {{1312, 1313, kSentinel}}, {{1314, 1315, kSentinel}}, // NOLINT
+ {{1316, 1317, kSentinel}}, {{1318, 1319, kSentinel}}, {{1329, 1377, kSentinel}}, {{1366, 1414, kSentinel}}, // NOLINT
+ {{4256, 11520, kSentinel}}, {{4293, 11557, kSentinel}}, {{4295, 11559, kSentinel}}, {{4301, 11565, kSentinel}}, // NOLINT
+ {{7545, 42877, kSentinel}}, {{7549, 11363, kSentinel}}, {{7680, 7681, kSentinel}}, {{7682, 7683, kSentinel}}, // NOLINT
+ {{7684, 7685, kSentinel}}, {{7686, 7687, kSentinel}}, {{7688, 7689, kSentinel}}, {{7690, 7691, kSentinel}}, // NOLINT
+ {{7692, 7693, kSentinel}}, {{7694, 7695, kSentinel}}, {{7696, 7697, kSentinel}}, {{7698, 7699, kSentinel}}, // NOLINT
+ {{7700, 7701, kSentinel}}, {{7702, 7703, kSentinel}}, {{7704, 7705, kSentinel}}, {{7706, 7707, kSentinel}}, // NOLINT
+ {{7708, 7709, kSentinel}}, {{7710, 7711, kSentinel}}, {{7712, 7713, kSentinel}}, {{7714, 7715, kSentinel}}, // NOLINT
+ {{7716, 7717, kSentinel}}, {{7718, 7719, kSentinel}}, {{7720, 7721, kSentinel}}, {{7722, 7723, kSentinel}}, // NOLINT
+ {{7724, 7725, kSentinel}}, {{7726, 7727, kSentinel}}, {{7728, 7729, kSentinel}}, {{7730, 7731, kSentinel}}, // NOLINT
+ {{7732, 7733, kSentinel}}, {{7734, 7735, kSentinel}}, {{7736, 7737, kSentinel}}, {{7738, 7739, kSentinel}}, // NOLINT
+ {{7740, 7741, kSentinel}}, {{7742, 7743, kSentinel}}, {{7744, 7745, kSentinel}}, {{7746, 7747, kSentinel}}, // NOLINT
+ {{7748, 7749, kSentinel}}, {{7750, 7751, kSentinel}}, {{7752, 7753, kSentinel}}, {{7754, 7755, kSentinel}}, // NOLINT
+ {{7756, 7757, kSentinel}}, {{7758, 7759, kSentinel}}, {{7760, 7761, kSentinel}}, {{7762, 7763, kSentinel}}, // NOLINT
+ {{7764, 7765, kSentinel}}, {{7766, 7767, kSentinel}}, {{7768, 7769, kSentinel}}, {{7770, 7771, kSentinel}}, // NOLINT
+ {{7772, 7773, kSentinel}}, {{7774, 7775, kSentinel}}, {{7776, 7777, 7835, kSentinel}}, {{7778, 7779, kSentinel}}, // NOLINT
+ {{7780, 7781, kSentinel}}, {{7782, 7783, kSentinel}}, {{7784, 7785, kSentinel}}, {{7786, 7787, kSentinel}}, // NOLINT
+ {{7788, 7789, kSentinel}}, {{7790, 7791, kSentinel}}, {{7792, 7793, kSentinel}}, {{7794, 7795, kSentinel}}, // NOLINT
+ {{7796, 7797, kSentinel}}, {{7798, 7799, kSentinel}}, {{7800, 7801, kSentinel}}, {{7802, 7803, kSentinel}}, // NOLINT
+ {{7804, 7805, kSentinel}}, {{7806, 7807, kSentinel}}, {{7808, 7809, kSentinel}}, {{7810, 7811, kSentinel}}, // NOLINT
+ {{7812, 7813, kSentinel}}, {{7814, 7815, kSentinel}}, {{7816, 7817, kSentinel}}, {{7818, 7819, kSentinel}}, // NOLINT
+ {{7820, 7821, kSentinel}}, {{7822, 7823, kSentinel}}, {{7824, 7825, kSentinel}}, {{7826, 7827, kSentinel}}, // NOLINT
+ {{7828, 7829, kSentinel}}, {{7840, 7841, kSentinel}}, {{7842, 7843, kSentinel}}, {{7844, 7845, kSentinel}}, // NOLINT
+ {{7846, 7847, kSentinel}}, {{7848, 7849, kSentinel}}, {{7850, 7851, kSentinel}}, {{7852, 7853, kSentinel}}, // NOLINT
+ {{7854, 7855, kSentinel}}, {{7856, 7857, kSentinel}}, {{7858, 7859, kSentinel}}, {{7860, 7861, kSentinel}}, // NOLINT
+ {{7862, 7863, kSentinel}}, {{7864, 7865, kSentinel}}, {{7866, 7867, kSentinel}}, {{7868, 7869, kSentinel}}, // NOLINT
+ {{7870, 7871, kSentinel}}, {{7872, 7873, kSentinel}}, {{7874, 7875, kSentinel}}, {{7876, 7877, kSentinel}}, // NOLINT
+ {{7878, 7879, kSentinel}}, {{7880, 7881, kSentinel}}, {{7882, 7883, kSentinel}}, {{7884, 7885, kSentinel}}, // NOLINT
+ {{7886, 7887, kSentinel}}, {{7888, 7889, kSentinel}}, {{7890, 7891, kSentinel}}, {{7892, 7893, kSentinel}}, // NOLINT
+ {{7894, 7895, kSentinel}}, {{7896, 7897, kSentinel}}, {{7898, 7899, kSentinel}}, {{7900, 7901, kSentinel}}, // NOLINT
+ {{7902, 7903, kSentinel}}, {{7904, 7905, kSentinel}}, {{7906, 7907, kSentinel}}, {{7908, 7909, kSentinel}}, // NOLINT
+ {{7910, 7911, kSentinel}}, {{7912, 7913, kSentinel}}, {{7914, 7915, kSentinel}}, {{7916, 7917, kSentinel}}, // NOLINT
+ {{7918, 7919, kSentinel}}, {{7920, 7921, kSentinel}}, {{7922, 7923, kSentinel}}, {{7924, 7925, kSentinel}}, // NOLINT
+ {{7926, 7927, kSentinel}}, {{7928, 7929, kSentinel}}, {{7930, 7931, kSentinel}}, {{7932, 7933, kSentinel}}, // NOLINT
+ {{7934, 7935, kSentinel}}, {{7936, 7944, kSentinel}}, {{7943, 7951, kSentinel}}, {{7952, 7960, kSentinel}}, // NOLINT
{{7957, 7965, kSentinel}}, {{7968, 7976, kSentinel}}, {{7975, 7983, kSentinel}}, {{7984, 7992, kSentinel}}, // NOLINT
{{7991, 7999, kSentinel}}, {{8000, 8008, kSentinel}}, {{8005, 8013, kSentinel}}, {{8017, 8025, kSentinel}}, // NOLINT
{{8019, 8027, kSentinel}}, {{8021, 8029, kSentinel}}, {{8023, 8031, kSentinel}}, {{8032, 8040, kSentinel}}, // NOLINT
@@ -1315,8 +1488,8 @@ static const MultiCharacterSpecialCase<4> kEcma262UnCanonicalizeMultiStrings0[46
{{8061, 8187, kSentinel}}, {{8112, 8120, kSentinel}}, {{8113, 8121, kSentinel}}, {{8144, 8152, kSentinel}}, // NOLINT
{{8145, 8153, kSentinel}}, {{8160, 8168, kSentinel}}, {{8161, 8169, kSentinel}}, {{8165, 8172, kSentinel}}, // NOLINT
{{kSentinel}} }; // NOLINT
-static const uint16_t kEcma262UnCanonicalizeTable0Size = 945; // NOLINT
-static const int32_t kEcma262UnCanonicalizeTable0[1890] = {
+static const uint16_t kEcma262UnCanonicalizeTable0Size = 990; // NOLINT
+static const int32_t kEcma262UnCanonicalizeTable0[1980] = {
1073741889, 1, 90, 5, 1073741921, 1, 122, 5, 181, 9, 1073742016, 13, 214, 17, 1073742040, 21, // NOLINT
222, 25, 1073742048, 13, 246, 17, 1073742072, 21, 254, 25, 255, 29, 256, 33, 257, 33, // NOLINT
258, 37, 259, 37, 260, 41, 261, 41, 262, 45, 263, 45, 264, 49, 265, 49, // NOLINT
@@ -1355,127 +1528,187 @@ static const int32_t kEcma262UnCanonicalizeTable0[1890] = {
539, 597, 540, 601, 541, 601, 542, 605, 543, 605, 544, 365, 546, 609, 547, 609, // NOLINT
548, 613, 549, 613, 550, 617, 551, 617, 552, 621, 553, 621, 554, 625, 555, 625, // NOLINT
556, 629, 557, 629, 558, 633, 559, 633, 560, 637, 561, 637, 562, 641, 563, 641, // NOLINT
- 570, 645, 571, 649, 572, 649, 573, 353, 574, 653, 577, 657, 578, 657, 579, 277, // NOLINT
- 580, 661, 581, 665, 582, 669, 583, 669, 584, 673, 585, 673, 586, 677, 587, 677, // NOLINT
- 588, 681, 589, 681, 590, 685, 591, 685, 595, 281, 596, 293, 1073742422, 301, 599, 305, // NOLINT
- 601, 317, 603, 321, 608, 329, 611, 333, 616, 345, 617, 341, 619, 689, 623, 357, // NOLINT
- 626, 361, 629, 369, 637, 693, 640, 385, 643, 393, 648, 401, 649, 661, 1073742474, 409, // NOLINT
- 651, 413, 652, 665, 658, 425, 837, 697, 1073742715, 701, 893, 705, 902, 709, 1073742728, 713, // NOLINT
- 906, 717, 908, 721, 1073742734, 725, 911, 729, 913, 733, 914, 737, 1073742739, 741, 916, 745, // NOLINT
- 917, 749, 1073742742, 753, 919, 757, 920, 761, 921, 697, 922, 765, 923, 769, 924, 9, // NOLINT
- 1073742749, 773, 927, 777, 928, 781, 929, 785, 931, 789, 1073742756, 793, 933, 797, 934, 801, // NOLINT
- 1073742759, 805, 939, 809, 940, 709, 1073742765, 713, 943, 717, 945, 733, 946, 737, 1073742771, 741, // NOLINT
- 948, 745, 949, 749, 1073742774, 753, 951, 757, 952, 761, 953, 697, 954, 765, 955, 769, // NOLINT
- 956, 9, 1073742781, 773, 959, 777, 960, 781, 961, 785, 962, 789, 963, 789, 1073742788, 793, // NOLINT
- 965, 797, 966, 801, 1073742791, 805, 971, 809, 972, 721, 1073742797, 725, 974, 729, 976, 737, // NOLINT
- 977, 761, 981, 801, 982, 781, 984, 813, 985, 813, 986, 817, 987, 817, 988, 821, // NOLINT
- 989, 821, 990, 825, 991, 825, 992, 829, 993, 829, 994, 833, 995, 833, 996, 837, // NOLINT
- 997, 837, 998, 841, 999, 841, 1000, 845, 1001, 845, 1002, 849, 1003, 849, 1004, 853, // NOLINT
- 1005, 853, 1006, 857, 1007, 857, 1008, 765, 1009, 785, 1010, 861, 1013, 749, 1015, 865, // NOLINT
- 1016, 865, 1017, 861, 1018, 869, 1019, 869, 1073742845, 701, 1023, 705, 1073742848, 873, 1039, 877, // NOLINT
- 1073742864, 881, 1071, 885, 1073742896, 881, 1103, 885, 1073742928, 873, 1119, 877, 1120, 889, 1121, 889, // NOLINT
- 1122, 893, 1123, 893, 1124, 897, 1125, 897, 1126, 901, 1127, 901, 1128, 905, 1129, 905, // NOLINT
- 1130, 909, 1131, 909, 1132, 913, 1133, 913, 1134, 917, 1135, 917, 1136, 921, 1137, 921, // NOLINT
- 1138, 925, 1139, 925, 1140, 929, 1141, 929, 1142, 933, 1143, 933, 1144, 937, 1145, 937, // NOLINT
- 1146, 941, 1147, 941, 1148, 945, 1149, 945, 1150, 949, 1151, 949, 1152, 953, 1153, 953, // NOLINT
- 1162, 957, 1163, 957, 1164, 961, 1165, 961, 1166, 965, 1167, 965, 1168, 969, 1169, 969, // NOLINT
- 1170, 973, 1171, 973, 1172, 977, 1173, 977, 1174, 981, 1175, 981, 1176, 985, 1177, 985, // NOLINT
- 1178, 989, 1179, 989, 1180, 993, 1181, 993, 1182, 997, 1183, 997, 1184, 1001, 1185, 1001, // NOLINT
- 1186, 1005, 1187, 1005, 1188, 1009, 1189, 1009, 1190, 1013, 1191, 1013, 1192, 1017, 1193, 1017, // NOLINT
- 1194, 1021, 1195, 1021, 1196, 1025, 1197, 1025, 1198, 1029, 1199, 1029, 1200, 1033, 1201, 1033, // NOLINT
- 1202, 1037, 1203, 1037, 1204, 1041, 1205, 1041, 1206, 1045, 1207, 1045, 1208, 1049, 1209, 1049, // NOLINT
- 1210, 1053, 1211, 1053, 1212, 1057, 1213, 1057, 1214, 1061, 1215, 1061, 1216, 1065, 1217, 1069, // NOLINT
- 1218, 1069, 1219, 1073, 1220, 1073, 1221, 1077, 1222, 1077, 1223, 1081, 1224, 1081, 1225, 1085, // NOLINT
- 1226, 1085, 1227, 1089, 1228, 1089, 1229, 1093, 1230, 1093, 1231, 1065, 1232, 1097, 1233, 1097, // NOLINT
- 1234, 1101, 1235, 1101, 1236, 1105, 1237, 1105, 1238, 1109, 1239, 1109, 1240, 1113, 1241, 1113, // NOLINT
- 1242, 1117, 1243, 1117, 1244, 1121, 1245, 1121, 1246, 1125, 1247, 1125, 1248, 1129, 1249, 1129, // NOLINT
- 1250, 1133, 1251, 1133, 1252, 1137, 1253, 1137, 1254, 1141, 1255, 1141, 1256, 1145, 1257, 1145, // NOLINT
- 1258, 1149, 1259, 1149, 1260, 1153, 1261, 1153, 1262, 1157, 1263, 1157, 1264, 1161, 1265, 1161, // NOLINT
- 1266, 1165, 1267, 1165, 1268, 1169, 1269, 1169, 1270, 1173, 1271, 1173, 1272, 1177, 1273, 1177, // NOLINT
- 1274, 1181, 1275, 1181, 1276, 1185, 1277, 1185, 1278, 1189, 1279, 1189, 1280, 1193, 1281, 1193, // NOLINT
- 1282, 1197, 1283, 1197, 1284, 1201, 1285, 1201, 1286, 1205, 1287, 1205, 1288, 1209, 1289, 1209, // NOLINT
- 1290, 1213, 1291, 1213, 1292, 1217, 1293, 1217, 1294, 1221, 1295, 1221, 1296, 1225, 1297, 1225, // NOLINT
- 1298, 1229, 1299, 1229, 1073743153, 1233, 1366, 1237, 1073743201, 1233, 1414, 1237, 1073746080, 1241, 4293, 1245, // NOLINT
- 7549, 1249, 7680, 1253, 7681, 1253, 7682, 1257, 7683, 1257, 7684, 1261, 7685, 1261, 7686, 1265, // NOLINT
- 7687, 1265, 7688, 1269, 7689, 1269, 7690, 1273, 7691, 1273, 7692, 1277, 7693, 1277, 7694, 1281, // NOLINT
- 7695, 1281, 7696, 1285, 7697, 1285, 7698, 1289, 7699, 1289, 7700, 1293, 7701, 1293, 7702, 1297, // NOLINT
- 7703, 1297, 7704, 1301, 7705, 1301, 7706, 1305, 7707, 1305, 7708, 1309, 7709, 1309, 7710, 1313, // NOLINT
- 7711, 1313, 7712, 1317, 7713, 1317, 7714, 1321, 7715, 1321, 7716, 1325, 7717, 1325, 7718, 1329, // NOLINT
- 7719, 1329, 7720, 1333, 7721, 1333, 7722, 1337, 7723, 1337, 7724, 1341, 7725, 1341, 7726, 1345, // NOLINT
- 7727, 1345, 7728, 1349, 7729, 1349, 7730, 1353, 7731, 1353, 7732, 1357, 7733, 1357, 7734, 1361, // NOLINT
- 7735, 1361, 7736, 1365, 7737, 1365, 7738, 1369, 7739, 1369, 7740, 1373, 7741, 1373, 7742, 1377, // NOLINT
- 7743, 1377, 7744, 1381, 7745, 1381, 7746, 1385, 7747, 1385, 7748, 1389, 7749, 1389, 7750, 1393, // NOLINT
- 7751, 1393, 7752, 1397, 7753, 1397, 7754, 1401, 7755, 1401, 7756, 1405, 7757, 1405, 7758, 1409, // NOLINT
- 7759, 1409, 7760, 1413, 7761, 1413, 7762, 1417, 7763, 1417, 7764, 1421, 7765, 1421, 7766, 1425, // NOLINT
- 7767, 1425, 7768, 1429, 7769, 1429, 7770, 1433, 7771, 1433, 7772, 1437, 7773, 1437, 7774, 1441, // NOLINT
- 7775, 1441, 7776, 1445, 7777, 1445, 7778, 1449, 7779, 1449, 7780, 1453, 7781, 1453, 7782, 1457, // NOLINT
- 7783, 1457, 7784, 1461, 7785, 1461, 7786, 1465, 7787, 1465, 7788, 1469, 7789, 1469, 7790, 1473, // NOLINT
- 7791, 1473, 7792, 1477, 7793, 1477, 7794, 1481, 7795, 1481, 7796, 1485, 7797, 1485, 7798, 1489, // NOLINT
- 7799, 1489, 7800, 1493, 7801, 1493, 7802, 1497, 7803, 1497, 7804, 1501, 7805, 1501, 7806, 1505, // NOLINT
- 7807, 1505, 7808, 1509, 7809, 1509, 7810, 1513, 7811, 1513, 7812, 1517, 7813, 1517, 7814, 1521, // NOLINT
- 7815, 1521, 7816, 1525, 7817, 1525, 7818, 1529, 7819, 1529, 7820, 1533, 7821, 1533, 7822, 1537, // NOLINT
- 7823, 1537, 7824, 1541, 7825, 1541, 7826, 1545, 7827, 1545, 7828, 1549, 7829, 1549, 7835, 1445, // NOLINT
- 7840, 1553, 7841, 1553, 7842, 1557, 7843, 1557, 7844, 1561, 7845, 1561, 7846, 1565, 7847, 1565, // NOLINT
- 7848, 1569, 7849, 1569, 7850, 1573, 7851, 1573, 7852, 1577, 7853, 1577, 7854, 1581, 7855, 1581, // NOLINT
- 7856, 1585, 7857, 1585, 7858, 1589, 7859, 1589, 7860, 1593, 7861, 1593, 7862, 1597, 7863, 1597, // NOLINT
- 7864, 1601, 7865, 1601, 7866, 1605, 7867, 1605, 7868, 1609, 7869, 1609, 7870, 1613, 7871, 1613, // NOLINT
- 7872, 1617, 7873, 1617, 7874, 1621, 7875, 1621, 7876, 1625, 7877, 1625, 7878, 1629, 7879, 1629, // NOLINT
- 7880, 1633, 7881, 1633, 7882, 1637, 7883, 1637, 7884, 1641, 7885, 1641, 7886, 1645, 7887, 1645, // NOLINT
- 7888, 1649, 7889, 1649, 7890, 1653, 7891, 1653, 7892, 1657, 7893, 1657, 7894, 1661, 7895, 1661, // NOLINT
- 7896, 1665, 7897, 1665, 7898, 1669, 7899, 1669, 7900, 1673, 7901, 1673, 7902, 1677, 7903, 1677, // NOLINT
- 7904, 1681, 7905, 1681, 7906, 1685, 7907, 1685, 7908, 1689, 7909, 1689, 7910, 1693, 7911, 1693, // NOLINT
- 7912, 1697, 7913, 1697, 7914, 1701, 7915, 1701, 7916, 1705, 7917, 1705, 7918, 1709, 7919, 1709, // NOLINT
- 7920, 1713, 7921, 1713, 7922, 1717, 7923, 1717, 7924, 1721, 7925, 1721, 7926, 1725, 7927, 1725, // NOLINT
- 7928, 1729, 7929, 1729, 1073749760, 1733, 7943, 1737, 1073749768, 1733, 7951, 1737, 1073749776, 1741, 7957, 1745, // NOLINT
- 1073749784, 1741, 7965, 1745, 1073749792, 1749, 7975, 1753, 1073749800, 1749, 7983, 1753, 1073749808, 1757, 7991, 1761, // NOLINT
- 1073749816, 1757, 7999, 1761, 1073749824, 1765, 8005, 1769, 1073749832, 1765, 8013, 1769, 8017, 1773, 8019, 1777, // NOLINT
- 8021, 1781, 8023, 1785, 8025, 1773, 8027, 1777, 8029, 1781, 8031, 1785, 1073749856, 1789, 8039, 1793, // NOLINT
- 1073749864, 1789, 8047, 1793, 1073749872, 1797, 8049, 1801, 1073749874, 1805, 8053, 1809, 1073749878, 1813, 8055, 1817, // NOLINT
- 1073749880, 1821, 8057, 1825, 1073749882, 1829, 8059, 1833, 1073749884, 1837, 8061, 1841, 1073749936, 1845, 8113, 1849, // NOLINT
- 1073749944, 1845, 8121, 1849, 1073749946, 1797, 8123, 1801, 8126, 697, 1073749960, 1805, 8139, 1809, 1073749968, 1853, // NOLINT
- 8145, 1857, 1073749976, 1853, 8153, 1857, 1073749978, 1813, 8155, 1817, 1073749984, 1861, 8161, 1865, 8165, 1869, // NOLINT
- 1073749992, 1861, 8169, 1865, 1073749994, 1829, 8171, 1833, 8172, 1869, 1073750008, 1821, 8185, 1825, 1073750010, 1837, // NOLINT
- 8187, 1841 }; // NOLINT
-static const uint16_t kEcma262UnCanonicalizeMultiStrings0Size = 469; // NOLINT
-static const MultiCharacterSpecialCase<2> kEcma262UnCanonicalizeMultiStrings1[71] = { // NOLINT
+ 570, 645, 571, 649, 572, 649, 573, 353, 574, 653, 1073742399, 657, 576, 661, 577, 665, // NOLINT
+ 578, 665, 579, 277, 580, 669, 581, 673, 582, 677, 583, 677, 584, 681, 585, 681, // NOLINT
+ 586, 685, 587, 685, 588, 689, 589, 689, 590, 693, 591, 693, 592, 697, 593, 701, // NOLINT
+ 594, 705, 595, 281, 596, 293, 1073742422, 301, 599, 305, 601, 317, 603, 321, 608, 329, // NOLINT
+ 611, 333, 613, 709, 614, 713, 616, 345, 617, 341, 619, 717, 623, 357, 625, 721, // NOLINT
+ 626, 361, 629, 369, 637, 725, 640, 385, 643, 393, 648, 401, 649, 669, 1073742474, 409, // NOLINT
+ 651, 413, 652, 673, 658, 425, 837, 729, 880, 733, 881, 733, 882, 737, 883, 737, // NOLINT
+ 886, 741, 887, 741, 1073742715, 745, 893, 749, 902, 753, 1073742728, 757, 906, 761, 908, 765, // NOLINT
+ 1073742734, 769, 911, 773, 913, 777, 914, 781, 1073742739, 785, 916, 789, 917, 793, 1073742742, 797, // NOLINT
+ 919, 801, 920, 805, 921, 729, 922, 809, 923, 813, 924, 9, 1073742749, 817, 927, 821, // NOLINT
+ 928, 825, 929, 829, 931, 833, 1073742756, 837, 933, 841, 934, 845, 1073742759, 849, 939, 853, // NOLINT
+ 940, 753, 1073742765, 757, 943, 761, 945, 777, 946, 781, 1073742771, 785, 948, 789, 949, 793, // NOLINT
+ 1073742774, 797, 951, 801, 952, 805, 953, 729, 954, 809, 955, 813, 956, 9, 1073742781, 817, // NOLINT
+ 959, 821, 960, 825, 961, 829, 962, 833, 963, 833, 1073742788, 837, 965, 841, 966, 845, // NOLINT
+ 1073742791, 849, 971, 853, 972, 765, 1073742797, 769, 974, 773, 975, 857, 976, 781, 977, 805, // NOLINT
+ 981, 845, 982, 825, 983, 857, 984, 861, 985, 861, 986, 865, 987, 865, 988, 869, // NOLINT
+ 989, 869, 990, 873, 991, 873, 992, 877, 993, 877, 994, 881, 995, 881, 996, 885, // NOLINT
+ 997, 885, 998, 889, 999, 889, 1000, 893, 1001, 893, 1002, 897, 1003, 897, 1004, 901, // NOLINT
+ 1005, 901, 1006, 905, 1007, 905, 1008, 809, 1009, 829, 1010, 909, 1013, 793, 1015, 913, // NOLINT
+ 1016, 913, 1017, 909, 1018, 917, 1019, 917, 1073742845, 745, 1023, 749, 1073742848, 921, 1039, 925, // NOLINT
+ 1073742864, 929, 1071, 933, 1073742896, 929, 1103, 933, 1073742928, 921, 1119, 925, 1120, 937, 1121, 937, // NOLINT
+ 1122, 941, 1123, 941, 1124, 945, 1125, 945, 1126, 949, 1127, 949, 1128, 953, 1129, 953, // NOLINT
+ 1130, 957, 1131, 957, 1132, 961, 1133, 961, 1134, 965, 1135, 965, 1136, 969, 1137, 969, // NOLINT
+ 1138, 973, 1139, 973, 1140, 977, 1141, 977, 1142, 981, 1143, 981, 1144, 985, 1145, 985, // NOLINT
+ 1146, 989, 1147, 989, 1148, 993, 1149, 993, 1150, 997, 1151, 997, 1152, 1001, 1153, 1001, // NOLINT
+ 1162, 1005, 1163, 1005, 1164, 1009, 1165, 1009, 1166, 1013, 1167, 1013, 1168, 1017, 1169, 1017, // NOLINT
+ 1170, 1021, 1171, 1021, 1172, 1025, 1173, 1025, 1174, 1029, 1175, 1029, 1176, 1033, 1177, 1033, // NOLINT
+ 1178, 1037, 1179, 1037, 1180, 1041, 1181, 1041, 1182, 1045, 1183, 1045, 1184, 1049, 1185, 1049, // NOLINT
+ 1186, 1053, 1187, 1053, 1188, 1057, 1189, 1057, 1190, 1061, 1191, 1061, 1192, 1065, 1193, 1065, // NOLINT
+ 1194, 1069, 1195, 1069, 1196, 1073, 1197, 1073, 1198, 1077, 1199, 1077, 1200, 1081, 1201, 1081, // NOLINT
+ 1202, 1085, 1203, 1085, 1204, 1089, 1205, 1089, 1206, 1093, 1207, 1093, 1208, 1097, 1209, 1097, // NOLINT
+ 1210, 1101, 1211, 1101, 1212, 1105, 1213, 1105, 1214, 1109, 1215, 1109, 1216, 1113, 1217, 1117, // NOLINT
+ 1218, 1117, 1219, 1121, 1220, 1121, 1221, 1125, 1222, 1125, 1223, 1129, 1224, 1129, 1225, 1133, // NOLINT
+ 1226, 1133, 1227, 1137, 1228, 1137, 1229, 1141, 1230, 1141, 1231, 1113, 1232, 1145, 1233, 1145, // NOLINT
+ 1234, 1149, 1235, 1149, 1236, 1153, 1237, 1153, 1238, 1157, 1239, 1157, 1240, 1161, 1241, 1161, // NOLINT
+ 1242, 1165, 1243, 1165, 1244, 1169, 1245, 1169, 1246, 1173, 1247, 1173, 1248, 1177, 1249, 1177, // NOLINT
+ 1250, 1181, 1251, 1181, 1252, 1185, 1253, 1185, 1254, 1189, 1255, 1189, 1256, 1193, 1257, 1193, // NOLINT
+ 1258, 1197, 1259, 1197, 1260, 1201, 1261, 1201, 1262, 1205, 1263, 1205, 1264, 1209, 1265, 1209, // NOLINT
+ 1266, 1213, 1267, 1213, 1268, 1217, 1269, 1217, 1270, 1221, 1271, 1221, 1272, 1225, 1273, 1225, // NOLINT
+ 1274, 1229, 1275, 1229, 1276, 1233, 1277, 1233, 1278, 1237, 1279, 1237, 1280, 1241, 1281, 1241, // NOLINT
+ 1282, 1245, 1283, 1245, 1284, 1249, 1285, 1249, 1286, 1253, 1287, 1253, 1288, 1257, 1289, 1257, // NOLINT
+ 1290, 1261, 1291, 1261, 1292, 1265, 1293, 1265, 1294, 1269, 1295, 1269, 1296, 1273, 1297, 1273, // NOLINT
+ 1298, 1277, 1299, 1277, 1300, 1281, 1301, 1281, 1302, 1285, 1303, 1285, 1304, 1289, 1305, 1289, // NOLINT
+ 1306, 1293, 1307, 1293, 1308, 1297, 1309, 1297, 1310, 1301, 1311, 1301, 1312, 1305, 1313, 1305, // NOLINT
+ 1314, 1309, 1315, 1309, 1316, 1313, 1317, 1313, 1318, 1317, 1319, 1317, 1073743153, 1321, 1366, 1325, // NOLINT
+ 1073743201, 1321, 1414, 1325, 1073746080, 1329, 4293, 1333, 4295, 1337, 4301, 1341, 7545, 1345, 7549, 1349, // NOLINT
+ 7680, 1353, 7681, 1353, 7682, 1357, 7683, 1357, 7684, 1361, 7685, 1361, 7686, 1365, 7687, 1365, // NOLINT
+ 7688, 1369, 7689, 1369, 7690, 1373, 7691, 1373, 7692, 1377, 7693, 1377, 7694, 1381, 7695, 1381, // NOLINT
+ 7696, 1385, 7697, 1385, 7698, 1389, 7699, 1389, 7700, 1393, 7701, 1393, 7702, 1397, 7703, 1397, // NOLINT
+ 7704, 1401, 7705, 1401, 7706, 1405, 7707, 1405, 7708, 1409, 7709, 1409, 7710, 1413, 7711, 1413, // NOLINT
+ 7712, 1417, 7713, 1417, 7714, 1421, 7715, 1421, 7716, 1425, 7717, 1425, 7718, 1429, 7719, 1429, // NOLINT
+ 7720, 1433, 7721, 1433, 7722, 1437, 7723, 1437, 7724, 1441, 7725, 1441, 7726, 1445, 7727, 1445, // NOLINT
+ 7728, 1449, 7729, 1449, 7730, 1453, 7731, 1453, 7732, 1457, 7733, 1457, 7734, 1461, 7735, 1461, // NOLINT
+ 7736, 1465, 7737, 1465, 7738, 1469, 7739, 1469, 7740, 1473, 7741, 1473, 7742, 1477, 7743, 1477, // NOLINT
+ 7744, 1481, 7745, 1481, 7746, 1485, 7747, 1485, 7748, 1489, 7749, 1489, 7750, 1493, 7751, 1493, // NOLINT
+ 7752, 1497, 7753, 1497, 7754, 1501, 7755, 1501, 7756, 1505, 7757, 1505, 7758, 1509, 7759, 1509, // NOLINT
+ 7760, 1513, 7761, 1513, 7762, 1517, 7763, 1517, 7764, 1521, 7765, 1521, 7766, 1525, 7767, 1525, // NOLINT
+ 7768, 1529, 7769, 1529, 7770, 1533, 7771, 1533, 7772, 1537, 7773, 1537, 7774, 1541, 7775, 1541, // NOLINT
+ 7776, 1545, 7777, 1545, 7778, 1549, 7779, 1549, 7780, 1553, 7781, 1553, 7782, 1557, 7783, 1557, // NOLINT
+ 7784, 1561, 7785, 1561, 7786, 1565, 7787, 1565, 7788, 1569, 7789, 1569, 7790, 1573, 7791, 1573, // NOLINT
+ 7792, 1577, 7793, 1577, 7794, 1581, 7795, 1581, 7796, 1585, 7797, 1585, 7798, 1589, 7799, 1589, // NOLINT
+ 7800, 1593, 7801, 1593, 7802, 1597, 7803, 1597, 7804, 1601, 7805, 1601, 7806, 1605, 7807, 1605, // NOLINT
+ 7808, 1609, 7809, 1609, 7810, 1613, 7811, 1613, 7812, 1617, 7813, 1617, 7814, 1621, 7815, 1621, // NOLINT
+ 7816, 1625, 7817, 1625, 7818, 1629, 7819, 1629, 7820, 1633, 7821, 1633, 7822, 1637, 7823, 1637, // NOLINT
+ 7824, 1641, 7825, 1641, 7826, 1645, 7827, 1645, 7828, 1649, 7829, 1649, 7835, 1545, 7840, 1653, // NOLINT
+ 7841, 1653, 7842, 1657, 7843, 1657, 7844, 1661, 7845, 1661, 7846, 1665, 7847, 1665, 7848, 1669, // NOLINT
+ 7849, 1669, 7850, 1673, 7851, 1673, 7852, 1677, 7853, 1677, 7854, 1681, 7855, 1681, 7856, 1685, // NOLINT
+ 7857, 1685, 7858, 1689, 7859, 1689, 7860, 1693, 7861, 1693, 7862, 1697, 7863, 1697, 7864, 1701, // NOLINT
+ 7865, 1701, 7866, 1705, 7867, 1705, 7868, 1709, 7869, 1709, 7870, 1713, 7871, 1713, 7872, 1717, // NOLINT
+ 7873, 1717, 7874, 1721, 7875, 1721, 7876, 1725, 7877, 1725, 7878, 1729, 7879, 1729, 7880, 1733, // NOLINT
+ 7881, 1733, 7882, 1737, 7883, 1737, 7884, 1741, 7885, 1741, 7886, 1745, 7887, 1745, 7888, 1749, // NOLINT
+ 7889, 1749, 7890, 1753, 7891, 1753, 7892, 1757, 7893, 1757, 7894, 1761, 7895, 1761, 7896, 1765, // NOLINT
+ 7897, 1765, 7898, 1769, 7899, 1769, 7900, 1773, 7901, 1773, 7902, 1777, 7903, 1777, 7904, 1781, // NOLINT
+ 7905, 1781, 7906, 1785, 7907, 1785, 7908, 1789, 7909, 1789, 7910, 1793, 7911, 1793, 7912, 1797, // NOLINT
+ 7913, 1797, 7914, 1801, 7915, 1801, 7916, 1805, 7917, 1805, 7918, 1809, 7919, 1809, 7920, 1813, // NOLINT
+ 7921, 1813, 7922, 1817, 7923, 1817, 7924, 1821, 7925, 1821, 7926, 1825, 7927, 1825, 7928, 1829, // NOLINT
+ 7929, 1829, 7930, 1833, 7931, 1833, 7932, 1837, 7933, 1837, 7934, 1841, 7935, 1841, 1073749760, 1845, // NOLINT
+ 7943, 1849, 1073749768, 1845, 7951, 1849, 1073749776, 1853, 7957, 1857, 1073749784, 1853, 7965, 1857, 1073749792, 1861, // NOLINT
+ 7975, 1865, 1073749800, 1861, 7983, 1865, 1073749808, 1869, 7991, 1873, 1073749816, 1869, 7999, 1873, 1073749824, 1877, // NOLINT
+ 8005, 1881, 1073749832, 1877, 8013, 1881, 8017, 1885, 8019, 1889, 8021, 1893, 8023, 1897, 8025, 1885, // NOLINT
+ 8027, 1889, 8029, 1893, 8031, 1897, 1073749856, 1901, 8039, 1905, 1073749864, 1901, 8047, 1905, 1073749872, 1909, // NOLINT
+ 8049, 1913, 1073749874, 1917, 8053, 1921, 1073749878, 1925, 8055, 1929, 1073749880, 1933, 8057, 1937, 1073749882, 1941, // NOLINT
+ 8059, 1945, 1073749884, 1949, 8061, 1953, 1073749936, 1957, 8113, 1961, 1073749944, 1957, 8121, 1961, 1073749946, 1909, // NOLINT
+ 8123, 1913, 8126, 729, 1073749960, 1917, 8139, 1921, 1073749968, 1965, 8145, 1969, 1073749976, 1965, 8153, 1969, // NOLINT
+ 1073749978, 1925, 8155, 1929, 1073749984, 1973, 8161, 1977, 8165, 1981, 1073749992, 1973, 8169, 1977, 1073749994, 1941, // NOLINT
+ 8171, 1945, 8172, 1981, 1073750008, 1933, 8185, 1937, 1073750010, 1949, 8187, 1953 }; // NOLINT
+static const uint16_t kEcma262UnCanonicalizeMultiStrings0Size = 497; // NOLINT
+static const MultiCharacterSpecialCase<2> kEcma262UnCanonicalizeMultiStrings1[83] = { // NOLINT
{{8498, 8526}}, {{8544, 8560}}, {{8559, 8575}}, {{8579, 8580}}, // NOLINT
{{9398, 9424}}, {{9423, 9449}}, {{11264, 11312}}, {{11310, 11358}}, // NOLINT
{{11360, 11361}}, {{619, 11362}}, {{7549, 11363}}, {{637, 11364}}, // NOLINT
{{570, 11365}}, {{574, 11366}}, {{11367, 11368}}, {{11369, 11370}}, // NOLINT
- {{11371, 11372}}, {{11381, 11382}}, {{11392, 11393}}, {{11394, 11395}}, // NOLINT
- {{11396, 11397}}, {{11398, 11399}}, {{11400, 11401}}, {{11402, 11403}}, // NOLINT
- {{11404, 11405}}, {{11406, 11407}}, {{11408, 11409}}, {{11410, 11411}}, // NOLINT
- {{11412, 11413}}, {{11414, 11415}}, {{11416, 11417}}, {{11418, 11419}}, // NOLINT
- {{11420, 11421}}, {{11422, 11423}}, {{11424, 11425}}, {{11426, 11427}}, // NOLINT
- {{11428, 11429}}, {{11430, 11431}}, {{11432, 11433}}, {{11434, 11435}}, // NOLINT
- {{11436, 11437}}, {{11438, 11439}}, {{11440, 11441}}, {{11442, 11443}}, // NOLINT
- {{11444, 11445}}, {{11446, 11447}}, {{11448, 11449}}, {{11450, 11451}}, // NOLINT
- {{11452, 11453}}, {{11454, 11455}}, {{11456, 11457}}, {{11458, 11459}}, // NOLINT
- {{11460, 11461}}, {{11462, 11463}}, {{11464, 11465}}, {{11466, 11467}}, // NOLINT
- {{11468, 11469}}, {{11470, 11471}}, {{11472, 11473}}, {{11474, 11475}}, // NOLINT
- {{11476, 11477}}, {{11478, 11479}}, {{11480, 11481}}, {{11482, 11483}}, // NOLINT
- {{11484, 11485}}, {{11486, 11487}}, {{11488, 11489}}, {{11490, 11491}}, // NOLINT
- {{4256, 11520}}, {{4293, 11557}}, {{kSentinel}} }; // NOLINT
-static const uint16_t kEcma262UnCanonicalizeTable1Size = 133; // NOLINT
-static const int32_t kEcma262UnCanonicalizeTable1[266] = {
+ {{11371, 11372}}, {{593, 11373}}, {{625, 11374}}, {{592, 11375}}, // NOLINT
+ {{594, 11376}}, {{11378, 11379}}, {{11381, 11382}}, {{575, 11390}}, // NOLINT
+ {{576, 11391}}, {{11392, 11393}}, {{11394, 11395}}, {{11396, 11397}}, // NOLINT
+ {{11398, 11399}}, {{11400, 11401}}, {{11402, 11403}}, {{11404, 11405}}, // NOLINT
+ {{11406, 11407}}, {{11408, 11409}}, {{11410, 11411}}, {{11412, 11413}}, // NOLINT
+ {{11414, 11415}}, {{11416, 11417}}, {{11418, 11419}}, {{11420, 11421}}, // NOLINT
+ {{11422, 11423}}, {{11424, 11425}}, {{11426, 11427}}, {{11428, 11429}}, // NOLINT
+ {{11430, 11431}}, {{11432, 11433}}, {{11434, 11435}}, {{11436, 11437}}, // NOLINT
+ {{11438, 11439}}, {{11440, 11441}}, {{11442, 11443}}, {{11444, 11445}}, // NOLINT
+ {{11446, 11447}}, {{11448, 11449}}, {{11450, 11451}}, {{11452, 11453}}, // NOLINT
+ {{11454, 11455}}, {{11456, 11457}}, {{11458, 11459}}, {{11460, 11461}}, // NOLINT
+ {{11462, 11463}}, {{11464, 11465}}, {{11466, 11467}}, {{11468, 11469}}, // NOLINT
+ {{11470, 11471}}, {{11472, 11473}}, {{11474, 11475}}, {{11476, 11477}}, // NOLINT
+ {{11478, 11479}}, {{11480, 11481}}, {{11482, 11483}}, {{11484, 11485}}, // NOLINT
+ {{11486, 11487}}, {{11488, 11489}}, {{11490, 11491}}, {{11499, 11500}}, // NOLINT
+ {{11501, 11502}}, {{11506, 11507}}, {{4256, 11520}}, {{4293, 11557}}, // NOLINT
+ {{4295, 11559}}, {{4301, 11565}}, {{kSentinel}} }; // NOLINT
+static const uint16_t kEcma262UnCanonicalizeTable1Size = 149; // NOLINT
+static const int32_t kEcma262UnCanonicalizeTable1[298] = {
306, 1, 334, 1, 1073742176, 5, 367, 9, 1073742192, 5, 383, 9, 387, 13, 388, 13, // NOLINT
1073743030, 17, 1231, 21, 1073743056, 17, 1257, 21, 1073744896, 25, 3118, 29, 1073744944, 25, 3166, 29, // NOLINT
3168, 33, 3169, 33, 3170, 37, 3171, 41, 3172, 45, 3173, 49, 3174, 53, 3175, 57, // NOLINT
- 3176, 57, 3177, 61, 3178, 61, 3179, 65, 3180, 65, 3189, 69, 3190, 69, 3200, 73, // NOLINT
- 3201, 73, 3202, 77, 3203, 77, 3204, 81, 3205, 81, 3206, 85, 3207, 85, 3208, 89, // NOLINT
- 3209, 89, 3210, 93, 3211, 93, 3212, 97, 3213, 97, 3214, 101, 3215, 101, 3216, 105, // NOLINT
- 3217, 105, 3218, 109, 3219, 109, 3220, 113, 3221, 113, 3222, 117, 3223, 117, 3224, 121, // NOLINT
- 3225, 121, 3226, 125, 3227, 125, 3228, 129, 3229, 129, 3230, 133, 3231, 133, 3232, 137, // NOLINT
- 3233, 137, 3234, 141, 3235, 141, 3236, 145, 3237, 145, 3238, 149, 3239, 149, 3240, 153, // NOLINT
- 3241, 153, 3242, 157, 3243, 157, 3244, 161, 3245, 161, 3246, 165, 3247, 165, 3248, 169, // NOLINT
- 3249, 169, 3250, 173, 3251, 173, 3252, 177, 3253, 177, 3254, 181, 3255, 181, 3256, 185, // NOLINT
- 3257, 185, 3258, 189, 3259, 189, 3260, 193, 3261, 193, 3262, 197, 3263, 197, 3264, 201, // NOLINT
- 3265, 201, 3266, 205, 3267, 205, 3268, 209, 3269, 209, 3270, 213, 3271, 213, 3272, 217, // NOLINT
- 3273, 217, 3274, 221, 3275, 221, 3276, 225, 3277, 225, 3278, 229, 3279, 229, 3280, 233, // NOLINT
- 3281, 233, 3282, 237, 3283, 237, 3284, 241, 3285, 241, 3286, 245, 3287, 245, 3288, 249, // NOLINT
- 3289, 249, 3290, 253, 3291, 253, 3292, 257, 3293, 257, 3294, 261, 3295, 261, 3296, 265, // NOLINT
- 3297, 265, 3298, 269, 3299, 269, 1073745152, 273, 3365, 277 }; // NOLINT
-static const uint16_t kEcma262UnCanonicalizeMultiStrings1Size = 71; // NOLINT
+ 3176, 57, 3177, 61, 3178, 61, 3179, 65, 3180, 65, 3181, 69, 3182, 73, 3183, 77, // NOLINT
+ 3184, 81, 3186, 85, 3187, 85, 3189, 89, 3190, 89, 1073745022, 93, 3199, 97, 3200, 101, // NOLINT
+ 3201, 101, 3202, 105, 3203, 105, 3204, 109, 3205, 109, 3206, 113, 3207, 113, 3208, 117, // NOLINT
+ 3209, 117, 3210, 121, 3211, 121, 3212, 125, 3213, 125, 3214, 129, 3215, 129, 3216, 133, // NOLINT
+ 3217, 133, 3218, 137, 3219, 137, 3220, 141, 3221, 141, 3222, 145, 3223, 145, 3224, 149, // NOLINT
+ 3225, 149, 3226, 153, 3227, 153, 3228, 157, 3229, 157, 3230, 161, 3231, 161, 3232, 165, // NOLINT
+ 3233, 165, 3234, 169, 3235, 169, 3236, 173, 3237, 173, 3238, 177, 3239, 177, 3240, 181, // NOLINT
+ 3241, 181, 3242, 185, 3243, 185, 3244, 189, 3245, 189, 3246, 193, 3247, 193, 3248, 197, // NOLINT
+ 3249, 197, 3250, 201, 3251, 201, 3252, 205, 3253, 205, 3254, 209, 3255, 209, 3256, 213, // NOLINT
+ 3257, 213, 3258, 217, 3259, 217, 3260, 221, 3261, 221, 3262, 225, 3263, 225, 3264, 229, // NOLINT
+ 3265, 229, 3266, 233, 3267, 233, 3268, 237, 3269, 237, 3270, 241, 3271, 241, 3272, 245, // NOLINT
+ 3273, 245, 3274, 249, 3275, 249, 3276, 253, 3277, 253, 3278, 257, 3279, 257, 3280, 261, // NOLINT
+ 3281, 261, 3282, 265, 3283, 265, 3284, 269, 3285, 269, 3286, 273, 3287, 273, 3288, 277, // NOLINT
+ 3289, 277, 3290, 281, 3291, 281, 3292, 285, 3293, 285, 3294, 289, 3295, 289, 3296, 293, // NOLINT
+ 3297, 293, 3298, 297, 3299, 297, 3307, 301, 3308, 301, 3309, 305, 3310, 305, 3314, 309, // NOLINT
+ 3315, 309, 1073745152, 313, 3365, 317, 3367, 321, 3373, 325 }; // NOLINT
+static const uint16_t kEcma262UnCanonicalizeMultiStrings1Size = 83; // NOLINT
+static const MultiCharacterSpecialCase<2> kEcma262UnCanonicalizeMultiStrings5[92] = { // NOLINT
+ {{42560, 42561}}, {{42562, 42563}}, {{42564, 42565}}, {{42566, 42567}}, // NOLINT
+ {{42568, 42569}}, {{42570, 42571}}, {{42572, 42573}}, {{42574, 42575}}, // NOLINT
+ {{42576, 42577}}, {{42578, 42579}}, {{42580, 42581}}, {{42582, 42583}}, // NOLINT
+ {{42584, 42585}}, {{42586, 42587}}, {{42588, 42589}}, {{42590, 42591}}, // NOLINT
+ {{42592, 42593}}, {{42594, 42595}}, {{42596, 42597}}, {{42598, 42599}}, // NOLINT
+ {{42600, 42601}}, {{42602, 42603}}, {{42604, 42605}}, {{42624, 42625}}, // NOLINT
+ {{42626, 42627}}, {{42628, 42629}}, {{42630, 42631}}, {{42632, 42633}}, // NOLINT
+ {{42634, 42635}}, {{42636, 42637}}, {{42638, 42639}}, {{42640, 42641}}, // NOLINT
+ {{42642, 42643}}, {{42644, 42645}}, {{42646, 42647}}, {{42786, 42787}}, // NOLINT
+ {{42788, 42789}}, {{42790, 42791}}, {{42792, 42793}}, {{42794, 42795}}, // NOLINT
+ {{42796, 42797}}, {{42798, 42799}}, {{42802, 42803}}, {{42804, 42805}}, // NOLINT
+ {{42806, 42807}}, {{42808, 42809}}, {{42810, 42811}}, {{42812, 42813}}, // NOLINT
+ {{42814, 42815}}, {{42816, 42817}}, {{42818, 42819}}, {{42820, 42821}}, // NOLINT
+ {{42822, 42823}}, {{42824, 42825}}, {{42826, 42827}}, {{42828, 42829}}, // NOLINT
+ {{42830, 42831}}, {{42832, 42833}}, {{42834, 42835}}, {{42836, 42837}}, // NOLINT
+ {{42838, 42839}}, {{42840, 42841}}, {{42842, 42843}}, {{42844, 42845}}, // NOLINT
+ {{42846, 42847}}, {{42848, 42849}}, {{42850, 42851}}, {{42852, 42853}}, // NOLINT
+ {{42854, 42855}}, {{42856, 42857}}, {{42858, 42859}}, {{42860, 42861}}, // NOLINT
+ {{42862, 42863}}, {{42873, 42874}}, {{42875, 42876}}, {{7545, 42877}}, // NOLINT
+ {{42878, 42879}}, {{42880, 42881}}, {{42882, 42883}}, {{42884, 42885}}, // NOLINT
+ {{42886, 42887}}, {{42891, 42892}}, {{613, 42893}}, {{42896, 42897}}, // NOLINT
+ {{42898, 42899}}, {{42912, 42913}}, {{42914, 42915}}, {{42916, 42917}}, // NOLINT
+ {{42918, 42919}}, {{42920, 42921}}, {{614, 42922}}, {{kSentinel}} }; // NOLINT
+static const uint16_t kEcma262UnCanonicalizeTable5Size = 179; // NOLINT
+static const int32_t kEcma262UnCanonicalizeTable5[358] = {
+ 1600, 1, 1601, 1, 1602, 5, 1603, 5, 1604, 9, 1605, 9, 1606, 13, 1607, 13, // NOLINT
+ 1608, 17, 1609, 17, 1610, 21, 1611, 21, 1612, 25, 1613, 25, 1614, 29, 1615, 29, // NOLINT
+ 1616, 33, 1617, 33, 1618, 37, 1619, 37, 1620, 41, 1621, 41, 1622, 45, 1623, 45, // NOLINT
+ 1624, 49, 1625, 49, 1626, 53, 1627, 53, 1628, 57, 1629, 57, 1630, 61, 1631, 61, // NOLINT
+ 1632, 65, 1633, 65, 1634, 69, 1635, 69, 1636, 73, 1637, 73, 1638, 77, 1639, 77, // NOLINT
+ 1640, 81, 1641, 81, 1642, 85, 1643, 85, 1644, 89, 1645, 89, 1664, 93, 1665, 93, // NOLINT
+ 1666, 97, 1667, 97, 1668, 101, 1669, 101, 1670, 105, 1671, 105, 1672, 109, 1673, 109, // NOLINT
+ 1674, 113, 1675, 113, 1676, 117, 1677, 117, 1678, 121, 1679, 121, 1680, 125, 1681, 125, // NOLINT
+ 1682, 129, 1683, 129, 1684, 133, 1685, 133, 1686, 137, 1687, 137, 1826, 141, 1827, 141, // NOLINT
+ 1828, 145, 1829, 145, 1830, 149, 1831, 149, 1832, 153, 1833, 153, 1834, 157, 1835, 157, // NOLINT
+ 1836, 161, 1837, 161, 1838, 165, 1839, 165, 1842, 169, 1843, 169, 1844, 173, 1845, 173, // NOLINT
+ 1846, 177, 1847, 177, 1848, 181, 1849, 181, 1850, 185, 1851, 185, 1852, 189, 1853, 189, // NOLINT
+ 1854, 193, 1855, 193, 1856, 197, 1857, 197, 1858, 201, 1859, 201, 1860, 205, 1861, 205, // NOLINT
+ 1862, 209, 1863, 209, 1864, 213, 1865, 213, 1866, 217, 1867, 217, 1868, 221, 1869, 221, // NOLINT
+ 1870, 225, 1871, 225, 1872, 229, 1873, 229, 1874, 233, 1875, 233, 1876, 237, 1877, 237, // NOLINT
+ 1878, 241, 1879, 241, 1880, 245, 1881, 245, 1882, 249, 1883, 249, 1884, 253, 1885, 253, // NOLINT
+ 1886, 257, 1887, 257, 1888, 261, 1889, 261, 1890, 265, 1891, 265, 1892, 269, 1893, 269, // NOLINT
+ 1894, 273, 1895, 273, 1896, 277, 1897, 277, 1898, 281, 1899, 281, 1900, 285, 1901, 285, // NOLINT
+ 1902, 289, 1903, 289, 1913, 293, 1914, 293, 1915, 297, 1916, 297, 1917, 301, 1918, 305, // NOLINT
+ 1919, 305, 1920, 309, 1921, 309, 1922, 313, 1923, 313, 1924, 317, 1925, 317, 1926, 321, // NOLINT
+ 1927, 321, 1931, 325, 1932, 325, 1933, 329, 1936, 333, 1937, 333, 1938, 337, 1939, 337, // NOLINT
+ 1952, 341, 1953, 341, 1954, 345, 1955, 345, 1956, 349, 1957, 349, 1958, 353, 1959, 353, // NOLINT
+ 1960, 357, 1961, 357, 1962, 361 }; // NOLINT
+static const uint16_t kEcma262UnCanonicalizeMultiStrings5Size = 92; // NOLINT
static const MultiCharacterSpecialCase<2> kEcma262UnCanonicalizeMultiStrings7[3] = { // NOLINT
{{65313, 65345}}, {{65338, 65370}}, {{kSentinel}} }; // NOLINT
static const uint16_t kEcma262UnCanonicalizeTable7Size = 4; // NOLINT
@@ -1502,6 +1735,13 @@ int Ecma262UnCanonicalize::Convert(uchar c,
n,
result,
allow_caching_ptr);
+ case 5: return LookupMapping<true>(kEcma262UnCanonicalizeTable5,
+ kEcma262UnCanonicalizeTable5Size,
+ kEcma262UnCanonicalizeMultiStrings5,
+ c,
+ n,
+ result,
+ allow_caching_ptr);
case 7: return LookupMapping<true>(kEcma262UnCanonicalizeTable7,
kEcma262UnCanonicalizeTable7Size,
kEcma262UnCanonicalizeMultiStrings7,
@@ -1577,9 +1817,11 @@ const uchar UnicodeData::kMaxCodePoint = 65533;
int UnicodeData::GetByteCount() {
return kUppercaseTable0Size * sizeof(int32_t) // NOLINT
+ kUppercaseTable1Size * sizeof(int32_t) // NOLINT
+ + kUppercaseTable5Size * sizeof(int32_t) // NOLINT
+ kUppercaseTable7Size * sizeof(int32_t) // NOLINT
+ kLowercaseTable0Size * sizeof(int32_t) // NOLINT
+ kLowercaseTable1Size * sizeof(int32_t) // NOLINT
+ + kLowercaseTable5Size * sizeof(int32_t) // NOLINT
+ kLowercaseTable7Size * sizeof(int32_t) // NOLINT
+ kLetterTable0Size * sizeof(int32_t) // NOLINT
+ kLetterTable1Size * sizeof(int32_t) // NOLINT
@@ -1592,6 +1834,7 @@ int UnicodeData::GetByteCount() {
+ kSpaceTable0Size * sizeof(int32_t) // NOLINT
+ kSpaceTable1Size * sizeof(int32_t) // NOLINT
+ kNumberTable0Size * sizeof(int32_t) // NOLINT
+ + kNumberTable5Size * sizeof(int32_t) // NOLINT
+ kNumberTable7Size * sizeof(int32_t) // NOLINT
+ kWhiteSpaceTable0Size * sizeof(int32_t) // NOLINT
+ kWhiteSpaceTable1Size * sizeof(int32_t) // NOLINT
@@ -1606,15 +1849,19 @@ int UnicodeData::GetByteCount() {
+ kConnectorPunctuationTable7Size * sizeof(int32_t) // NOLINT
+ kToLowercaseMultiStrings0Size * sizeof(MultiCharacterSpecialCase<2>) // NOLINT
+ kToLowercaseMultiStrings1Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ + kToLowercaseMultiStrings5Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ kToLowercaseMultiStrings7Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ kToUppercaseMultiStrings0Size * sizeof(MultiCharacterSpecialCase<3>) // NOLINT
+ kToUppercaseMultiStrings1Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ + kToUppercaseMultiStrings5Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ kToUppercaseMultiStrings7Size * sizeof(MultiCharacterSpecialCase<3>) // NOLINT
+ kEcma262CanonicalizeMultiStrings0Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ kEcma262CanonicalizeMultiStrings1Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ + kEcma262CanonicalizeMultiStrings5Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ kEcma262CanonicalizeMultiStrings7Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ kEcma262UnCanonicalizeMultiStrings0Size * sizeof(MultiCharacterSpecialCase<4>) // NOLINT
+ kEcma262UnCanonicalizeMultiStrings1Size * sizeof(MultiCharacterSpecialCase<2>) // NOLINT
+ + kEcma262UnCanonicalizeMultiStrings5Size * sizeof(MultiCharacterSpecialCase<2>) // NOLINT
+ kEcma262UnCanonicalizeMultiStrings7Size * sizeof(MultiCharacterSpecialCase<2>) // NOLINT
+ kCanonicalizationRangeMultiStrings0Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
+ kCanonicalizationRangeMultiStrings1Size * sizeof(MultiCharacterSpecialCase<1>) // NOLINT
diff --git a/src/3rdparty/v8/src/unicode.h b/src/3rdparty/v8/src/unicode.h
index 39fc349..94ab1b4 100644
--- a/src/3rdparty/v8/src/unicode.h
+++ b/src/3rdparty/v8/src/unicode.h
@@ -1,4 +1,4 @@
-// Copyright 2007-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -44,7 +44,7 @@ typedef unsigned char byte;
* The max length of the result of converting the case of a single
* character.
*/
-static const int kMaxMappingSize = 4;
+const int kMaxMappingSize = 4;
template <class T, int size = 256>
class Predicate {
@@ -100,7 +100,7 @@ class UnicodeData {
static const uchar kMaxCodePoint;
};
-// --- U t f 8 ---
+// --- U t f 8 a n d 16 ---
template <typename Data>
class Buffer {
@@ -114,10 +114,46 @@ class Buffer {
unsigned length_;
};
+
+class Utf16 {
+ public:
+ static inline bool IsLeadSurrogate(int code) {
+ if (code == kNoPreviousCharacter) return false;
+ return (code & 0xfc00) == 0xd800;
+ }
+ static inline bool IsTrailSurrogate(int code) {
+ if (code == kNoPreviousCharacter) return false;
+ return (code & 0xfc00) == 0xdc00;
+ }
+
+ static inline int CombineSurrogatePair(uchar lead, uchar trail) {
+ return 0x10000 + ((lead & 0x3ff) << 10) + (trail & 0x3ff);
+ }
+ static const int kNoPreviousCharacter = -1;
+ static const uchar kMaxNonSurrogateCharCode = 0xffff;
+ // Encoding a single UTF-16 code unit will produce 1, 2 or 3 bytes
+ // of UTF-8 data. The special case where the unit is a surrogate
+ // trail produces 1 byte net, because the encoding of the pair is
+ // 4 bytes and the 3 bytes that were used to encode the lead surrogate
+ // can be reclaimed.
+ static const int kMaxExtraUtf8BytesForOneUtf16CodeUnit = 3;
+ // One UTF-16 surrogate is endoded (illegally) as 3 UTF-8 bytes.
+ // The illegality stems from the surrogate not being part of a pair.
+ static const int kUtf8BytesToCodeASurrogate = 3;
+ static inline uchar LeadSurrogate(int char_code) {
+ return 0xd800 + (((char_code - 0x10000) >> 10) & 0x3ff);
+ }
+ static inline uchar TrailSurrogate(int char_code) {
+ return 0xdc00 + (char_code & 0x3ff);
+ }
+};
+
+
class Utf8 {
public:
- static inline uchar Length(uchar chr);
- static inline unsigned Encode(char* out, uchar c);
+ static inline uchar Length(uchar chr, int previous);
+ static inline unsigned Encode(
+ char* out, uchar c, int previous);
static const byte* ReadBlock(Buffer<const char*> str, byte* buffer,
unsigned capacity, unsigned* chars_read, unsigned* offset);
static uchar CalculateValue(const byte* str,
@@ -130,6 +166,11 @@ class Utf8 {
static const unsigned kMaxThreeByteChar = 0xffff;
static const unsigned kMaxFourByteChar = 0x1fffff;
+ // A single surrogate is coded as a 3 byte UTF-8 sequence, but two together
+ // that match are coded as a 4 byte UTF-8 sequence.
+ static const unsigned kBytesSavedByCombiningSurrogates = 2;
+ static const unsigned kSizeOfUnmatchedSurrogate = 3;
+
private:
template <unsigned s> friend class Utf8InputBuffer;
friend class Test;
@@ -147,6 +188,7 @@ class CharacterStream {
// Note that default implementation is not efficient.
virtual void Seek(unsigned);
unsigned Length();
+ unsigned Utf16Length();
virtual ~CharacterStream() { }
static inline bool EncodeCharacter(uchar c, byte* buffer, unsigned capacity,
unsigned& offset);
@@ -156,6 +198,7 @@ class CharacterStream {
unsigned capacity, unsigned& offset);
static inline uchar DecodeCharacter(const byte* buffer, unsigned* offset);
virtual void Rewind() = 0;
+
protected:
virtual void FillBuffer() = 0;
// The number of characters left in the current buffer
diff --git a/src/3rdparty/v8/src/uri.js b/src/3rdparty/v8/src/uri.js
index 1656664..b195f3d 100644
--- a/src/3rdparty/v8/src/uri.js
+++ b/src/3rdparty/v8/src/uri.js
@@ -219,7 +219,7 @@ function Decode(uri, reserved) {
var cc = URIHexCharsToCharCode(uri.charCodeAt(++k), uri.charCodeAt(++k));
if (cc >> 7) {
var n = 0;
- while (((cc << ++n) & 0x80) != 0) ;
+ while (((cc << ++n) & 0x80) != 0) { }
if (n == 1 || n > 4) throw new $URIError("URI malformed");
var octets = new $Array(n);
octets[0] = cc;
@@ -250,7 +250,7 @@ function Decode(uri, reserved) {
// ECMA-262 - 15.1.3.1.
function URIDecode(uri) {
- function reservedPredicate(cc) {
+ var reservedPredicate = function(cc) {
// #$
if (35 <= cc && cc <= 36) return true;
// &
@@ -275,7 +275,7 @@ function URIDecode(uri) {
// ECMA-262 - 15.1.3.2.
function URIDecodeComponent(component) {
- function reservedPredicate(cc) { return false; };
+ var reservedPredicate = function(cc) { return false; };
var string = ToString(component);
return Decode(string, reservedPredicate);
}
@@ -296,7 +296,7 @@ function isAlphaNumeric(cc) {
// ECMA-262 - 15.1.3.3.
function URIEncode(uri) {
- function unescapePredicate(cc) {
+ var unescapePredicate = function(cc) {
if (isAlphaNumeric(cc)) return true;
// !
if (cc == 33) return true;
@@ -325,7 +325,7 @@ function URIEncode(uri) {
// ECMA-262 - 15.1.3.4
function URIEncodeComponent(component) {
- function unescapePredicate(cc) {
+ var unescapePredicate = function(cc) {
if (isAlphaNumeric(cc)) return true;
// !
if (cc == 33) return true;
diff --git a/src/3rdparty/v8/src/utils.h b/src/3rdparty/v8/src/utils.h
index 2e6cfbd..1d40c98 100644
--- a/src/3rdparty/v8/src/utils.h
+++ b/src/3rdparty/v8/src/utils.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -47,13 +47,13 @@ namespace internal {
// Returns true iff x is a power of 2 (or zero). Cannot be used with the
// maximally negative value of the type T (the -1 overflows).
template <typename T>
-static inline bool IsPowerOf2(T x) {
+inline bool IsPowerOf2(T x) {
return IS_POWER_OF_TWO(x);
}
// X must be a power of 2. Returns the number of trailing zeros.
-static inline int WhichPowerOf2(uint32_t x) {
+inline int WhichPowerOf2(uint32_t x) {
ASSERT(IsPowerOf2(x));
ASSERT(x != 0);
int bits = 0;
@@ -88,7 +88,7 @@ static inline int WhichPowerOf2(uint32_t x) {
// The C++ standard leaves the semantics of '>>' undefined for
// negative signed operands. Most implementations do the right thing,
// though.
-static inline int ArithmeticShiftRight(int x, int s) {
+inline int ArithmeticShiftRight(int x, int s) {
return x >> s;
}
@@ -97,7 +97,7 @@ static inline int ArithmeticShiftRight(int x, int s) {
// This allows conversion of Addresses and integral types into
// 0-relative int offsets.
template <typename T>
-static inline intptr_t OffsetFrom(T x) {
+inline intptr_t OffsetFrom(T x) {
return x - static_cast<T>(0);
}
@@ -106,14 +106,14 @@ static inline intptr_t OffsetFrom(T x) {
// This allows conversion of 0-relative int offsets into Addresses and
// integral types.
template <typename T>
-static inline T AddressFrom(intptr_t x) {
+inline T AddressFrom(intptr_t x) {
return static_cast<T>(static_cast<T>(0) + x);
}
// Return the largest multiple of m which is <= x.
template <typename T>
-static inline T RoundDown(T x, intptr_t m) {
+inline T RoundDown(T x, intptr_t m) {
ASSERT(IsPowerOf2(m));
return AddressFrom<T>(OffsetFrom(x) & -m);
}
@@ -121,13 +121,13 @@ static inline T RoundDown(T x, intptr_t m) {
// Return the smallest multiple of m which is >= x.
template <typename T>
-static inline T RoundUp(T x, intptr_t m) {
+inline T RoundUp(T x, intptr_t m) {
return RoundDown<T>(static_cast<T>(x + m - 1), m);
}
template <typename T>
-static int Compare(const T& a, const T& b) {
+int Compare(const T& a, const T& b) {
if (a == b)
return 0;
else if (a < b)
@@ -138,7 +138,7 @@ static int Compare(const T& a, const T& b) {
template <typename T>
-static int PointerValueCompare(const T* a, const T* b) {
+int PointerValueCompare(const T* a, const T* b) {
return Compare<T>(*a, *b);
}
@@ -148,7 +148,7 @@ static int PointerValueCompare(const T* a, const T* b) {
// handles.
template<typename T> class Handle; // Forward declaration.
template <typename T>
-static int HandleObjectPointerCompare(const Handle<T>* a, const Handle<T>* b) {
+int HandleObjectPointerCompare(const Handle<T>* a, const Handle<T>* b) {
return Compare<T*>(*(*a), *(*b));
}
@@ -157,7 +157,7 @@ static int HandleObjectPointerCompare(const Handle<T>* a, const Handle<T>* b) {
// number that is already a power of two, it is returned as is.
// Implementation is from "Hacker's Delight" by Henry S. Warren, Jr.,
// figure 3-3, page 48, where the function is called clp2.
-static inline uint32_t RoundUpToPowerOf2(uint32_t x) {
+inline uint32_t RoundUpToPowerOf2(uint32_t x) {
ASSERT(x <= 0x80000000u);
x = x - 1;
x = x | (x >> 1);
@@ -169,7 +169,7 @@ static inline uint32_t RoundUpToPowerOf2(uint32_t x) {
}
-static inline uint32_t RoundDownToPowerOf2(uint32_t x) {
+inline uint32_t RoundDownToPowerOf2(uint32_t x) {
uint32_t rounded_up = RoundUpToPowerOf2(x);
if (rounded_up > x) return rounded_up >> 1;
return rounded_up;
@@ -177,15 +177,15 @@ static inline uint32_t RoundDownToPowerOf2(uint32_t x) {
template <typename T, typename U>
-static inline bool IsAligned(T value, U alignment) {
+inline bool IsAligned(T value, U alignment) {
return (value & (alignment - 1)) == 0;
}
// Returns true if (addr + offset) is aligned.
-static inline bool IsAddressAligned(Address addr,
- intptr_t alignment,
- int offset = 0) {
+inline bool IsAddressAligned(Address addr,
+ intptr_t alignment,
+ int offset = 0) {
intptr_t offs = OffsetFrom(addr + offset);
return IsAligned(offs, alignment);
}
@@ -193,14 +193,14 @@ static inline bool IsAddressAligned(Address addr,
// Returns the maximum of the two parameters.
template <typename T>
-static T Max(T a, T b) {
+T Max(T a, T b) {
return a < b ? b : a;
}
// Returns the minimum of the two parameters.
template <typename T>
-static T Min(T a, T b) {
+T Min(T a, T b) {
return a < b ? a : b;
}
@@ -252,10 +252,13 @@ class BitField {
// ----------------------------------------------------------------------------
// Hash function.
+static const uint32_t kZeroHashSeed = 0;
+
// Thomas Wang, Integer Hash Functions.
// http://www.concentric.net/~Ttwang/tech/inthash.htm
-static inline uint32_t ComputeIntegerHash(uint32_t key) {
+inline uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed) {
uint32_t hash = key;
+ hash = hash ^ seed;
hash = ~hash + (hash << 15); // hash = (hash << 15) - hash - 1;
hash = hash ^ (hash >> 12);
hash = hash + (hash << 2);
@@ -266,7 +269,7 @@ static inline uint32_t ComputeIntegerHash(uint32_t key) {
}
-static inline uint32_t ComputeLongHash(uint64_t key) {
+inline uint32_t ComputeLongHash(uint64_t key) {
uint64_t hash = key;
hash = ~hash + (hash << 18); // hash = (hash << 18) - hash - 1;
hash = hash ^ (hash >> 31);
@@ -278,9 +281,10 @@ static inline uint32_t ComputeLongHash(uint64_t key) {
}
-static inline uint32_t ComputePointerHash(void* ptr) {
+inline uint32_t ComputePointerHash(void* ptr) {
return ComputeIntegerHash(
- static_cast<uint32_t>(reinterpret_cast<intptr_t>(ptr)));
+ static_cast<uint32_t>(reinterpret_cast<intptr_t>(ptr)),
+ v8::internal::kZeroHashSeed);
}
@@ -734,7 +738,7 @@ class SequenceCollector : public Collector<T, growth_factor, max_growth> {
// Compare ASCII/16bit chars to ASCII/16bit chars.
template <typename lchar, typename rchar>
-static inline int CompareChars(const lchar* lhs, const rchar* rhs, int chars) {
+inline int CompareChars(const lchar* lhs, const rchar* rhs, int chars) {
const lchar* limit = lhs + chars;
#ifdef V8_HOST_CAN_READ_UNALIGNED
if (sizeof(*lhs) == sizeof(*rhs)) {
@@ -761,7 +765,7 @@ static inline int CompareChars(const lchar* lhs, const rchar* rhs, int chars) {
// Calculate 10^exponent.
-static inline int TenToThe(int exponent) {
+inline int TenToThe(int exponent) {
ASSERT(exponent <= 9);
ASSERT(exponent >= 1);
int answer = 10;
@@ -927,9 +931,17 @@ class EnumSet {
explicit EnumSet(T bits = 0) : bits_(bits) {}
bool IsEmpty() const { return bits_ == 0; }
bool Contains(E element) const { return (bits_ & Mask(element)) != 0; }
+ bool ContainsAnyOf(const EnumSet& set) const {
+ return (bits_ & set.bits_) != 0;
+ }
void Add(E element) { bits_ |= Mask(element); }
+ void Add(const EnumSet& set) { bits_ |= set.bits_; }
void Remove(E element) { bits_ &= ~Mask(element); }
+ void Remove(const EnumSet& set) { bits_ &= ~set.bits_; }
+ void RemoveAll() { bits_ = 0; }
+ void Intersect(const EnumSet& set) { bits_ &= set.bits_; }
T ToIntegral() const { return bits_; }
+ bool operator==(const EnumSet& set) { return bits_ == set.bits_; }
private:
T Mask(E element) const {
diff --git a/src/3rdparty/v8/src/v8-counters.h b/src/3rdparty/v8/src/v8-counters.h
index 47341e7..6db9c77 100644
--- a/src/3rdparty/v8/src/v8-counters.h
+++ b/src/3rdparty/v8/src/v8-counters.h
@@ -198,6 +198,9 @@ namespace internal {
SC(constructed_objects_stub, V8.ConstructedObjectsStub) \
SC(negative_lookups, V8.NegativeLookups) \
SC(negative_lookups_miss, V8.NegativeLookupsMiss) \
+ SC(megamorphic_stub_cache_probes, V8.MegamorphicStubCacheProbes) \
+ SC(megamorphic_stub_cache_misses, V8.MegamorphicStubCacheMisses) \
+ SC(megamorphic_stub_cache_updates, V8.MegamorphicStubCacheUpdates) \
SC(array_function_runtime, V8.ArrayFunctionRuntime) \
SC(array_function_native, V8.ArrayFunctionNative) \
SC(for_in, V8.ForIn) \
diff --git a/src/3rdparty/v8/src/v8.cc b/src/3rdparty/v8/src/v8.cc
index 66c65e7..45036c8 100644
--- a/src/3rdparty/v8/src/v8.cc
+++ b/src/3rdparty/v8/src/v8.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -27,15 +27,19 @@
#include "v8.h"
+#include "assembler.h"
#include "isolate.h"
#include "elements.h"
#include "bootstrapper.h"
#include "debug.h"
#include "deoptimizer.h"
+#include "frames.h"
#include "heap-profiler.h"
#include "hydrogen.h"
#include "lithium-allocator.h"
#include "log.h"
+#include "once.h"
+#include "platform.h"
#include "runtime-profiler.h"
#include "serialize.h"
#include "store-buffer.h"
@@ -43,28 +47,22 @@
namespace v8 {
namespace internal {
-static Mutex* init_once_mutex = OS::CreateMutex();
-static bool init_once_called = false;
+V8_DECLARE_ONCE(init_once);
bool V8::is_running_ = false;
-bool V8::has_been_setup_ = false;
+bool V8::has_been_set_up_ = false;
bool V8::has_been_disposed_ = false;
bool V8::has_fatal_error_ = false;
bool V8::use_crankshaft_ = true;
+List<CallCompletedCallback>* V8::call_completed_callbacks_ = NULL;
+
+static LazyMutex entropy_mutex = LAZY_MUTEX_INITIALIZER;
-static Mutex* entropy_mutex = OS::CreateMutex();
static EntropySource entropy_source;
bool V8::Initialize(Deserializer* des) {
- // Setting --harmony implies all other harmony flags.
- // TODO(rossberg): Is there a better place to put this?
- if (FLAG_harmony) {
- FLAG_harmony_typeof = true;
- FLAG_harmony_scoping = true;
- FLAG_harmony_proxies = true;
- FLAG_harmony_collections = true;
- }
+ FlagList::EnforceFlagImplications();
InitializeOncePerProcess();
@@ -88,7 +86,7 @@ bool V8::Initialize(Deserializer* des) {
if (isolate->IsInitialized()) return true;
is_running_ = true;
- has_been_setup_ = true;
+ has_been_set_up_ = true;
has_fatal_error_ = false;
has_been_disposed_ = false;
@@ -106,11 +104,20 @@ void V8::TearDown() {
Isolate* isolate = Isolate::Current();
ASSERT(isolate->IsDefaultIsolate());
- if (!has_been_setup_ || has_been_disposed_) return;
+ if (!has_been_set_up_ || has_been_disposed_) return;
+
+ ElementsAccessor::TearDown();
+ LOperand::TearDownCaches();
+ RegisteredExtension::UnregisterAll();
+
isolate->TearDown();
+ delete isolate;
is_running_ = false;
has_been_disposed_ = true;
+
+ delete call_completed_callbacks_;
+ call_completed_callbacks_ = NULL;
}
@@ -120,7 +127,7 @@ static void seed_random(uint32_t* state) {
state[i] = FLAG_random_seed;
} else if (entropy_source != NULL) {
uint32_t val;
- ScopedLock lock(entropy_mutex);
+ ScopedLock lock(entropy_mutex.Pointer());
entropy_source(reinterpret_cast<unsigned char*>(&val), sizeof(uint32_t));
state[i] = val;
} else {
@@ -149,6 +156,12 @@ void V8::SetEntropySource(EntropySource source) {
}
+void V8::SetReturnAddressLocationResolver(
+ ReturnAddressLocationResolver resolver) {
+ StackFrame::SetReturnAddressLocationResolver(resolver);
+}
+
+
// Used by JavaScript APIs
uint32_t V8::Random(Context* context) {
ASSERT(context->IsGlobalContext());
@@ -166,13 +179,48 @@ uint32_t V8::RandomPrivate(Isolate* isolate) {
}
-bool V8::IdleNotification() {
+bool V8::IdleNotification(int hint) {
// Returning true tells the caller that there is no need to call
// IdleNotification again.
if (!FLAG_use_idle_notification) return true;
// Tell the heap that it may want to adjust.
- return HEAP->IdleNotification();
+ return HEAP->IdleNotification(hint);
+}
+
+
+void V8::AddCallCompletedCallback(CallCompletedCallback callback) {
+ if (call_completed_callbacks_ == NULL) { // Lazy init.
+ call_completed_callbacks_ = new List<CallCompletedCallback>();
+ }
+ for (int i = 0; i < call_completed_callbacks_->length(); i++) {
+ if (callback == call_completed_callbacks_->at(i)) return;
+ }
+ call_completed_callbacks_->Add(callback);
+}
+
+
+void V8::RemoveCallCompletedCallback(CallCompletedCallback callback) {
+ if (call_completed_callbacks_ == NULL) return;
+ for (int i = 0; i < call_completed_callbacks_->length(); i++) {
+ if (callback == call_completed_callbacks_->at(i)) {
+ call_completed_callbacks_->Remove(i);
+ }
+ }
+}
+
+
+void V8::FireCallCompletedCallback(Isolate* isolate) {
+ if (call_completed_callbacks_ == NULL) return;
+ HandleScopeImplementer* handle_scope_implementer =
+ isolate->handle_scope_implementer();
+ if (!handle_scope_implementer->CallDepthIsZero()) return;
+ // Fire callbacks. Increase call depth to prevent recursive callbacks.
+ handle_scope_implementer->IncrementCallDepth();
+ for (int i = 0; i < call_completed_callbacks_->length(); i++) {
+ call_completed_callbacks_->at(i)();
+ }
+ handle_scope_implementer->DecrementCallDepth();
}
@@ -185,30 +233,23 @@ typedef union {
Object* V8::FillHeapNumberWithRandom(Object* heap_number,
Context* context) {
+ double_int_union r;
uint64_t random_bits = Random(context);
- // Make a double* from address (heap_number + sizeof(double)).
- double_int_union* r = reinterpret_cast<double_int_union*>(
- reinterpret_cast<char*>(heap_number) +
- HeapNumber::kValueOffset - kHeapObjectTag);
// Convert 32 random bits to 0.(32 random bits) in a double
// by computing:
// ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
- const double binary_million = 1048576.0;
- r->double_value = binary_million;
- r->uint64_t_value |= random_bits;
- r->double_value -= binary_million;
+ static const double binary_million = 1048576.0;
+ r.double_value = binary_million;
+ r.uint64_t_value |= random_bits;
+ r.double_value -= binary_million;
+ HeapNumber::cast(heap_number)->set_value(r.double_value);
return heap_number;
}
-
-void V8::InitializeOncePerProcess() {
- ScopedLock lock(init_once_mutex);
- if (init_once_called) return;
- init_once_called = true;
-
- // Setup the platform OS support.
- OS::Setup();
+void V8::InitializeOncePerProcessImpl() {
+ // Set up the platform OS support.
+ OS::SetUp();
use_crankshaft_ = FLAG_crankshaft;
@@ -216,15 +257,14 @@ void V8::InitializeOncePerProcess() {
use_crankshaft_ = false;
}
- CPU::Setup();
+ CPU::SetUp();
if (!CPU::SupportsCrankshaft()) {
use_crankshaft_ = false;
}
- RuntimeProfiler::GlobalSetup();
+ OS::PostSetUp();
- // Peephole optimization might interfere with deoptimization.
- FLAG_peephole_optimization = !use_crankshaft_;
+ RuntimeProfiler::GlobalSetUp();
ElementsAccessor::InitializeOncePerProcess();
@@ -233,6 +273,15 @@ void V8::InitializeOncePerProcess() {
FLAG_gc_global = true;
FLAG_max_new_space_size = (1 << (kPageSizeBits - 10)) * 2;
}
+
+ LOperand::SetUpCaches();
+ SetUpJSCallerSavedCodeData();
+ SamplerRegistry::SetUp();
+ ExternalReference::SetUp();
+}
+
+void V8::InitializeOncePerProcess() {
+ CallOnce(&init_once, &InitializeOncePerProcessImpl);
}
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/v8.h b/src/3rdparty/v8/src/v8.h
index 01feefc..59ce602 100644
--- a/src/3rdparty/v8/src/v8.h
+++ b/src/3rdparty/v8/src/v8.h
@@ -95,6 +95,9 @@ class V8 : public AllStatic {
// Allows an entropy source to be provided for use in random number
// generation.
static void SetEntropySource(EntropySource source);
+ // Support for return-address rewriting profilers.
+ static void SetReturnAddressLocationResolver(
+ ReturnAddressLocationResolver resolver);
// Random number generation support. Not cryptographically safe.
static uint32_t Random(Context* context);
// We use random numbers internally in memory allocation and in the
@@ -106,15 +109,20 @@ class V8 : public AllStatic {
Context* context);
// Idle notification directly from the API.
- static bool IdleNotification();
+ static bool IdleNotification(int hint);
+
+ static void AddCallCompletedCallback(CallCompletedCallback callback);
+ static void RemoveCallCompletedCallback(CallCompletedCallback callback);
+ static void FireCallCompletedCallback(Isolate* isolate);
private:
+ static void InitializeOncePerProcessImpl();
static void InitializeOncePerProcess();
// True if engine is currently running
static bool is_running_;
// True if V8 has ever been run
- static bool has_been_setup_;
+ static bool has_been_set_up_;
// True if error has been signaled for current engine
// (reset to false if engine is restarted)
static bool has_fatal_error_;
@@ -123,6 +131,8 @@ class V8 : public AllStatic {
static bool has_been_disposed_;
// True if we are using the crankshaft optimizing compiler.
static bool use_crankshaft_;
+ // List of callbacks when a Call completes.
+ static List<CallCompletedCallback>* call_completed_callbacks_;
};
diff --git a/src/3rdparty/v8/src/v8conversions.h b/src/3rdparty/v8/src/v8conversions.h
index 1840e3a..0147d8c 100644
--- a/src/3rdparty/v8/src/v8conversions.h
+++ b/src/3rdparty/v8/src/v8conversions.h
@@ -34,13 +34,13 @@ namespace v8 {
namespace internal {
// Convert from Number object to C integer.
-static inline int32_t NumberToInt32(Object* number) {
+inline int32_t NumberToInt32(Object* number) {
if (number->IsSmi()) return Smi::cast(number)->value();
return DoubleToInt32(number->Number());
}
-static inline uint32_t NumberToUint32(Object* number) {
+inline uint32_t NumberToUint32(Object* number) {
if (number->IsSmi()) return Smi::cast(number)->value();
return DoubleToUint32(number->Number());
}
diff --git a/src/3rdparty/v8/src/v8globals.h b/src/3rdparty/v8/src/v8globals.h
index 40ce30c..bfc5e23 100644
--- a/src/3rdparty/v8/src/v8globals.h
+++ b/src/3rdparty/v8/src/v8globals.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -29,6 +29,7 @@
#define V8_V8GLOBALS_H_
#include "globals.h"
+#include "checks.h"
namespace v8 {
namespace internal {
@@ -106,14 +107,12 @@ const uint32_t kQuietNaNHighBitsMask = 0xfff << (51 - 32);
// -----------------------------------------------------------------------------
// Forward declarations for frequently used classes
-// (sorted alphabetically)
class AccessorInfo;
class Allocation;
class Arguments;
class Assembler;
class AssertNoAllocation;
-class BreakableStatement;
class Code;
class CodeGenerator;
class CodeStub;
@@ -123,21 +122,18 @@ class Debugger;
class DebugInfo;
class Descriptor;
class DescriptorArray;
-class Expression;
class ExternalReference;
class FixedArray;
-class FunctionEntry;
-class FunctionLiteral;
class FunctionTemplateInfo;
class MemoryChunk;
-class NumberDictionary;
+class SeededNumberDictionary;
+class UnseededNumberDictionary;
class StringDictionary;
template <typename T> class Handle;
class Heap;
class HeapObject;
class IC;
class InterceptorInfo;
-class IterationStatement;
class JSArray;
class JSFunction;
class JSObject;
@@ -148,32 +144,19 @@ class Map;
class MapSpace;
class MarkCompactCollector;
class NewSpace;
-class NodeVisitor;
class Object;
class MaybeObject;
class OldSpace;
-class Property;
class Foreign;
-class RegExpNode;
-struct RegExpCompileData;
-class RegExpTree;
-class RegExpCompiler;
-class RegExpVisitor;
class Scope;
-template<class Allocator = FreeStoreAllocationPolicy> class ScopeInfo;
-class SerializedScopeInfo;
+class ScopeInfo;
class Script;
-class Slot;
class Smi;
template <typename Config, class Allocator = FreeStoreAllocationPolicy>
class SplayTree;
-class Statement;
class String;
class Struct;
-class SwitchStatement;
-class AstVisitor;
class Variable;
-class VariableProxy;
class RelocInfo;
class Deserializer;
class MessageLocation;
@@ -324,30 +307,6 @@ typedef void (*StoreBufferCallback)(Heap* heap,
StoreBufferEvent event);
-// Type of properties.
-// Order of properties is significant.
-// Must fit in the BitField PropertyDetails::TypeField.
-// A copy of this is in mirror-debugger.js.
-enum PropertyType {
- NORMAL = 0, // only in slow mode
- FIELD = 1, // only in fast mode
- CONSTANT_FUNCTION = 2, // only in fast mode
- CALLBACKS = 3,
- HANDLER = 4, // only in lookup results, not in descriptors
- INTERCEPTOR = 5, // only in lookup results, not in descriptors
- MAP_TRANSITION = 6, // only in fast mode
- ELEMENTS_TRANSITION = 7,
- CONSTANT_TRANSITION = 8, // only in fast mode
- NULL_DESCRIPTOR = 9, // only in fast mode
- // All properties before MAP_TRANSITION are real.
- FIRST_PHANTOM_PROPERTY_TYPE = MAP_TRANSITION,
- // There are no IC stubs for NULL_DESCRIPTORS. Therefore,
- // NULL_DESCRIPTOR can be used as the type flag for IC stubs for
- // nonexistent properties.
- NONEXISTENT = NULL_DESCRIPTOR
-};
-
-
// Whether to remove map transitions and constant transitions from a
// DescriptorArray.
enum TransitionFlag {
@@ -502,6 +461,7 @@ enum CallKind {
enum ScopeType {
EVAL_SCOPE, // The top-level scope for an eval source.
FUNCTION_SCOPE, // The top-level scope for a function.
+ MODULE_SCOPE, // The scope introduced by a module literal
GLOBAL_SCOPE, // The top-level scope for a program or a top-level eval.
CATCH_SCOPE, // The scope introduced by catch.
BLOCK_SCOPE, // The scope introduced by a new block.
@@ -509,9 +469,9 @@ enum ScopeType {
};
-static const uint32_t kHoleNanUpper32 = 0x7FFFFFFF;
-static const uint32_t kHoleNanLower32 = 0xFFFFFFFF;
-static const uint32_t kNaNOrInfinityLowerBoundUpper32 = 0x7FF00000;
+const uint32_t kHoleNanUpper32 = 0x7FFFFFFF;
+const uint32_t kHoleNanLower32 = 0xFFFFFFFF;
+const uint32_t kNaNOrInfinityLowerBoundUpper32 = 0x7FF00000;
const uint64_t kHoleNanInt64 =
(static_cast<uint64_t>(kHoleNanUpper32) << 32) | kHoleNanLower32;
@@ -550,6 +510,43 @@ enum VariableMode {
};
+// ES6 Draft Rev3 10.2 specifies declarative environment records with mutable
+// and immutable bindings that can be in two states: initialized and
+// uninitialized. In ES5 only immutable bindings have these two states. When
+// accessing a binding, it needs to be checked for initialization. However in
+// the following cases the binding is initialized immediately after creation
+// so the initialization check can always be skipped:
+// 1. Var declared local variables.
+// var foo;
+// 2. A local variable introduced by a function declaration.
+// function foo() {}
+// 3. Parameters
+// function x(foo) {}
+// 4. Catch bound variables.
+// try {} catch (foo) {}
+// 6. Function variables of named function expressions.
+// var x = function foo() {}
+// 7. Implicit binding of 'this'.
+// 8. Implicit binding of 'arguments' in functions.
+//
+// ES5 specified object environment records which are introduced by ES elements
+// such as Program and WithStatement that associate identifier bindings with the
+// properties of some object. In the specification only mutable bindings exist
+// (which may be non-writable) and have no distinct initialization step. However
+// V8 allows const declarations in global code with distinct creation and
+// initialization steps which are represented by non-writable properties in the
+// global object. As a result also these bindings need to be checked for
+// initialization.
+//
+// The following enum specifies a flag that indicates if the binding needs a
+// distinct initialization step (kNeedsInitialization) or if the binding is
+// immediately initialized upon creation (kCreatedInitialized).
+enum InitializationFlag {
+ kNeedsInitialization,
+ kCreatedInitialized
+};
+
+
enum ClearExceptionFlag {
KEEP_EXCEPTION,
CLEAR_EXCEPTION
diff --git a/src/3rdparty/v8/src/v8memory.h b/src/3rdparty/v8/src/v8memory.h
index 901e78d..f71de82 100644
--- a/src/3rdparty/v8/src/v8memory.h
+++ b/src/3rdparty/v8/src/v8memory.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -60,6 +60,10 @@ class Memory {
return *reinterpret_cast<int*>(addr);
}
+ static unsigned& unsigned_at(Address addr) {
+ return *reinterpret_cast<unsigned*>(addr);
+ }
+
static double& double_at(Address addr) {
return *reinterpret_cast<double*>(addr);
}
diff --git a/src/3rdparty/v8/src/v8natives.js b/src/3rdparty/v8/src/v8natives.js
index 9595ad1..f1e8084 100644
--- a/src/3rdparty/v8/src/v8natives.js
+++ b/src/3rdparty/v8/src/v8natives.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -28,18 +28,18 @@
// This file relies on the fact that the following declarations have been made
//
// in runtime.js:
-// const $Object = global.Object;
-// const $Boolean = global.Boolean;
-// const $Number = global.Number;
-// const $Function = global.Function;
-// const $Array = global.Array;
-// const $NaN = 0/0;
+// var $Object = global.Object;
+// var $Boolean = global.Boolean;
+// var $Number = global.Number;
+// var $Function = global.Function;
+// var $Array = global.Array;
+// var $NaN = 0/0;
//
// in math.js:
-// const $floor = MathFloor
+// var $floor = MathFloor
-const $isNaN = GlobalIsNaN;
-const $isFinite = GlobalIsFinite;
+var $isNaN = GlobalIsNaN;
+var $isFinite = GlobalIsFinite;
// ----------------------------------------------------------------------------
@@ -60,18 +60,6 @@ function InstallFunctions(object, attributes, functions) {
%ToFastProperties(object);
}
-// Emulates JSC by installing functions on a hidden prototype that
-// lies above the current object/prototype. This lets you override
-// functions on String.prototype etc. and then restore the old function
-// with delete. See http://code.google.com/p/chromium/issues/detail?id=1717
-function InstallFunctionsOnHiddenPrototype(object, attributes, functions) {
- %CheckIsBootstrapping();
- var hidden_prototype = new $Object();
- %SetHiddenPrototype(object, hidden_prototype);
- InstallFunctions(hidden_prototype, attributes, functions);
-}
-
-
// Prevents changes to the prototype of a built-infunction.
// The "prototype" property of the function object is made non-configurable,
// and the prototype object is made non-extensible. The latter prevents
@@ -139,8 +127,9 @@ function GlobalParseInt(string, radix) {
// The spec says ToString should be evaluated before ToInt32.
string = TO_STRING_INLINE(string);
radix = TO_INT32(radix);
- if (!(radix == 0 || (2 <= radix && radix <= 36)))
+ if (!(radix == 0 || (2 <= radix && radix <= 36))) {
return $NaN;
+ }
}
if (%_HasCachedArrayIndex(string) &&
@@ -295,7 +284,8 @@ function ObjectDefineGetter(name, fun) {
receiver = %GlobalReceiver(global);
}
if (!IS_SPEC_FUNCTION(fun)) {
- throw new $TypeError('Object.prototype.__defineGetter__: Expecting function');
+ throw new $TypeError(
+ 'Object.prototype.__defineGetter__: Expecting function');
}
var desc = new PropertyDescriptor();
desc.setGet(fun);
@@ -341,8 +331,9 @@ function ObjectLookupSetter(name) {
function ObjectKeys(obj) {
- if (!IS_SPEC_OBJECT(obj))
- throw MakeTypeError("obj_ctor_property_non_object", ["keys"]);
+ if (!IS_SPEC_OBJECT(obj)) {
+ throw MakeTypeError("called_on_non_object", ["Object.keys"]);
+ }
if (%IsJSProxy(obj)) {
var handler = %GetHandler(obj);
var names = CallTrap0(handler, "keys", DerivedKeysTrap);
@@ -473,7 +464,7 @@ function ToPropertyDescriptor(obj) {
// For Harmony proxies.
function ToCompletePropertyDescriptor(obj) {
- var desc = ToPropertyDescriptor(obj)
+ var desc = ToPropertyDescriptor(obj);
if (IsGenericDescriptor(desc) || IsDataDescriptor(desc)) {
if (!desc.hasValue()) desc.setValue(void 0);
if (!desc.hasWritable()) desc.setWritable(false);
@@ -669,6 +660,21 @@ function GetOwnProperty(obj, v) {
}
+// ES5 section 8.12.7.
+function Delete(obj, p, should_throw) {
+ var desc = GetOwnProperty(obj, p);
+ if (IS_UNDEFINED(desc)) return true;
+ if (desc.isConfigurable()) {
+ %DeleteProperty(obj, p, 0);
+ return true;
+ } else if (should_throw) {
+ throw MakeTypeError("define_disallowed", [p]);
+ } else {
+ return;
+ }
+}
+
+
// Harmony proxies.
function DefineProxyProperty(obj, p, attributes, should_throw) {
var handler = %GetHandler(obj);
@@ -828,10 +834,6 @@ function DefineObjectProperty(obj, p, desc, should_throw) {
}
%DefineOrRedefineDataProperty(obj, p, value, flag);
- } else if (IsGenericDescriptor(desc)) {
- // Step 12 - updating an existing accessor property with generic
- // descriptor. Changing flags only.
- %DefineOrRedefineAccessorProperty(obj, p, GETTER, current.getGet(), flag);
} else {
// There are 3 cases that lead here:
// Step 4b - defining a new accessor property.
@@ -839,12 +841,9 @@ function DefineObjectProperty(obj, p, desc, should_throw) {
// property.
// Step 12 - updating an existing accessor property with an accessor
// descriptor.
- if (desc.hasGetter()) {
- %DefineOrRedefineAccessorProperty(obj, p, GETTER, desc.getGet(), flag);
- }
- if (desc.hasSetter()) {
- %DefineOrRedefineAccessorProperty(obj, p, SETTER, desc.getSet(), flag);
- }
+ var getter = desc.hasGetter() ? desc.getGet() : null;
+ var setter = desc.hasSetter() ? desc.getSet() : null;
+ %DefineOrRedefineAccessorProperty(obj, p, getter, setter, flag);
}
return true;
}
@@ -852,11 +851,13 @@ function DefineObjectProperty(obj, p, desc, should_throw) {
// ES5 section 15.4.5.1.
function DefineArrayProperty(obj, p, desc, should_throw) {
- var length_desc = GetOwnProperty(obj, "length");
- var length = length_desc.getValue();
+ // Note that the length of an array is not actually stored as part of the
+ // property, hence we use generated code throughout this function instead of
+ // DefineObjectProperty() to modify its value.
- // Step 3 - Special handling for the length property.
+ // Step 3 - Special handling for length property.
if (p == "length") {
+ var length = obj.length;
if (!desc.hasValue()) {
return DefineObjectProperty(obj, "length", desc, should_throw);
}
@@ -864,14 +865,42 @@ function DefineArrayProperty(obj, p, desc, should_throw) {
if (new_length != ToNumber(desc.getValue())) {
throw new $RangeError('defineProperty() array length out of range');
}
- // TODO(1756): There still are some uncovered corner cases left on how to
- // handle changes to the length property of arrays.
- return DefineObjectProperty(obj, "length", desc, should_throw);
+ var length_desc = GetOwnProperty(obj, "length");
+ if (new_length != length && !length_desc.isWritable()) {
+ if (should_throw) {
+ throw MakeTypeError("redefine_disallowed", [p]);
+ } else {
+ return false;
+ }
+ }
+ var threw = false;
+ while (new_length < length--) {
+ if (!Delete(obj, ToString(length), false)) {
+ new_length = length + 1;
+ threw = true;
+ break;
+ }
+ }
+ // Make sure the below call to DefineObjectProperty() doesn't overwrite
+ // any magic "length" property by removing the value.
+ obj.length = new_length;
+ desc.value_ = void 0;
+ desc.hasValue_ = false;
+ if (!DefineObjectProperty(obj, "length", desc, should_throw) || threw) {
+ if (should_throw) {
+ throw MakeTypeError("redefine_disallowed", [p]);
+ } else {
+ return false;
+ }
+ }
+ return true;
}
// Step 4 - Special handling for array index.
var index = ToUint32(p);
if (index == ToNumber(p) && index != 4294967295) {
+ var length = obj.length;
+ var length_desc = GetOwnProperty(obj, "length");
if ((index >= length && !length_desc.isWritable()) ||
!DefineObjectProperty(obj, p, desc, true)) {
if (should_throw) {
@@ -881,9 +910,6 @@ function DefineArrayProperty(obj, p, desc, should_throw) {
}
}
if (index >= length) {
- // TODO(mstarzinger): We should actually set the value of the property
- // descriptor here and pass it to DefineObjectProperty(). Take a look at
- // ES5 section 15.4.5.1, step 4.e.i and 4.e.ii for details.
obj.length = index + 1;
}
return true;
@@ -909,17 +935,19 @@ function DefineOwnProperty(obj, p, desc, should_throw) {
// ES5 section 15.2.3.2.
function ObjectGetPrototypeOf(obj) {
- if (!IS_SPEC_OBJECT(obj))
- throw MakeTypeError("obj_ctor_property_non_object", ["getPrototypeOf"]);
+ if (!IS_SPEC_OBJECT(obj)) {
+ throw MakeTypeError("called_on_non_object", ["Object.getPrototypeOf"]);
+ }
return %GetPrototype(obj);
}
// ES5 section 15.2.3.3
function ObjectGetOwnPropertyDescriptor(obj, p) {
- if (!IS_SPEC_OBJECT(obj))
- throw MakeTypeError("obj_ctor_property_non_object",
- ["getOwnPropertyDescriptor"]);
+ if (!IS_SPEC_OBJECT(obj)) {
+ throw MakeTypeError("called_on_non_object",
+ ["Object.getOwnPropertyDescriptor"]);
+ }
var desc = GetOwnProperty(obj, p);
return FromPropertyDescriptor(desc);
}
@@ -932,11 +960,11 @@ function ToStringArray(obj, trap) {
}
var n = ToUint32(obj.length);
var array = new $Array(n);
- var names = {} // TODO(rossberg): use sets once they are ready.
+ var names = {}; // TODO(rossberg): use sets once they are ready.
for (var index = 0; index < n; index++) {
var s = ToString(obj[index]);
if (s in names) {
- throw MakeTypeError("proxy_repeated_prop_name", [obj, trap, s])
+ throw MakeTypeError("proxy_repeated_prop_name", [obj, trap, s]);
}
array[index] = s;
names[s] = 0;
@@ -947,9 +975,9 @@ function ToStringArray(obj, trap) {
// ES5 section 15.2.3.4.
function ObjectGetOwnPropertyNames(obj) {
- if (!IS_SPEC_OBJECT(obj))
- throw MakeTypeError("obj_ctor_property_non_object", ["getOwnPropertyNames"]);
-
+ if (!IS_SPEC_OBJECT(obj)) {
+ throw MakeTypeError("called_on_non_object", ["Object.getOwnPropertyNames"]);
+ }
// Special handling for proxies.
if (%IsJSProxy(obj)) {
var handler = %GetHandler(obj);
@@ -966,8 +994,9 @@ function ObjectGetOwnPropertyNames(obj) {
if (%GetInterceptorInfo(obj) & 1) {
var indexedInterceptorNames =
%GetIndexedInterceptorElementNames(obj);
- if (indexedInterceptorNames)
+ if (indexedInterceptorNames) {
propertyNames = propertyNames.concat(indexedInterceptorNames);
+ }
}
// Find all the named properties.
@@ -993,8 +1022,9 @@ function ObjectGetOwnPropertyNames(obj) {
// We need to check for the exact property value since for intrinsic
// properties like toString if(propertySet["toString"]) will always
// succeed.
- if (propertySet[name] === true)
+ if (propertySet[name] === true) {
continue;
+ }
propertySet[name] = true;
propertyNames[j++] = name;
}
@@ -1019,7 +1049,7 @@ function ObjectCreate(proto, properties) {
// ES5 section 15.2.3.6.
function ObjectDefineProperty(obj, p, attributes) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object", ["defineProperty"]);
+ throw MakeTypeError("called_on_non_object", ["Object.defineProperty"]);
}
var name = ToString(p);
if (%IsJSProxy(obj)) {
@@ -1070,8 +1100,9 @@ function GetOwnEnumerablePropertyNames(properties) {
// ES5 section 15.2.3.7.
function ObjectDefineProperties(obj, properties) {
- if (!IS_SPEC_OBJECT(obj))
- throw MakeTypeError("obj_ctor_property_non_object", ["defineProperties"]);
+ if (!IS_SPEC_OBJECT(obj)) {
+ throw MakeTypeError("called_on_non_object", ["Object.defineProperties"]);
+ }
var props = ToObject(properties);
var names = GetOwnEnumerablePropertyNames(props);
var descriptors = new InternalArray();
@@ -1103,11 +1134,10 @@ function ProxyFix(obj) {
// We just put in some half-reasonable defaults for now.
var prototype = new $Object();
$Object.defineProperty(prototype, "constructor",
- {value: obj, writable: true, enumerable: false, configrable: true});
- $Object.defineProperty(obj, "prototype",
- {value: prototype, writable: true, enumerable: false, configrable: false})
- $Object.defineProperty(obj, "length",
- {value: 0, writable: true, enumerable: false, configrable: false});
+ {value: obj, writable: true, enumerable: false, configurable: true});
+ // TODO(v8:1530): defineProperty does not handle prototype and length.
+ %FunctionSetPrototype(obj, prototype);
+ obj.length = 0;
} else {
%Fix(obj);
}
@@ -1118,7 +1148,7 @@ function ProxyFix(obj) {
// ES5 section 15.2.3.8.
function ObjectSeal(obj) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object", ["seal"]);
+ throw MakeTypeError("called_on_non_object", ["Object.seal"]);
}
if (%IsJSProxy(obj)) {
ProxyFix(obj);
@@ -1140,7 +1170,7 @@ function ObjectSeal(obj) {
// ES5 section 15.2.3.9.
function ObjectFreeze(obj) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object", ["freeze"]);
+ throw MakeTypeError("called_on_non_object", ["Object.freeze"]);
}
if (%IsJSProxy(obj)) {
ProxyFix(obj);
@@ -1163,7 +1193,7 @@ function ObjectFreeze(obj) {
// ES5 section 15.2.3.10
function ObjectPreventExtension(obj) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object", ["preventExtension"]);
+ throw MakeTypeError("called_on_non_object", ["Object.preventExtension"]);
}
if (%IsJSProxy(obj)) {
ProxyFix(obj);
@@ -1176,7 +1206,7 @@ function ObjectPreventExtension(obj) {
// ES5 section 15.2.3.11
function ObjectIsSealed(obj) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object", ["isSealed"]);
+ throw MakeTypeError("called_on_non_object", ["Object.isSealed"]);
}
if (%IsJSProxy(obj)) {
return false;
@@ -1197,7 +1227,7 @@ function ObjectIsSealed(obj) {
// ES5 section 15.2.3.12
function ObjectIsFrozen(obj) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object", ["isFrozen"]);
+ throw MakeTypeError("called_on_non_object", ["Object.isFrozen"]);
}
if (%IsJSProxy(obj)) {
return false;
@@ -1219,7 +1249,7 @@ function ObjectIsFrozen(obj) {
// ES5 section 15.2.3.13
function ObjectIsExtensible(obj) {
if (!IS_SPEC_OBJECT(obj)) {
- throw MakeTypeError("obj_ctor_property_non_object", ["isExtensible"]);
+ throw MakeTypeError("called_on_non_object", ["Object.isExtensible"]);
}
if (%IsJSProxy(obj)) {
return true;
@@ -1228,6 +1258,16 @@ function ObjectIsExtensible(obj) {
}
+// Harmony egal.
+function ObjectIs(obj1, obj2) {
+ if (obj1 === obj2) {
+ return (obj1 !== 0) || (1 / obj1 === 1 / obj2);
+ } else {
+ return (obj1 !== obj1) && (obj2 !== obj2);
+ }
+}
+
+
%SetCode($Object, function(x) {
if (%_IsConstructCall()) {
if (x == null) return this;
@@ -1267,6 +1307,7 @@ function SetUpObject() {
"getPrototypeOf", ObjectGetPrototypeOf,
"getOwnPropertyDescriptor", ObjectGetOwnPropertyDescriptor,
"getOwnPropertyNames", ObjectGetOwnPropertyNames,
+ "is", ObjectIs,
"isExtensible", ObjectIsExtensible,
"isFrozen", ObjectIsFrozen,
"isSealed", ObjectIsSealed,
@@ -1297,8 +1338,9 @@ function BooleanToString() {
function BooleanValueOf() {
// NOTE: Both Boolean objects and values can enter here as
// 'this'. This is not as dictated by ECMA-262.
- if (!IS_BOOLEAN(this) && !IS_BOOLEAN_WRAPPER(this))
+ if (!IS_BOOLEAN(this) && !IS_BOOLEAN_WRAPPER(this)) {
throw new $TypeError('Boolean.prototype.valueOf is not generic');
+ }
return %_ValueOf(this);
}
@@ -1338,8 +1380,9 @@ function NumberToString(radix) {
// 'this'. This is not as dictated by ECMA-262.
var number = this;
if (!IS_NUMBER(this)) {
- if (!IS_NUMBER_WRAPPER(this))
+ if (!IS_NUMBER_WRAPPER(this)) {
throw new $TypeError('Number.prototype.toString is not generic');
+ }
// Get the value of this number in case it's an object.
number = %_ValueOf(this);
}
@@ -1372,8 +1415,9 @@ function NumberToLocaleString() {
function NumberValueOf() {
// NOTE: Both Number objects and values can enter here as
// 'this'. This is not as dictated by ECMA-262.
- if (!IS_NUMBER(this) && !IS_NUMBER_WRAPPER(this))
+ if (!IS_NUMBER(this) && !IS_NUMBER_WRAPPER(this)) {
throw new $TypeError('Number.prototype.valueOf is not generic');
+ }
return %_ValueOf(this);
}
@@ -1399,7 +1443,8 @@ function NumberToExponential(fractionDigits) {
if (!IS_UNDEFINED(fractionDigits)) {
f = TO_INTEGER(fractionDigits);
if (f < 0 || f > 20) {
- throw new $RangeError("toExponential() argument must be between 0 and 20");
+ throw new $RangeError(
+ "toExponential() argument must be between 0 and 20");
}
}
if (IS_NULL_OR_UNDEFINED(this) && !IS_UNDETECTABLE(this)) {
@@ -1427,6 +1472,18 @@ function NumberToPrecision(precision) {
}
+// Harmony isFinite.
+function NumberIsFinite(number) {
+ return IS_NUMBER(number) && NUMBER_IS_FINITE(number);
+}
+
+
+// Harmony isNaN.
+function NumberIsNaN(number) {
+ return IS_NUMBER(number) && NUMBER_IS_NAN(number);
+}
+
+
// ----------------------------------------------------------------------------
function SetUpNumber() {
@@ -1443,7 +1500,8 @@ function SetUpNumber() {
DONT_ENUM | DONT_DELETE | READ_ONLY);
// ECMA-262 section 15.7.3.2.
- %SetProperty($Number, "MIN_VALUE", 5e-324, DONT_ENUM | DONT_DELETE | READ_ONLY);
+ %SetProperty($Number, "MIN_VALUE", 5e-324,
+ DONT_ENUM | DONT_DELETE | READ_ONLY);
// ECMA-262 section 15.7.3.3.
%SetProperty($Number, "NaN", $NaN, DONT_ENUM | DONT_DELETE | READ_ONLY);
@@ -1470,6 +1528,10 @@ function SetUpNumber() {
"toExponential", NumberToExponential,
"toPrecision", NumberToPrecision
));
+ InstallFunctions($Number, DONT_ENUM, $Array(
+ "isFinite", NumberIsFinite,
+ "isNaN", NumberIsNaN
+ ));
}
SetUpNumber();
@@ -1561,7 +1623,8 @@ function FunctionBind(this_arg) { // Length is 1.
}
// This runtime function finds any remaining arguments on the stack,
// so we don't pass the arguments object.
- var result = %FunctionBindArguments(boundFunction, this, this_arg, new_length);
+ var result = %FunctionBindArguments(boundFunction, this,
+ this_arg, new_length);
// We already have caller and arguments properties on functions,
// which are non-configurable. It therefore makes no sence to
diff --git a/src/3rdparty/v8/src/v8threads.cc b/src/3rdparty/v8/src/v8threads.cc
index 3881d66..fd8d536 100644
--- a/src/3rdparty/v8/src/v8threads.cc
+++ b/src/3rdparty/v8/src/v8threads.cc
@@ -154,7 +154,7 @@ namespace internal {
bool ThreadManager::RestoreThread() {
ASSERT(IsLockedByCurrentThread());
- // First check whether the current thread has been 'lazily archived', ie
+ // First check whether the current thread has been 'lazily archived', i.e.
// not archived at all. If that is the case we put the state storage we
// had prepared back in the free list, since we didn't need it after all.
if (lazily_archived_thread_.Equals(ThreadId::Current())) {
diff --git a/src/3rdparty/v8/src/v8threads.h b/src/3rdparty/v8/src/v8threads.h
index 4002bb3..a2aee4e 100644
--- a/src/3rdparty/v8/src/v8threads.h
+++ b/src/3rdparty/v8/src/v8threads.h
@@ -72,7 +72,7 @@ class ThreadState {
};
-// Defined in top.h
+// Defined in isolate.h.
class ThreadLocalTop;
diff --git a/src/3rdparty/v8/src/v8utils.cc b/src/3rdparty/v8/src/v8utils.cc
index bf0e05d..042a60f 100644
--- a/src/3rdparty/v8/src/v8utils.cc
+++ b/src/3rdparty/v8/src/v8utils.cc
@@ -316,7 +316,7 @@ bool MemoryMappedExternalResource::EnsureIsAscii(bool abort_if_failed) const {
for (const char* p = data_; p < end; p++) {
char c = *p;
if ((c & 0x80) != 0) {
- // Non-ascii detected:
+ // Non-ASCII detected:
is_ascii = false;
// Report the error and abort if appropriate:
@@ -329,7 +329,7 @@ bool MemoryMappedExternalResource::EnsureIsAscii(bool abort_if_failed) const {
c, filename_, line_no, char_no);
// Allow for some context up to kNumberOfLeadingContextChars chars
- // before the offending non-ascii char to help the user see where
+ // before the offending non-ASCII char to help the user see where
// the offending char is.
const int kNumberOfLeadingContextChars = 10;
const char* err_context = p - kNumberOfLeadingContextChars;
@@ -345,7 +345,7 @@ bool MemoryMappedExternalResource::EnsureIsAscii(bool abort_if_failed) const {
OS::Abort();
}
- break; // Non-ascii detected. No need to continue scanning.
+ break; // Non-ASCII detected. No need to continue scanning.
}
if (c == '\n') {
start_of_line = p;
diff --git a/src/3rdparty/v8/src/v8utils.h b/src/3rdparty/v8/src/v8utils.h
index c854f04..c73222a 100644
--- a/src/3rdparty/v8/src/v8utils.h
+++ b/src/3rdparty/v8/src/v8utils.h
@@ -143,7 +143,7 @@ inline void CopyWords(T* dst, T* src, int num_words) {
template <typename T, typename U>
-static inline void MemsetPointer(T** dest, U* value, int counter) {
+inline void MemsetPointer(T** dest, U* value, int counter) {
#ifdef DEBUG
T* a = NULL;
U* b = NULL;
@@ -202,7 +202,7 @@ Vector<const char> ReadFile(FILE* file,
// Copy from ASCII/16bit chars to ASCII/16bit chars.
template <typename sourcechar, typename sinkchar>
-static inline void CopyChars(sinkchar* dest, const sourcechar* src, int chars) {
+inline void CopyChars(sinkchar* dest, const sourcechar* src, int chars) {
sinkchar* limit = dest + chars;
#ifdef V8_HOST_CAN_READ_UNALIGNED
if (sizeof(*dest) == sizeof(*src)) {
diff --git a/src/3rdparty/v8/src/variables.cc b/src/3rdparty/v8/src/variables.cc
index 1887935..32ad5bc 100644
--- a/src/3rdparty/v8/src/variables.cc
+++ b/src/3rdparty/v8/src/variables.cc
@@ -58,20 +58,26 @@ Variable::Variable(Scope* scope,
Handle<String> name,
VariableMode mode,
bool is_valid_LHS,
- Kind kind)
+ Kind kind,
+ InitializationFlag initialization_flag,
+ Interface* interface)
: scope_(scope),
name_(name),
mode_(mode),
kind_(kind),
location_(UNALLOCATED),
index_(-1),
+ initializer_position_(RelocInfo::kNoPosition),
local_if_not_shadowed_(NULL),
is_valid_LHS_(is_valid_LHS),
- is_accessed_from_inner_scope_(false),
+ force_context_allocation_(false),
is_used_(false),
- is_qml_global_(false) {
- // names must be canonicalized for fast equality checks
+ initialization_flag_(initialization_flag),
+ interface_(interface) {
+ // Names must be canonicalized for fast equality checks.
ASSERT(name->IsSymbol());
+ // Var declared variables never need initialization.
+ ASSERT(!(mode == VAR && initialization_flag == kNeedsInitialization));
}
@@ -81,4 +87,12 @@ bool Variable::is_global() const {
return mode_ != TEMPORARY && scope_ != NULL && scope_->is_global_scope();
}
+
+int Variable::CompareIndex(Variable* const* v, Variable* const* w) {
+ int x = (*v)->index();
+ int y = (*w)->index();
+ // Consider sorting them according to type as well?
+ return x - y;
+}
+
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/variables.h b/src/3rdparty/v8/src/variables.h
index d1112ff..f49b6e1 100644
--- a/src/3rdparty/v8/src/variables.h
+++ b/src/3rdparty/v8/src/variables.h
@@ -29,6 +29,7 @@
#define V8_VARIABLES_H_
#include "zone.h"
+#include "interface.h"
namespace v8 {
namespace internal {
@@ -77,7 +78,9 @@ class Variable: public ZoneObject {
Handle<String> name,
VariableMode mode,
bool is_valid_lhs,
- Kind kind);
+ Kind kind,
+ InitializationFlag initialization_flag,
+ Interface* interface = Interface::NewValue());
// Printing support
static const char* Mode2String(VariableMode mode);
@@ -92,16 +95,19 @@ class Variable: public ZoneObject {
Handle<String> name() const { return name_; }
VariableMode mode() const { return mode_; }
- bool is_accessed_from_inner_scope() const {
- return is_accessed_from_inner_scope_;
+ bool has_forced_context_allocation() const {
+ return force_context_allocation_;
}
- void MarkAsAccessedFromInnerScope() {
+ void ForceContextAllocation() {
ASSERT(mode_ != TEMPORARY);
- is_accessed_from_inner_scope_ = true;
+ force_context_allocation_ = true;
}
bool is_used() { return is_used_; }
void set_is_used(bool flag) { is_used_ = flag; }
+ int initializer_position() { return initializer_position_; }
+ void set_initializer_position(int pos) { initializer_position_ = pos; }
+
bool IsVariable(Handle<String> n) const {
return !is_this() && name().is_identical_to(n);
}
@@ -123,9 +129,7 @@ class Variable: public ZoneObject {
mode_ == CONST_HARMONY);
}
bool binding_needs_init() const {
- return (mode_ == LET ||
- mode_ == CONST ||
- mode_ == CONST_HARMONY);
+ return initialization_flag_ == kNeedsInitialization;
}
bool is_global() const;
@@ -148,14 +152,18 @@ class Variable: public ZoneObject {
Location location() const { return location_; }
int index() const { return index_; }
+ InitializationFlag initialization_flag() const {
+ return initialization_flag_;
+ }
+ Interface* interface() const { return interface_; }
void AllocateTo(Location location, int index) {
location_ = location;
index_ = index;
}
- bool is_qml_global() const { return is_qml_global_; }
- void set_is_qml_global(bool is_qml_global) { is_qml_global_ = is_qml_global; }
+ static int CompareIndex(Variable* const* v, Variable* const* w);
+
private:
Scope* scope_;
Handle<String> name_;
@@ -163,6 +171,7 @@ class Variable: public ZoneObject {
Kind kind_;
Location location_;
int index_;
+ int initializer_position_;
// If this field is set, this variable references the stored locally bound
// variable, but it might be shadowed by variable bindings introduced by
@@ -174,11 +183,12 @@ class Variable: public ZoneObject {
bool is_valid_LHS_;
// Usage info.
- bool is_accessed_from_inner_scope_; // set by variable resolver
+ bool force_context_allocation_; // set by variable resolver
bool is_used_;
+ InitializationFlag initialization_flag_;
- // QML info
- bool is_qml_global_;
+ // Module type info.
+ Interface* interface_;
};
diff --git a/src/3rdparty/v8/src/version.cc b/src/3rdparty/v8/src/version.cc
index f358be7..6b07103 100644
--- a/src/3rdparty/v8/src/version.cc
+++ b/src/3rdparty/v8/src/version.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -33,12 +33,12 @@
// NOTE these macros are used by the SCons build script so their names
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 3
-#define MINOR_VERSION 7
-#define BUILD_NUMBER 4
+#define MINOR_VERSION 10
+#define BUILD_NUMBER 1
#define PATCH_LEVEL 0
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
-#define IS_CANDIDATE_VERSION 1
+#define IS_CANDIDATE_VERSION 0
// Define SONAME to have the SCons build the put a specific SONAME into the
// shared library instead the generic SONAME generated from the V8 version
diff --git a/src/3rdparty/v8/src/win32-headers.h b/src/3rdparty/v8/src/win32-headers.h
index 9c7771f..5d9c89e 100644
--- a/src/3rdparty/v8/src/win32-headers.h
+++ b/src/3rdparty/v8/src/win32-headers.h
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
diff --git a/src/3rdparty/v8/src/x64/assembler-x64-inl.h b/src/3rdparty/v8/src/x64/assembler-x64-inl.h
index f7b87ec..a9cc2ef 100644
--- a/src/3rdparty/v8/src/x64/assembler-x64-inl.h
+++ b/src/3rdparty/v8/src/x64/assembler-x64-inl.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -28,6 +28,8 @@
#ifndef V8_X64_ASSEMBLER_X64_INL_H_
#define V8_X64_ASSEMBLER_X64_INL_H_
+#include "x64/assembler-x64.h"
+
#include "cpu.h"
#include "debug.h"
#include "v8memory.h"
@@ -224,16 +226,18 @@ Address RelocInfo::target_address() {
Address RelocInfo::target_address_address() {
- ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
+ ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY
+ || rmode_ == EMBEDDED_OBJECT
+ || rmode_ == EXTERNAL_REFERENCE);
return reinterpret_cast<Address>(pc_);
}
int RelocInfo::target_address_size() {
if (IsCodedSpecially()) {
- return Assembler::kCallTargetSize;
+ return Assembler::kSpecialTargetSize;
} else {
- return Assembler::kExternalTargetSize;
+ return kPointerSize;
}
}
@@ -260,7 +264,7 @@ Object* RelocInfo::target_object() {
}
-Handle<Object> RelocInfo::target_object_handle(Assembler *origin) {
+Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
if (rmode_ == EMBEDDED_OBJECT) {
return Memory::Object_Handle_at(pc_);
@@ -399,7 +403,7 @@ void RelocInfo::Visit(ObjectVisitor* visitor) {
} else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
visitor->VisitGlobalPropertyCell(this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
- visitor->VisitExternalReference(target_reference_address());
+ visitor->VisitExternalReference(this);
CPU::FlushICache(pc_, sizeof(Address));
#ifdef ENABLE_DEBUGGER_SUPPORT
// TODO(isolates): Get a cached isolate below.
@@ -427,7 +431,7 @@ void RelocInfo::Visit(Heap* heap) {
} else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
StaticVisitor::VisitGlobalPropertyCell(heap, this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
- StaticVisitor::VisitExternalReference(target_reference_address());
+ StaticVisitor::VisitExternalReference(this);
CPU::FlushICache(pc_, sizeof(Address));
#ifdef ENABLE_DEBUGGER_SUPPORT
} else if (heap->isolate()->debug()->has_break_points() &&
diff --git a/src/3rdparty/v8/src/x64/assembler-x64.cc b/src/3rdparty/v8/src/x64/assembler-x64.cc
index 1c4980e..2f0c542 100644
--- a/src/3rdparty/v8/src/x64/assembler-x64.cc
+++ b/src/3rdparty/v8/src/x64/assembler-x64.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -383,7 +383,7 @@ Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
}
#endif
- // Setup buffer pointers.
+ // Set up buffer pointers.
ASSERT(buffer_ != NULL);
pc_ = buffer_;
reloc_info_writer.Reposition(buffer_ + buffer_size, pc_);
@@ -412,7 +412,7 @@ void Assembler::GetCode(CodeDesc* desc) {
// Finalize code (at this point overflow() may be true, but the gap ensures
// that we are still not overlapping instructions and relocation info).
ASSERT(pc_ <= reloc_info_writer.pos()); // No overlap.
- // Setup code descriptor.
+ // Set up code descriptor.
desc->buffer = buffer_;
desc->buffer_size = buffer_size_;
desc->instr_size = pc_offset();
@@ -426,13 +426,7 @@ void Assembler::GetCode(CodeDesc* desc) {
void Assembler::Align(int m) {
ASSERT(IsPowerOf2(m));
int delta = (m - (pc_offset() & (m - 1))) & (m - 1);
- while (delta >= 9) {
- nop(9);
- delta -= 9;
- }
- if (delta > 0) {
- nop(delta);
- }
+ Nop(delta);
}
@@ -441,6 +435,15 @@ void Assembler::CodeTargetAlign() {
}
+bool Assembler::IsNop(Address addr) {
+ Address a = addr;
+ while (*a == 0x66) a++;
+ if (*a == 0x90) return true;
+ if (a[0] == 0xf && a[1] == 0x1f) return true;
+ return false;
+}
+
+
void Assembler::bind_to(Label* L, int pos) {
ASSERT(!L->is_bound()); // Label may only be bound once.
ASSERT(0 <= pos && pos <= pc_offset()); // Position must be valid.
@@ -499,7 +502,7 @@ void Assembler::GrowBuffer() {
V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
}
- // Setup new buffer.
+ // Set up new buffer.
desc.buffer = NewArray<byte>(desc.buffer_size);
desc.instr_size = pc_offset();
desc.reloc_size =
@@ -772,7 +775,7 @@ void Assembler::immediate_arithmetic_op_8(byte subcode,
Register dst,
Immediate src) {
EnsureSpace ensure_space(this);
- if (dst.code() > 3) {
+ if (!dst.is_byte_register()) {
// Use 64-bit mode byte registers.
emit_rex_64(dst);
}
@@ -1056,7 +1059,7 @@ void Assembler::decl(const Operand& dst) {
void Assembler::decb(Register dst) {
EnsureSpace ensure_space(this);
- if (dst.code() > 3) {
+ if (!dst.is_byte_register()) {
// Register is not one of al, bl, cl, dl. Its encoding needs REX.
emit_rex_32(dst);
}
@@ -1384,7 +1387,7 @@ void Assembler::leave() {
void Assembler::movb(Register dst, const Operand& src) {
EnsureSpace ensure_space(this);
- if (dst.code() > 3) {
+ if (!dst.is_byte_register()) {
// Register is not one of al, bl, cl, dl. Its encoding needs REX.
emit_rex_32(dst, src);
} else {
@@ -1397,7 +1400,7 @@ void Assembler::movb(Register dst, const Operand& src) {
void Assembler::movb(Register dst, Immediate imm) {
EnsureSpace ensure_space(this);
- if (dst.code() > 3) {
+ if (!dst.is_byte_register()) {
emit_rex_32(dst);
}
emit(0xB0 + dst.low_bits());
@@ -1407,7 +1410,7 @@ void Assembler::movb(Register dst, Immediate imm) {
void Assembler::movb(const Operand& dst, Register src) {
EnsureSpace ensure_space(this);
- if (src.code() > 3) {
+ if (!src.is_byte_register()) {
emit_rex_32(src, dst);
} else {
emit_optional_rex_32(src, dst);
@@ -1637,6 +1640,8 @@ void Assembler::movsxlq(Register dst, const Operand& src) {
void Assembler::movzxbq(Register dst, const Operand& src) {
EnsureSpace ensure_space(this);
+ // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
+ // there is no need to make this a 64 bit operation.
emit_optional_rex_32(dst, src);
emit(0x0F);
emit(0xB6);
@@ -1763,7 +1768,7 @@ void Assembler::notl(Register dst) {
}
-void Assembler::nop(int n) {
+void Assembler::Nop(int n) {
// The recommended muti-byte sequences of NOP instructions from the Intel 64
// and IA-32 Architectures Software Developer's Manual.
//
@@ -1778,73 +1783,64 @@ void Assembler::nop(int n) {
// 9 bytes 66 NOP DWORD ptr [EAX + EAX*1 + 66 0F 1F 84 00 00 00 00
// 00000000H] 00H
- ASSERT(1 <= n);
- ASSERT(n <= 9);
EnsureSpace ensure_space(this);
- switch (n) {
- case 1:
- emit(0x90);
- return;
- case 2:
- emit(0x66);
- emit(0x90);
- return;
- case 3:
- emit(0x0f);
- emit(0x1f);
- emit(0x00);
- return;
- case 4:
- emit(0x0f);
- emit(0x1f);
- emit(0x40);
- emit(0x00);
- return;
- case 5:
- emit(0x0f);
- emit(0x1f);
- emit(0x44);
- emit(0x00);
- emit(0x00);
- return;
- case 6:
- emit(0x66);
- emit(0x0f);
- emit(0x1f);
- emit(0x44);
- emit(0x00);
- emit(0x00);
- return;
- case 7:
- emit(0x0f);
- emit(0x1f);
- emit(0x80);
- emit(0x00);
- emit(0x00);
- emit(0x00);
- emit(0x00);
- return;
- case 8:
- emit(0x0f);
- emit(0x1f);
- emit(0x84);
- emit(0x00);
- emit(0x00);
- emit(0x00);
- emit(0x00);
- emit(0x00);
- return;
- case 9:
- emit(0x66);
- emit(0x0f);
- emit(0x1f);
- emit(0x84);
- emit(0x00);
- emit(0x00);
- emit(0x00);
- emit(0x00);
- emit(0x00);
- return;
+ while (n > 0) {
+ switch (n) {
+ case 2:
+ emit(0x66);
+ case 1:
+ emit(0x90);
+ return;
+ case 3:
+ emit(0x0f);
+ emit(0x1f);
+ emit(0x00);
+ return;
+ case 4:
+ emit(0x0f);
+ emit(0x1f);
+ emit(0x40);
+ emit(0x00);
+ return;
+ case 6:
+ emit(0x66);
+ case 5:
+ emit(0x0f);
+ emit(0x1f);
+ emit(0x44);
+ emit(0x00);
+ emit(0x00);
+ return;
+ case 7:
+ emit(0x0f);
+ emit(0x1f);
+ emit(0x80);
+ emit(0x00);
+ emit(0x00);
+ emit(0x00);
+ emit(0x00);
+ return;
+ default:
+ case 11:
+ emit(0x66);
+ n--;
+ case 10:
+ emit(0x66);
+ n--;
+ case 9:
+ emit(0x66);
+ n--;
+ case 8:
+ emit(0x0f);
+ emit(0x1f);
+ emit(0x84);
+ emit(0x00);
+ emit(0x00);
+ emit(0x00);
+ emit(0x00);
+ emit(0x00);
+ n -= 8;
+ }
}
}
@@ -1937,7 +1933,7 @@ void Assembler::setcc(Condition cc, Register reg) {
}
EnsureSpace ensure_space(this);
ASSERT(is_uint4(cc));
- if (reg.code() > 3) { // Use x64 byte registers, where different.
+ if (!reg.is_byte_register()) { // Use x64 byte registers, where different.
emit_rex_32(reg);
}
emit(0x0F);
@@ -2002,7 +1998,7 @@ void Assembler::testb(Register dst, Register src) {
emit(0x84);
emit_modrm(src, dst);
} else {
- if (dst.code() > 3 || src.code() > 3) {
+ if (!dst.is_byte_register() || !src.is_byte_register()) {
// Register is not one of al, bl, cl, dl. Its encoding needs REX.
emit_rex_32(dst, src);
}
@@ -2019,7 +2015,7 @@ void Assembler::testb(Register reg, Immediate mask) {
emit(0xA8);
emit(mask.value_); // Low byte emitted.
} else {
- if (reg.code() > 3) {
+ if (!reg.is_byte_register()) {
// Register is not one of al, bl, cl, dl. Its encoding needs REX.
emit_rex_32(reg);
}
@@ -2042,7 +2038,7 @@ void Assembler::testb(const Operand& op, Immediate mask) {
void Assembler::testb(const Operand& op, Register reg) {
EnsureSpace ensure_space(this);
- if (reg.code() > 3) {
+ if (!reg.is_byte_register()) {
// Register is not one of al, bl, cl, dl. Its encoding needs REX.
emit_rex_32(reg, op);
} else {
@@ -2299,6 +2295,13 @@ void Assembler::fsin() {
}
+void Assembler::fptan() {
+ EnsureSpace ensure_space(this);
+ emit(0xD9);
+ emit(0xF2);
+}
+
+
void Assembler::fyl2x() {
EnsureSpace ensure_space(this);
emit(0xD9);
@@ -2306,6 +2309,27 @@ void Assembler::fyl2x() {
}
+void Assembler::f2xm1() {
+ EnsureSpace ensure_space(this);
+ emit(0xD9);
+ emit(0xF0);
+}
+
+
+void Assembler::fscale() {
+ EnsureSpace ensure_space(this);
+ emit(0xD9);
+ emit(0xFD);
+}
+
+
+void Assembler::fninit() {
+ EnsureSpace ensure_space(this);
+ emit(0xDB);
+ emit(0xE3);
+}
+
+
void Assembler::fadd(int i) {
EnsureSpace ensure_space(this);
emit_farith(0xDC, 0xC0, i);
@@ -2565,7 +2589,8 @@ void Assembler::movdqa(XMMRegister dst, const Operand& src) {
void Assembler::extractps(Register dst, XMMRegister src, byte imm8) {
- ASSERT(is_uint2(imm8));
+ ASSERT(CpuFeatures::IsSupported(SSE4_1));
+ ASSERT(is_uint8(imm8));
EnsureSpace ensure_space(this);
emit(0x66);
emit_optional_rex_32(dst, src);
@@ -3020,8 +3045,6 @@ bool RelocInfo::IsCodedSpecially() {
return (1 << rmode_) & kApplyMask;
}
-
-
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_X64
diff --git a/src/3rdparty/v8/src/x64/assembler-x64.h b/src/3rdparty/v8/src/x64/assembler-x64.h
index 0d87053..60b29e6 100644
--- a/src/3rdparty/v8/src/x64/assembler-x64.h
+++ b/src/3rdparty/v8/src/x64/assembler-x64.h
@@ -30,7 +30,7 @@
// The original source code covered by the above license above has been
// modified significantly by Google Inc.
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// A lightweight X64 Assembler.
@@ -45,22 +45,22 @@ namespace internal {
// Utility functions
// Test whether a 64-bit value is in a specific range.
-static inline bool is_uint32(int64_t x) {
+inline bool is_uint32(int64_t x) {
static const uint64_t kMaxUInt32 = V8_UINT64_C(0xffffffff);
return static_cast<uint64_t>(x) <= kMaxUInt32;
}
-static inline bool is_int32(int64_t x) {
+inline bool is_int32(int64_t x) {
static const int64_t kMinInt32 = -V8_INT64_C(0x80000000);
return is_uint32(x - kMinInt32);
}
-static inline bool uint_is_int32(uint64_t x) {
+inline bool uint_is_int32(uint64_t x) {
static const uint64_t kMaxInt32 = V8_UINT64_C(0x7fffffff);
return x <= kMaxInt32;
}
-static inline bool is_uint32(uint64_t x) {
+inline bool is_uint32(uint64_t x) {
static const uint64_t kMaxUInt32 = V8_UINT64_C(0xffffffff);
return x <= kMaxUInt32;
}
@@ -131,6 +131,8 @@ struct Register {
}
bool is_valid() const { return 0 <= code_ && code_ < kNumRegisters; }
bool is(Register reg) const { return code_ == reg.code_; }
+ // rax, rbx, rcx and rdx are byte registers, the rest are not.
+ bool is_byte_register() const { return code_ <= 3; }
int code() const {
ASSERT(is_valid());
return code_;
@@ -159,23 +161,41 @@ struct Register {
static const int kAllocationIndexByRegisterCode[kNumRegisters];
};
-const Register rax = { 0 };
-const Register rcx = { 1 };
-const Register rdx = { 2 };
-const Register rbx = { 3 };
-const Register rsp = { 4 };
-const Register rbp = { 5 };
-const Register rsi = { 6 };
-const Register rdi = { 7 };
-const Register r8 = { 8 };
-const Register r9 = { 9 };
-const Register r10 = { 10 };
-const Register r11 = { 11 };
-const Register r12 = { 12 };
-const Register r13 = { 13 };
-const Register r14 = { 14 };
-const Register r15 = { 15 };
-const Register no_reg = { -1 };
+const int kRegister_rax_Code = 0;
+const int kRegister_rcx_Code = 1;
+const int kRegister_rdx_Code = 2;
+const int kRegister_rbx_Code = 3;
+const int kRegister_rsp_Code = 4;
+const int kRegister_rbp_Code = 5;
+const int kRegister_rsi_Code = 6;
+const int kRegister_rdi_Code = 7;
+const int kRegister_r8_Code = 8;
+const int kRegister_r9_Code = 9;
+const int kRegister_r10_Code = 10;
+const int kRegister_r11_Code = 11;
+const int kRegister_r12_Code = 12;
+const int kRegister_r13_Code = 13;
+const int kRegister_r14_Code = 14;
+const int kRegister_r15_Code = 15;
+const int kRegister_no_reg_Code = -1;
+
+const Register rax = { kRegister_rax_Code };
+const Register rcx = { kRegister_rcx_Code };
+const Register rdx = { kRegister_rdx_Code };
+const Register rbx = { kRegister_rbx_Code };
+const Register rsp = { kRegister_rsp_Code };
+const Register rbp = { kRegister_rbp_Code };
+const Register rsi = { kRegister_rsi_Code };
+const Register rdi = { kRegister_rdi_Code };
+const Register r8 = { kRegister_r8_Code };
+const Register r9 = { kRegister_r9_Code };
+const Register r10 = { kRegister_r10_Code };
+const Register r11 = { kRegister_r11_Code };
+const Register r12 = { kRegister_r12_Code };
+const Register r13 = { kRegister_r13_Code };
+const Register r14 = { kRegister_r14_Code };
+const Register r15 = { kRegister_r15_Code };
+const Register no_reg = { kRegister_no_reg_Code };
struct XMMRegister {
@@ -557,8 +577,8 @@ class Assembler : public AssemblerBase {
// This sets the branch destination (which is in the instruction on x64).
// This is for calls and branches within generated code.
- inline static void set_target_at(Address instruction_payload,
- Address target) {
+ inline static void deserialization_set_special_target_at(
+ Address instruction_payload, Address target) {
set_target_address_at(instruction_payload, target);
}
@@ -571,8 +591,7 @@ class Assembler : public AssemblerBase {
inline Handle<Object> code_target_object_handle_at(Address pc);
// Number of bytes taken up by the branch target in the code.
- static const int kCallTargetSize = 4; // Use 32-bit displacement.
- static const int kExternalTargetSize = 8; // Use 64-bit absolute.
+ static const int kSpecialTargetSize = 4; // Use 32-bit displacement.
// Distance between the address of the code target in the call instruction
// and the return address pushed on the stack.
static const int kCallTargetAddressOffset = 4; // Use 32-bit displacement.
@@ -636,6 +655,7 @@ class Assembler : public AssemblerBase {
// possible to align the pc offset to a multiple
// of m, where m must be a power of 2.
void Align(int m);
+ void Nop(int bytes = 1);
// Aligns code to something that's optimal for a jump target for the platform.
void CodeTargetAlign();
@@ -649,7 +669,6 @@ class Assembler : public AssemblerBase {
void push_imm32(int32_t imm32);
void push(Register src);
void push(const Operand& src);
- void push(Handle<Object> handle);
void pop(Register dst);
void pop(const Operand& dst);
@@ -1155,7 +1174,6 @@ class Assembler : public AssemblerBase {
void hlt();
void int3();
void nop();
- void nop(int n);
void rdtsc();
void ret(int imm16);
void setcc(Condition cc, Register reg);
@@ -1276,7 +1294,11 @@ class Assembler : public AssemblerBase {
void fsin();
void fcos();
+ void fptan();
void fyl2x();
+ void f2xm1();
+ void fscale();
+ void fninit();
void frndint();
@@ -1398,7 +1420,7 @@ class Assembler : public AssemblerBase {
return static_cast<int>(reloc_info_writer.pos() - pc_);
}
- static bool IsNop(Address addr) { return *addr == 0x90; }
+ static bool IsNop(Address addr);
// Avoid overflows for displacements etc.
static const int kMaximalBufferSize = 512*MB;
diff --git a/src/3rdparty/v8/src/x64/builtins-x64.cc b/src/3rdparty/v8/src/x64/builtins-x64.cc
index e423ae3..4e037ff 100644
--- a/src/3rdparty/v8/src/x64/builtins-x64.cc
+++ b/src/3rdparty/v8/src/x64/builtins-x64.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -73,49 +73,14 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
}
-void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
+static void Generate_JSConstructStubHelper(MacroAssembler* masm,
+ bool is_api_function,
+ bool count_constructions) {
// ----------- S t a t e -------------
// -- rax: number of arguments
// -- rdi: constructor function
// -----------------------------------
- Label slow, non_function_call;
- // Check that function is not a smi.
- __ JumpIfSmi(rdi, &non_function_call);
- // Check that function is a JSFunction.
- __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
- __ j(not_equal, &slow);
-
- // Jump to the function-specific construct stub.
- __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
- __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kConstructStubOffset));
- __ lea(rbx, FieldOperand(rbx, Code::kHeaderSize));
- __ jmp(rbx);
-
- // rdi: called object
- // rax: number of arguments
- // rcx: object map
- Label do_call;
- __ bind(&slow);
- __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
- __ j(not_equal, &non_function_call);
- __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
- __ jmp(&do_call);
-
- __ bind(&non_function_call);
- __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
- __ bind(&do_call);
- // Set expected number of arguments to zero (not changing rax).
- __ Set(rbx, 0);
- __ SetCallKind(rcx, CALL_AS_METHOD);
- __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
- RelocInfo::CODE_TARGET);
-}
-
-
-static void Generate_JSConstructStubHelper(MacroAssembler* masm,
- bool is_api_function,
- bool count_constructions) {
// Should never count constructions for api objects.
ASSERT(!is_api_function || !count_constructions);
@@ -337,7 +302,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ push(rbx);
__ push(rbx);
- // Setup pointer to last argument.
+ // Set up pointer to last argument.
__ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
// Copy arguments and receiver to the expression stack.
@@ -364,6 +329,11 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
NullCallWrapper(), CALL_AS_METHOD);
}
+ // Store offset of return address for deoptimizer.
+ if (!is_api_function && !count_constructions) {
+ masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
+ }
+
// Restore context from the frame.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -515,8 +485,8 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Invoke the code.
if (is_construct) {
// Expects rdi to hold function pointer.
- __ Call(masm->isolate()->builtins()->JSConstructCall(),
- RelocInfo::CODE_TARGET);
+ CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+ __ CallStub(&stub);
} else {
ParameterCount actual(rax);
// Function must be in rdi.
@@ -1007,9 +977,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
const int initial_capacity = JSArray::kPreallocatedArrayElements;
STATIC_ASSERT(initial_capacity >= 0);
- // Load the initial map from the array function.
- __ movq(scratch1, FieldOperand(array_function,
- JSFunction::kPrototypeOrInitialMapOffset));
+ __ LoadInitialArrayMap(array_function, scratch2, scratch1);
// Allocate the JSArray object together with space for a fixed array with the
// requested elements.
@@ -1108,10 +1076,7 @@ static void AllocateJSArray(MacroAssembler* masm,
Register scratch,
bool fill_with_hole,
Label* gc_required) {
- // Load the initial map from the array function.
- __ movq(elements_array,
- FieldOperand(array_function,
- JSFunction::kPrototypeOrInitialMapOffset));
+ __ LoadInitialArrayMap(array_function, scratch, elements_array);
if (FLAG_debug_code) { // Assert that array size is not zero.
__ testq(array_size, array_size);
@@ -1198,8 +1163,9 @@ static void AllocateJSArray(MacroAssembler* masm,
// Both registers are preserved by this code so no need to differentiate between
// a construct call and a normal call.
static void ArrayNativeCode(MacroAssembler* masm,
- Label *call_generic_code) {
- Label argc_one_or_more, argc_two_or_more, empty_array, not_empty_array;
+ Label* call_generic_code) {
+ Label argc_one_or_more, argc_two_or_more, empty_array, not_empty_array,
+ has_non_smi_element, finish, cant_transition_map, not_double;
// Check for array construction with zero arguments.
__ testq(rax, rax);
@@ -1304,8 +1270,11 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ movq(rcx, rax);
__ jmp(&entry);
__ bind(&loop);
- __ movq(kScratchRegister, Operand(r9, rcx, times_pointer_size, 0));
- __ movq(Operand(rdx, 0), kScratchRegister);
+ __ movq(r8, Operand(r9, rcx, times_pointer_size, 0));
+ if (FLAG_smi_only_arrays) {
+ __ JumpIfNotSmi(r8, &has_non_smi_element);
+ }
+ __ movq(Operand(rdx, 0), r8);
__ addq(rdx, Immediate(kPointerSize));
__ bind(&entry);
__ decq(rcx);
@@ -1316,11 +1285,81 @@ static void ArrayNativeCode(MacroAssembler* masm,
// rbx: JSArray
// esp[0]: return address
// esp[8]: last argument
+ __ bind(&finish);
__ pop(rcx);
__ lea(rsp, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
__ push(rcx);
__ movq(rax, rbx);
__ ret(0);
+
+ __ bind(&has_non_smi_element);
+ // Double values are handled by the runtime.
+ __ CheckMap(r8,
+ masm->isolate()->factory()->heap_number_map(),
+ &not_double,
+ DONT_DO_SMI_CHECK);
+ __ bind(&cant_transition_map);
+ __ UndoAllocationInNewSpace(rbx);
+ __ jmp(call_generic_code);
+
+ __ bind(&not_double);
+ // Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
+ // rbx: JSArray
+ __ movq(r11, FieldOperand(rbx, HeapObject::kMapOffset));
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ r11,
+ kScratchRegister,
+ &cant_transition_map);
+
+ __ movq(FieldOperand(rbx, HeapObject::kMapOffset), r11);
+ __ RecordWriteField(rbx, HeapObject::kMapOffset, r11, r8,
+ kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+
+ // Finish the array initialization loop.
+ Label loop2;
+ __ bind(&loop2);
+ __ movq(r8, Operand(r9, rcx, times_pointer_size, 0));
+ __ movq(Operand(rdx, 0), r8);
+ __ addq(rdx, Immediate(kPointerSize));
+ __ decq(rcx);
+ __ j(greater_equal, &loop2);
+ __ jmp(&finish);
+}
+
+
+void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- rax : argc
+ // -- rsp[0] : return address
+ // -- rsp[8] : last argument
+ // -----------------------------------
+ Label generic_array_code;
+
+ // Get the InternalArray function.
+ __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);
+
+ if (FLAG_debug_code) {
+ // Initial map for the builtin InternalArray functions should be maps.
+ __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
+ // Will both indicate a NULL and a Smi.
+ STATIC_ASSERT(kSmiTag == 0);
+ Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
+ __ Check(not_smi, "Unexpected initial map for InternalArray function");
+ __ CmpObjectType(rbx, MAP_TYPE, rcx);
+ __ Check(equal, "Unexpected initial map for InternalArray function");
+ }
+
+ // Run the native code for the InternalArray function called as a normal
+ // function.
+ ArrayNativeCode(masm, &generic_array_code);
+
+ // Jump to the generic array code in case the specialized code cannot handle
+ // the construction.
+ __ bind(&generic_array_code);
+ Handle<Code> array_code =
+ masm->isolate()->builtins()->InternalArrayCodeGeneric();
+ __ Jump(array_code, RelocInfo::CODE_TARGET);
}
@@ -1393,9 +1432,130 @@ void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
- // TODO(849): implement custom construct stub.
- // Generate a copy of the generic stub for now.
- Generate_JSConstructStubGeneric(masm);
+ // ----------- S t a t e -------------
+ // -- rax : number of arguments
+ // -- rdi : constructor function
+ // -- rsp[0] : return address
+ // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
+ // -- rsp[(argc + 1) * 8] : receiver
+ // -----------------------------------
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->string_ctor_calls(), 1);
+
+ if (FLAG_debug_code) {
+ __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
+ __ cmpq(rdi, rcx);
+ __ Assert(equal, "Unexpected String function");
+ }
+
+ // Load the first argument into rax and get rid of the rest
+ // (including the receiver).
+ Label no_arguments;
+ __ testq(rax, rax);
+ __ j(zero, &no_arguments);
+ __ movq(rbx, Operand(rsp, rax, times_pointer_size, 0));
+ __ pop(rcx);
+ __ lea(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
+ __ push(rcx);
+ __ movq(rax, rbx);
+
+ // Lookup the argument in the number to string cache.
+ Label not_cached, argument_is_string;
+ NumberToStringStub::GenerateLookupNumberStringCache(
+ masm,
+ rax, // Input.
+ rbx, // Result.
+ rcx, // Scratch 1.
+ rdx, // Scratch 2.
+ false, // Input is known to be smi?
+ &not_cached);
+ __ IncrementCounter(counters->string_ctor_cached_number(), 1);
+ __ bind(&argument_is_string);
+
+ // ----------- S t a t e -------------
+ // -- rbx : argument converted to string
+ // -- rdi : constructor function
+ // -- rsp[0] : return address
+ // -----------------------------------
+
+ // Allocate a JSValue and put the tagged pointer into rax.
+ Label gc_required;
+ __ AllocateInNewSpace(JSValue::kSize,
+ rax, // Result.
+ rcx, // New allocation top (we ignore it).
+ no_reg,
+ &gc_required,
+ TAG_OBJECT);
+
+ // Set the map.
+ __ LoadGlobalFunctionInitialMap(rdi, rcx);
+ if (FLAG_debug_code) {
+ __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
+ Immediate(JSValue::kSize >> kPointerSizeLog2));
+ __ Assert(equal, "Unexpected string wrapper instance size");
+ __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
+ __ Assert(equal, "Unexpected unused properties of string wrapper");
+ }
+ __ movq(FieldOperand(rax, HeapObject::kMapOffset), rcx);
+
+ // Set properties and elements.
+ __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
+ __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
+ __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);
+
+ // Set the value.
+ __ movq(FieldOperand(rax, JSValue::kValueOffset), rbx);
+
+ // Ensure the object is fully initialized.
+ STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
+
+ // We're done. Return.
+ __ ret(0);
+
+ // The argument was not found in the number to string cache. Check
+ // if it's a string already before calling the conversion builtin.
+ Label convert_argument;
+ __ bind(&not_cached);
+ STATIC_ASSERT(kSmiTag == 0);
+ __ JumpIfSmi(rax, &convert_argument);
+ Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
+ __ j(NegateCondition(is_string), &convert_argument);
+ __ movq(rbx, rax);
+ __ IncrementCounter(counters->string_ctor_string_value(), 1);
+ __ jmp(&argument_is_string);
+
+ // Invoke the conversion builtin and put the result into rbx.
+ __ bind(&convert_argument);
+ __ IncrementCounter(counters->string_ctor_conversions(), 1);
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ __ push(rdi); // Preserve the function.
+ __ push(rax);
+ __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
+ __ pop(rdi);
+ }
+ __ movq(rbx, rax);
+ __ jmp(&argument_is_string);
+
+ // Load the empty string into rbx, remove the receiver from the
+ // stack, and jump back to the case where the argument is a string.
+ __ bind(&no_arguments);
+ __ LoadRoot(rbx, Heap::kEmptyStringRootIndex);
+ __ pop(rcx);
+ __ lea(rsp, Operand(rsp, kPointerSize));
+ __ push(rcx);
+ __ jmp(&argument_is_string);
+
+ // At this point the argument is already a string. Call runtime to
+ // create a string wrapper.
+ __ bind(&gc_required);
+ __ IncrementCounter(counters->string_ctor_gc_required(), 1);
+ {
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ __ push(rbx);
+ __ CallRuntime(Runtime::kNewStringWrapper, 1);
+ }
+ __ ret(0);
}
@@ -1504,6 +1664,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ bind(&invoke);
__ call(rdx);
+ // Store offset of return address for deoptimizer.
+ masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
+
// Leave frame and return.
LeaveArgumentsAdaptorFrame(masm);
__ ret(0);
diff --git a/src/3rdparty/v8/src/x64/code-stubs-x64.cc b/src/3rdparty/v8/src/x64/code-stubs-x64.cc
index ae1603b..2845039 100644
--- a/src/3rdparty/v8/src/x64/code-stubs-x64.cc
+++ b/src/3rdparty/v8/src/x64/code-stubs-x64.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -68,9 +68,9 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
// Get the function info from the stack.
__ movq(rdx, Operand(rsp, 1 * kPointerSize));
- int map_index = strict_mode_ == kStrictMode
- ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
- : Context::FUNCTION_MAP_INDEX;
+ int map_index = (language_mode_ == CLASSIC_MODE)
+ ? Context::FUNCTION_MAP_INDEX
+ : Context::STRICT_MODE_FUNCTION_MAP_INDEX;
// Compute the function map in the current global context and set that
// as the map of the allocated object.
@@ -124,12 +124,12 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
// Get the function from the stack.
__ movq(rcx, Operand(rsp, 1 * kPointerSize));
- // Setup the object header.
+ // Set up the object header.
__ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex);
__ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
__ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));
- // Setup the fixed slots.
+ // Set up the fixed slots.
__ Set(rbx, 0); // Set to NULL.
__ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
__ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi);
@@ -139,10 +139,6 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
__ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx);
- // Copy the qmlglobal object from the previous context.
- __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::QML_GLOBAL_INDEX)));
- __ movq(Operand(rax, Context::SlotOffset(Context::QML_GLOBAL_INDEX)), rbx);
-
// Initialize the rest of the slots to undefined.
__ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
@@ -177,7 +173,7 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
// Get the serialized scope info from the stack.
__ movq(rbx, Operand(rsp, 2 * kPointerSize));
- // Setup the object header.
+ // Set up the object header.
__ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex);
__ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
__ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));
@@ -198,7 +194,7 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX));
__ bind(&after_sentinel);
- // Setup the fixed slots.
+ // Set up the fixed slots.
__ movq(ContextOperand(rax, Context::CLOSURE_INDEX), rcx);
__ movq(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi);
__ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx);
@@ -207,10 +203,6 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
__ movq(rbx, ContextOperand(rsi, Context::GLOBAL_INDEX));
__ movq(ContextOperand(rax, Context::GLOBAL_INDEX), rbx);
- // Copy the qmlglobal object from the previous context.
- __ movq(rbx, ContextOperand(rsi, Context::QML_GLOBAL_INDEX));
- __ movq(ContextOperand(rax, Context::QML_GLOBAL_INDEX), rbx);
-
// Initialize the rest of the slots to the hole value.
__ LoadRoot(rbx, Heap::kTheHoleValueRootIndex);
for (int i = 0; i < slots_; i++) {
@@ -227,68 +219,38 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
}
-void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
- // Stack layout on entry:
+static void GenerateFastCloneShallowArrayCommon(
+ MacroAssembler* masm,
+ int length,
+ FastCloneShallowArrayStub::Mode mode,
+ Label* fail) {
+ // Registers on entry:
//
- // [rsp + kPointerSize]: constant elements.
- // [rsp + (2 * kPointerSize)]: literal index.
- // [rsp + (3 * kPointerSize)]: literals array.
+ // rcx: boilerplate literal array.
+ ASSERT(mode != FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS);
// All sizes here are multiples of kPointerSize.
int elements_size = 0;
- if (length_ > 0) {
- elements_size = mode_ == CLONE_DOUBLE_ELEMENTS
- ? FixedDoubleArray::SizeFor(length_)
- : FixedArray::SizeFor(length_);
+ if (length > 0) {
+ elements_size = mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+ ? FixedDoubleArray::SizeFor(length)
+ : FixedArray::SizeFor(length);
}
int size = JSArray::kSize + elements_size;
- // Load boilerplate object into rcx and check if we need to create a
- // boilerplate.
- Label slow_case;
- __ movq(rcx, Operand(rsp, 3 * kPointerSize));
- __ movq(rax, Operand(rsp, 2 * kPointerSize));
- SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
- __ movq(rcx,
- FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
- __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
- __ j(equal, &slow_case);
-
- if (FLAG_debug_code) {
- const char* message;
- Heap::RootListIndex expected_map_index;
- if (mode_ == CLONE_ELEMENTS) {
- message = "Expected (writable) fixed array";
- expected_map_index = Heap::kFixedArrayMapRootIndex;
- } else if (mode_ == CLONE_DOUBLE_ELEMENTS) {
- message = "Expected (writable) fixed double array";
- expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
- } else {
- ASSERT(mode_ == COPY_ON_WRITE_ELEMENTS);
- message = "Expected copy-on-write fixed array";
- expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
- }
- __ push(rcx);
- __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
- __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
- expected_map_index);
- __ Assert(equal, message);
- __ pop(rcx);
- }
-
// Allocate both the JS array and the elements array in one big
// allocation. This avoids multiple limit checks.
- __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT);
+ __ AllocateInNewSpace(size, rax, rbx, rdx, fail, TAG_OBJECT);
// Copy the JS array part.
for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
- if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
+ if ((i != JSArray::kElementsOffset) || (length == 0)) {
__ movq(rbx, FieldOperand(rcx, i));
__ movq(FieldOperand(rax, i), rbx);
}
}
- if (length_ > 0) {
+ if (length > 0) {
// Get hold of the elements array of the boilerplate and setup the
// elements pointer in the resulting object.
__ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
@@ -296,13 +258,13 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
__ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx);
// Copy the elements array.
- if (mode_ == CLONE_ELEMENTS) {
+ if (mode == FastCloneShallowArrayStub::CLONE_ELEMENTS) {
for (int i = 0; i < elements_size; i += kPointerSize) {
__ movq(rbx, FieldOperand(rcx, i));
__ movq(FieldOperand(rdx, i), rbx);
}
} else {
- ASSERT(mode_ == CLONE_DOUBLE_ELEMENTS);
+ ASSERT(mode == FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS);
int i;
for (i = 0; i < FixedDoubleArray::kHeaderSize; i += kPointerSize) {
__ movq(rbx, FieldOperand(rcx, i));
@@ -316,8 +278,75 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
ASSERT(i == elements_size);
}
}
+}
- // Return and remove the on-stack parameters.
+void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
+ // Stack layout on entry:
+ //
+ // [rsp + kPointerSize]: constant elements.
+ // [rsp + (2 * kPointerSize)]: literal index.
+ // [rsp + (3 * kPointerSize)]: literals array.
+
+ // Load boilerplate object into rcx and check if we need to create a
+ // boilerplate.
+ __ movq(rcx, Operand(rsp, 3 * kPointerSize));
+ __ movq(rax, Operand(rsp, 2 * kPointerSize));
+ SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
+ __ movq(rcx,
+ FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
+ __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
+ Label slow_case;
+ __ j(equal, &slow_case);
+
+ FastCloneShallowArrayStub::Mode mode = mode_;
+ // rcx is boilerplate object.
+ Factory* factory = masm->isolate()->factory();
+ if (mode == CLONE_ANY_ELEMENTS) {
+ Label double_elements, check_fast_elements;
+ __ movq(rbx, FieldOperand(rcx, JSArray::kElementsOffset));
+ __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
+ factory->fixed_cow_array_map());
+ __ j(not_equal, &check_fast_elements);
+ GenerateFastCloneShallowArrayCommon(masm, 0,
+ COPY_ON_WRITE_ELEMENTS, &slow_case);
+ __ ret(3 * kPointerSize);
+
+ __ bind(&check_fast_elements);
+ __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
+ factory->fixed_array_map());
+ __ j(not_equal, &double_elements);
+ GenerateFastCloneShallowArrayCommon(masm, length_,
+ CLONE_ELEMENTS, &slow_case);
+ __ ret(3 * kPointerSize);
+
+ __ bind(&double_elements);
+ mode = CLONE_DOUBLE_ELEMENTS;
+ // Fall through to generate the code to handle double elements.
+ }
+
+ if (FLAG_debug_code) {
+ const char* message;
+ Heap::RootListIndex expected_map_index;
+ if (mode == CLONE_ELEMENTS) {
+ message = "Expected (writable) fixed array";
+ expected_map_index = Heap::kFixedArrayMapRootIndex;
+ } else if (mode == CLONE_DOUBLE_ELEMENTS) {
+ message = "Expected (writable) fixed double array";
+ expected_map_index = Heap::kFixedDoubleArrayMapRootIndex;
+ } else {
+ ASSERT(mode == COPY_ON_WRITE_ELEMENTS);
+ message = "Expected copy-on-write fixed array";
+ expected_map_index = Heap::kFixedCOWArrayMapRootIndex;
+ }
+ __ push(rcx);
+ __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset));
+ __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
+ expected_map_index);
+ __ Assert(equal, message);
+ __ pop(rcx);
+ }
+
+ GenerateFastCloneShallowArrayCommon(masm, length_, mode, &slow_case);
__ ret(3 * kPointerSize);
__ bind(&slow_case);
@@ -325,6 +354,49 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
}
+void FastCloneShallowObjectStub::Generate(MacroAssembler* masm) {
+ // Stack layout on entry:
+ //
+ // [rsp + kPointerSize]: object literal flags.
+ // [rsp + (2 * kPointerSize)]: constant properties.
+ // [rsp + (3 * kPointerSize)]: literal index.
+ // [rsp + (4 * kPointerSize)]: literals array.
+
+ // Load boilerplate object into ecx and check if we need to create a
+ // boilerplate.
+ Label slow_case;
+ __ movq(rcx, Operand(rsp, 4 * kPointerSize));
+ __ movq(rax, Operand(rsp, 3 * kPointerSize));
+ SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2);
+ __ movq(rcx,
+ FieldOperand(rcx, index.reg, index.scale, FixedArray::kHeaderSize));
+ __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
+ __ j(equal, &slow_case);
+
+ // Check that the boilerplate contains only fast properties and we can
+ // statically determine the instance size.
+ int size = JSObject::kHeaderSize + length_ * kPointerSize;
+ __ movq(rax, FieldOperand(rcx, HeapObject::kMapOffset));
+ __ movzxbq(rax, FieldOperand(rax, Map::kInstanceSizeOffset));
+ __ cmpq(rax, Immediate(size >> kPointerSizeLog2));
+ __ j(not_equal, &slow_case);
+
+ // Allocate the JS object and copy header together with all in-object
+ // properties from the boilerplate.
+ __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT);
+ for (int i = 0; i < size; i += kPointerSize) {
+ __ movq(rbx, FieldOperand(rcx, i));
+ __ movq(FieldOperand(rax, i), rbx);
+ }
+
+ // Return and remove the on-stack parameters.
+ __ ret(4 * kPointerSize);
+
+ __ bind(&slow_case);
+ __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1);
+}
+
+
// The stub expects its argument on the stack and returns its result in tos_:
// zero for false, and a non-zero value for true.
void ToBooleanStub::Generate(MacroAssembler* masm) {
@@ -1535,6 +1607,8 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
__ cmpq(rbx, Operand(rcx, 0));
__ j(not_equal, &cache_miss, Label::kNear);
// Cache hit!
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->transcendental_cache_hit(), 1);
__ movq(rax, Operand(rcx, 2 * kIntSize));
if (tagged) {
__ fstp(0); // Clear FPU stack.
@@ -1545,6 +1619,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
}
__ bind(&cache_miss);
+ __ IncrementCounter(counters->transcendental_cache_miss(), 1);
// Update cache with new value.
if (tagged) {
__ AllocateHeapNumber(rax, rdi, &runtime_call_clear_stack);
@@ -1553,7 +1628,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
__ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
__ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
}
- GenerateOperation(masm);
+ GenerateOperation(masm, type_);
__ movq(Operand(rcx, 0), rbx);
__ movq(Operand(rcx, 2 * kIntSize), rax);
__ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset));
@@ -1568,7 +1643,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
__ subq(rsp, Immediate(kDoubleSize));
__ movsd(Operand(rsp, 0), xmm1);
__ fld_d(Operand(rsp, 0));
- GenerateOperation(masm);
+ GenerateOperation(masm, type_);
__ fstp_d(Operand(rsp, 0));
__ movsd(xmm1, Operand(rsp, 0));
__ addq(rsp, Immediate(kDoubleSize));
@@ -1611,6 +1686,7 @@ Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
// Add more cases when necessary.
case TranscendentalCache::SIN: return Runtime::kMath_sin;
case TranscendentalCache::COS: return Runtime::kMath_cos;
+ case TranscendentalCache::TAN: return Runtime::kMath_tan;
case TranscendentalCache::LOG: return Runtime::kMath_log;
default:
UNIMPLEMENTED();
@@ -1619,14 +1695,17 @@ Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
}
-void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
+void TranscendentalCacheStub::GenerateOperation(
+ MacroAssembler* masm, TranscendentalCache::Type type) {
// Registers:
// rax: Newly allocated HeapNumber, which must be preserved.
// rbx: Bits of input double. Must be preserved.
// rcx: Pointer to cache entry. Must be preserved.
// st(0): Input double
Label done;
- if (type_ == TranscendentalCache::SIN || type_ == TranscendentalCache::COS) {
+ if (type == TranscendentalCache::SIN ||
+ type == TranscendentalCache::COS ||
+ type == TranscendentalCache::TAN) {
// Both fsin and fcos require arguments in the range +/-2^63 and
// return NaN for infinities and NaN. They can share all code except
// the actual fsin/fcos operation.
@@ -1647,8 +1726,12 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
__ j(not_equal, &non_nan_result, Label::kNear);
// Input is +/-Infinity or NaN. Result is NaN.
__ fstp(0);
- __ LoadRoot(kScratchRegister, Heap::kNanValueRootIndex);
- __ fld_d(FieldOperand(kScratchRegister, HeapNumber::kValueOffset));
+ // NaN is represented by 0x7ff8000000000000.
+ __ subq(rsp, Immediate(kPointerSize));
+ __ movl(Operand(rsp, 4), Immediate(0x7ff80000));
+ __ movl(Operand(rsp, 0), Immediate(0x00000000));
+ __ fld_d(Operand(rsp, 0));
+ __ addq(rsp, Immediate(kPointerSize));
__ jmp(&done);
__ bind(&non_nan_result);
@@ -1689,19 +1772,25 @@ void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) {
// FPU Stack: input % 2*pi
__ movq(rax, rdi); // Restore rax, pointer to the new HeapNumber.
__ bind(&in_range);
- switch (type_) {
+ switch (type) {
case TranscendentalCache::SIN:
__ fsin();
break;
case TranscendentalCache::COS:
__ fcos();
break;
+ case TranscendentalCache::TAN:
+ // FPTAN calculates tangent onto st(0) and pushes 1.0 onto the
+ // FP register stack.
+ __ fptan();
+ __ fstp(0); // Pop FP register stack.
+ break;
default:
UNREACHABLE();
}
__ bind(&done);
} else {
- ASSERT(type_ == TranscendentalCache::LOG);
+ ASSERT(type == TranscendentalCache::LOG);
__ fldln2();
__ fxch();
__ fyl2x();
@@ -1907,152 +1996,259 @@ void FloatingPointHelper::NumbersToSmis(MacroAssembler* masm,
void MathPowStub::Generate(MacroAssembler* masm) {
- // Registers are used as follows:
- // rdx = base
- // rax = exponent
- // rcx = temporary, result
-
- Label allocate_return, call_runtime;
-
- // Load input parameters.
- __ movq(rdx, Operand(rsp, 2 * kPointerSize));
- __ movq(rax, Operand(rsp, 1 * kPointerSize));
+ // Choose register conforming to calling convention (when bailing out).
+#ifdef _WIN64
+ const Register exponent = rdx;
+#else
+ const Register exponent = rdi;
+#endif
+ const Register base = rax;
+ const Register scratch = rcx;
+ const XMMRegister double_result = xmm3;
+ const XMMRegister double_base = xmm2;
+ const XMMRegister double_exponent = xmm1;
+ const XMMRegister double_scratch = xmm4;
- // Save 1 in xmm3 - we need this several times later on.
- __ Set(rcx, 1);
- __ cvtlsi2sd(xmm3, rcx);
+ Label call_runtime, done, exponent_not_smi, int_exponent;
- Label exponent_nonsmi;
- Label base_nonsmi;
- // If the exponent is a heap number go to that specific case.
- __ JumpIfNotSmi(rax, &exponent_nonsmi);
- __ JumpIfNotSmi(rdx, &base_nonsmi);
+ // Save 1 in double_result - we need this several times later on.
+ __ movq(scratch, Immediate(1));
+ __ cvtlsi2sd(double_result, scratch);
+
+ if (exponent_type_ == ON_STACK) {
+ Label base_is_smi, unpack_exponent;
+ // The exponent and base are supplied as arguments on the stack.
+ // This can only happen if the stub is called from non-optimized code.
+ // Load input parameters from stack.
+ __ movq(base, Operand(rsp, 2 * kPointerSize));
+ __ movq(exponent, Operand(rsp, 1 * kPointerSize));
+ __ JumpIfSmi(base, &base_is_smi, Label::kNear);
+ __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
+ Heap::kHeapNumberMapRootIndex);
+ __ j(not_equal, &call_runtime);
+
+ __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
+ __ jmp(&unpack_exponent, Label::kNear);
+
+ __ bind(&base_is_smi);
+ __ SmiToInteger32(base, base);
+ __ cvtlsi2sd(double_base, base);
+ __ bind(&unpack_exponent);
+
+ __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
+ __ SmiToInteger32(exponent, exponent);
+ __ jmp(&int_exponent);
+
+ __ bind(&exponent_not_smi);
+ __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset),
+ Heap::kHeapNumberMapRootIndex);
+ __ j(not_equal, &call_runtime);
+ __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
+ } else if (exponent_type_ == TAGGED) {
+ __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
+ __ SmiToInteger32(exponent, exponent);
+ __ jmp(&int_exponent);
+
+ __ bind(&exponent_not_smi);
+ __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
+ }
- // Optimized version when both exponent and base are smis.
- Label powi;
- __ SmiToInteger32(rdx, rdx);
- __ cvtlsi2sd(xmm0, rdx);
- __ jmp(&powi);
- // Exponent is a smi and base is a heapnumber.
- __ bind(&base_nonsmi);
- __ CompareRoot(FieldOperand(rdx, HeapObject::kMapOffset),
- Heap::kHeapNumberMapRootIndex);
- __ j(not_equal, &call_runtime);
+ if (exponent_type_ != INTEGER) {
+ Label fast_power;
+ // Detect integer exponents stored as double.
+ __ cvttsd2si(exponent, double_exponent);
+ // Skip to runtime if possibly NaN (indicated by the indefinite integer).
+ __ cmpl(exponent, Immediate(0x80000000u));
+ __ j(equal, &call_runtime);
+ __ cvtlsi2sd(double_scratch, exponent);
+ // Already ruled out NaNs for exponent.
+ __ ucomisd(double_exponent, double_scratch);
+ __ j(equal, &int_exponent);
+
+ if (exponent_type_ == ON_STACK) {
+ // Detect square root case. Crankshaft detects constant +/-0.5 at
+ // compile time and uses DoMathPowHalf instead. We then skip this check
+ // for non-constant cases of +/-0.5 as these hardly occur.
+ Label continue_sqrt, continue_rsqrt, not_plus_half;
+ // Test for 0.5.
+ // Load double_scratch with 0.5.
+ __ movq(scratch, V8_UINT64_C(0x3FE0000000000000), RelocInfo::NONE);
+ __ movq(double_scratch, scratch);
+ // Already ruled out NaNs for exponent.
+ __ ucomisd(double_scratch, double_exponent);
+ __ j(not_equal, &not_plus_half, Label::kNear);
+
+ // Calculates square root of base. Check for the special case of
+ // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
+ // According to IEEE-754, double-precision -Infinity has the highest
+ // 12 bits set and the lowest 52 bits cleared.
+ __ movq(scratch, V8_UINT64_C(0xFFF0000000000000), RelocInfo::NONE);
+ __ movq(double_scratch, scratch);
+ __ ucomisd(double_scratch, double_base);
+ // Comparing -Infinity with NaN results in "unordered", which sets the
+ // zero flag as if both were equal. However, it also sets the carry flag.
+ __ j(not_equal, &continue_sqrt, Label::kNear);
+ __ j(carry, &continue_sqrt, Label::kNear);
+
+ // Set result to Infinity in the special case.
+ __ xorps(double_result, double_result);
+ __ subsd(double_result, double_scratch);
+ __ jmp(&done);
+
+ __ bind(&continue_sqrt);
+ // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
+ __ xorps(double_scratch, double_scratch);
+ __ addsd(double_scratch, double_base); // Convert -0 to 0.
+ __ sqrtsd(double_result, double_scratch);
+ __ jmp(&done);
+
+ // Test for -0.5.
+ __ bind(&not_plus_half);
+ // Load double_scratch with -0.5 by substracting 1.
+ __ subsd(double_scratch, double_result);
+ // Already ruled out NaNs for exponent.
+ __ ucomisd(double_scratch, double_exponent);
+ __ j(not_equal, &fast_power, Label::kNear);
+
+ // Calculates reciprocal of square root of base. Check for the special
+ // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
+ // According to IEEE-754, double-precision -Infinity has the highest
+ // 12 bits set and the lowest 52 bits cleared.
+ __ movq(scratch, V8_UINT64_C(0xFFF0000000000000), RelocInfo::NONE);
+ __ movq(double_scratch, scratch);
+ __ ucomisd(double_scratch, double_base);
+ // Comparing -Infinity with NaN results in "unordered", which sets the
+ // zero flag as if both were equal. However, it also sets the carry flag.
+ __ j(not_equal, &continue_rsqrt, Label::kNear);
+ __ j(carry, &continue_rsqrt, Label::kNear);
+
+ // Set result to 0 in the special case.
+ __ xorps(double_result, double_result);
+ __ jmp(&done);
+
+ __ bind(&continue_rsqrt);
+ // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
+ __ xorps(double_exponent, double_exponent);
+ __ addsd(double_exponent, double_base); // Convert -0 to +0.
+ __ sqrtsd(double_exponent, double_exponent);
+ __ divsd(double_result, double_exponent);
+ __ jmp(&done);
+ }
- __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
+ // Using FPU instructions to calculate power.
+ Label fast_power_failed;
+ __ bind(&fast_power);
+ __ fnclex(); // Clear flags to catch exceptions later.
+ // Transfer (B)ase and (E)xponent onto the FPU register stack.
+ __ subq(rsp, Immediate(kDoubleSize));
+ __ movsd(Operand(rsp, 0), double_exponent);
+ __ fld_d(Operand(rsp, 0)); // E
+ __ movsd(Operand(rsp, 0), double_base);
+ __ fld_d(Operand(rsp, 0)); // B, E
+
+ // Exponent is in st(1) and base is in st(0)
+ // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
+ // FYL2X calculates st(1) * log2(st(0))
+ __ fyl2x(); // X
+ __ fld(0); // X, X
+ __ frndint(); // rnd(X), X
+ __ fsub(1); // rnd(X), X-rnd(X)
+ __ fxch(1); // X - rnd(X), rnd(X)
+ // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
+ __ f2xm1(); // 2^(X-rnd(X)) - 1, rnd(X)
+ __ fld1(); // 1, 2^(X-rnd(X)) - 1, rnd(X)
+ __ faddp(1); // 1, 2^(X-rnd(X)), rnd(X)
+ // FSCALE calculates st(0) * 2^st(1)
+ __ fscale(); // 2^X, rnd(X)
+ __ fstp(1);
+ // Bail out to runtime in case of exceptions in the status word.
+ __ fnstsw_ax();
+ __ testb(rax, Immediate(0x5F)); // Check for all but precision exception.
+ __ j(not_zero, &fast_power_failed, Label::kNear);
+ __ fstp_d(Operand(rsp, 0));
+ __ movsd(double_result, Operand(rsp, 0));
+ __ addq(rsp, Immediate(kDoubleSize));
+ __ jmp(&done);
- // Optimized version of pow if exponent is a smi.
- // xmm0 contains the base.
- __ bind(&powi);
- __ SmiToInteger32(rax, rax);
+ __ bind(&fast_power_failed);
+ __ fninit();
+ __ addq(rsp, Immediate(kDoubleSize));
+ __ jmp(&call_runtime);
+ }
- // Save exponent in base as we need to check if exponent is negative later.
- // We know that base and exponent are in different registers.
- __ movq(rdx, rax);
+ // Calculate power with integer exponent.
+ __ bind(&int_exponent);
+ const XMMRegister double_scratch2 = double_exponent;
+ // Back up exponent as we need to check if exponent is negative later.
+ __ movq(scratch, exponent); // Back up exponent.
+ __ movsd(double_scratch, double_base); // Back up base.
+ __ movsd(double_scratch2, double_result); // Load double_exponent with 1.
// Get absolute value of exponent.
- Label no_neg;
- __ cmpl(rax, Immediate(0));
- __ j(greater_equal, &no_neg, Label::kNear);
- __ negl(rax);
+ Label no_neg, while_true, no_multiply;
+ __ testl(scratch, scratch);
+ __ j(positive, &no_neg, Label::kNear);
+ __ negl(scratch);
__ bind(&no_neg);
- // Load xmm1 with 1.
- __ movaps(xmm1, xmm3);
- Label while_true;
- Label no_multiply;
-
__ bind(&while_true);
- __ shrl(rax, Immediate(1));
+ __ shrl(scratch, Immediate(1));
__ j(not_carry, &no_multiply, Label::kNear);
- __ mulsd(xmm1, xmm0);
+ __ mulsd(double_result, double_scratch);
__ bind(&no_multiply);
- __ mulsd(xmm0, xmm0);
- __ j(not_zero, &while_true);
-
- // Base has the original value of the exponent - if the exponent is
- // negative return 1/result.
- __ testl(rdx, rdx);
- __ j(positive, &allocate_return);
- // Special case if xmm1 has reached infinity.
- __ divsd(xmm3, xmm1);
- __ movaps(xmm1, xmm3);
- __ xorps(xmm0, xmm0);
- __ ucomisd(xmm0, xmm1);
- __ j(equal, &call_runtime);
-
- __ jmp(&allocate_return);
-
- // Exponent (or both) is a heapnumber - no matter what we should now work
- // on doubles.
- __ bind(&exponent_nonsmi);
- __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
- Heap::kHeapNumberMapRootIndex);
- __ j(not_equal, &call_runtime);
- __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
- // Test if exponent is nan.
- __ ucomisd(xmm1, xmm1);
- __ j(parity_even, &call_runtime);
- Label base_not_smi, handle_special_cases;
- __ JumpIfNotSmi(rdx, &base_not_smi, Label::kNear);
- __ SmiToInteger32(rdx, rdx);
- __ cvtlsi2sd(xmm0, rdx);
- __ jmp(&handle_special_cases, Label::kNear);
+ __ mulsd(double_scratch, double_scratch);
+ __ j(not_zero, &while_true);
- __ bind(&base_not_smi);
- __ CompareRoot(FieldOperand(rdx, HeapObject::kMapOffset),
- Heap::kHeapNumberMapRootIndex);
- __ j(not_equal, &call_runtime);
- __ movl(rcx, FieldOperand(rdx, HeapNumber::kExponentOffset));
- __ andl(rcx, Immediate(HeapNumber::kExponentMask));
- __ cmpl(rcx, Immediate(HeapNumber::kExponentMask));
- // base is NaN or +/-Infinity
- __ j(greater_equal, &call_runtime);
- __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
+ // If the exponent is negative, return 1/result.
+ __ testl(exponent, exponent);
+ __ j(greater, &done);
+ __ divsd(double_scratch2, double_result);
+ __ movsd(double_result, double_scratch2);
+ // Test whether result is zero. Bail out to check for subnormal result.
+ // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
+ __ xorps(double_scratch2, double_scratch2);
+ __ ucomisd(double_scratch2, double_result);
+ // double_exponent aliased as double_scratch2 has already been overwritten
+ // and may not have contained the exponent value in the first place when the
+ // input was a smi. We reset it with exponent value before bailing out.
+ __ j(not_equal, &done);
+ __ cvtlsi2sd(double_exponent, exponent);
+
+ // Returning or bailing out.
+ Counters* counters = masm->isolate()->counters();
+ if (exponent_type_ == ON_STACK) {
+ // The arguments are still on the stack.
+ __ bind(&call_runtime);
+ __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
- // base is in xmm0 and exponent is in xmm1.
- __ bind(&handle_special_cases);
- Label not_minus_half;
- // Test for -0.5.
- // Load xmm2 with -0.5.
- __ movq(rcx, V8_UINT64_C(0xBFE0000000000000), RelocInfo::NONE);
- __ movq(xmm2, rcx);
- // xmm2 now has -0.5.
- __ ucomisd(xmm2, xmm1);
- __ j(not_equal, &not_minus_half, Label::kNear);
-
- // Calculates reciprocal of square root.
- // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
- __ xorps(xmm1, xmm1);
- __ addsd(xmm1, xmm0);
- __ sqrtsd(xmm1, xmm1);
- __ divsd(xmm3, xmm1);
- __ movaps(xmm1, xmm3);
- __ jmp(&allocate_return);
-
- // Test for 0.5.
- __ bind(&not_minus_half);
- // Load xmm2 with 0.5.
- // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3.
- __ addsd(xmm2, xmm3);
- // xmm2 now has 0.5.
- __ ucomisd(xmm2, xmm1);
- __ j(not_equal, &call_runtime);
- // Calculates square root.
- // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
- __ xorps(xmm1, xmm1);
- __ addsd(xmm1, xmm0); // Convert -0 to 0.
- __ sqrtsd(xmm1, xmm1);
-
- __ bind(&allocate_return);
- __ AllocateHeapNumber(rcx, rax, &call_runtime);
- __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm1);
- __ movq(rax, rcx);
- __ ret(2 * kPointerSize);
+ // The stub is called from non-optimized code, which expects the result
+ // as heap number in eax.
+ __ bind(&done);
+ __ AllocateHeapNumber(rax, rcx, &call_runtime);
+ __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result);
+ __ IncrementCounter(counters->math_pow(), 1);
+ __ ret(2 * kPointerSize);
+ } else {
+ __ bind(&call_runtime);
+ // Move base to the correct argument register. Exponent is already in xmm1.
+ __ movsd(xmm0, double_base);
+ ASSERT(double_exponent.is(xmm1));
+ {
+ AllowExternalCallThatCantCauseGC scope(masm);
+ __ PrepareCallCFunction(2);
+ __ CallCFunction(
+ ExternalReference::power_double_double_function(masm->isolate()), 2);
+ }
+ // Return value is in xmm0.
+ __ movsd(double_result, xmm0);
+ // Restore context register.
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
- __ bind(&call_runtime);
- __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
+ __ bind(&done);
+ __ IncrementCounter(counters->math_pow(), 1);
+ __ ret(0);
+ }
}
@@ -2166,6 +2362,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
const int kParameterMapHeaderSize =
FixedArray::kHeaderSize + 2 * kPointerSize;
Label no_parameter_map;
+ __ xor_(r8, r8);
__ testq(rbx, rbx);
__ j(zero, &no_parameter_map, Label::kNear);
__ lea(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
@@ -2208,7 +2405,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
__ movq(FieldOperand(rax, i), rdx);
}
- // Setup the callee in-object property.
+ // Set up the callee in-object property.
STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
__ movq(rdx, Operand(rsp, 3 * kPointerSize));
__ movq(FieldOperand(rax, JSObject::kHeaderSize +
@@ -2223,7 +2420,7 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
Heap::kArgumentsLengthIndex * kPointerSize),
rcx);
- // Setup the elements pointer in the allocated arguments object.
+ // Set up the elements pointer in the allocated arguments object.
// If we allocated a parameter map, edi will point there, otherwise to the
// backing store.
__ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize));
@@ -2259,16 +2456,13 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
Label parameters_loop, parameters_test;
// Load tagged parameter count into r9.
- __ movq(r9, Operand(rsp, 1 * kPointerSize));
+ __ Integer32ToSmi(r9, rbx);
__ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
- __ addq(r8, Operand(rsp, 3 * kPointerSize));
+ __ addq(r8, Operand(rsp, 1 * kPointerSize));
__ subq(r8, r9);
__ Move(r11, factory->the_hole_value());
__ movq(rdx, rdi);
- __ SmiToInteger64(kScratchRegister, r9);
- __ lea(rdi, Operand(rdi, kScratchRegister,
- times_pointer_size,
- kParameterMapHeaderSize));
+ __ lea(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
// r9 = loop variable (tagged)
// r8 = mapping index (tagged)
// r11 = the hole value
@@ -2304,9 +2498,8 @@ void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
Label arguments_loop, arguments_test;
__ movq(r8, rbx);
__ movq(rdx, Operand(rsp, 2 * kPointerSize));
- // Untag rcx and r8 for the loop below.
+ // Untag rcx for the loop below.
__ SmiToInteger64(rcx, rcx);
- __ SmiToInteger64(r8, r8);
__ lea(kScratchRegister, Operand(r8, times_pointer_size, 0));
__ subq(rdx, kScratchRegister);
__ jmp(&arguments_test, Label::kNear);
@@ -2430,7 +2623,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
// Get the parameters pointer from the stack.
__ movq(rdx, Operand(rsp, 2 * kPointerSize));
- // Setup the elements pointer in the allocated arguments object and
+ // Set up the elements pointer in the allocated arguments object and
// initialize the header in the elements fixed array.
__ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict));
__ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
@@ -2574,26 +2767,40 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
__ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
// First check for flat two byte string.
- __ andb(rbx, Immediate(
- kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask));
+ __ andb(rbx, Immediate(kIsNotStringMask |
+ kStringRepresentationMask |
+ kStringEncodingMask |
+ kShortExternalStringMask));
STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
__ j(zero, &seq_two_byte_string, Label::kNear);
- // Any other flat string must be a flat ascii string.
- __ andb(rbx, Immediate(kIsNotStringMask | kStringRepresentationMask));
+ // Any other flat string must be a flat ASCII string. None of the following
+ // string type tests will succeed if subject is not a string or a short
+ // external string.
+ __ andb(rbx, Immediate(kIsNotStringMask |
+ kStringRepresentationMask |
+ kShortExternalStringMask));
__ j(zero, &seq_ascii_string, Label::kNear);
+ // rbx: whether subject is a string and if yes, its string representation
// Check for flat cons string or sliced string.
// A flat cons string is a cons string where the second part is the empty
// string. In that case the subject string is just the first part of the cons
// string. Also in this case the first part of the cons string is known to be
// a sequential string or an external string.
// In the case of a sliced string its offset has to be taken into account.
- Label cons_string, check_encoding;
+ Label cons_string, external_string, check_encoding;
STATIC_ASSERT(kConsStringTag < kExternalStringTag);
STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
+ STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
+ STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
__ cmpq(rbx, Immediate(kExternalStringTag));
__ j(less, &cons_string, Label::kNear);
- __ j(equal, &runtime);
+ __ j(equal, &external_string);
+
+ // Catch non-string subject or short external string.
+ STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag !=0);
+ __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask));
+ __ j(not_zero, &runtime);
// String is sliced.
__ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset));
@@ -2617,16 +2824,16 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
Immediate(kStringRepresentationMask | kStringEncodingMask));
STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
__ j(zero, &seq_two_byte_string, Label::kNear);
- // Any other flat string must be ascii.
+ // Any other flat string must be sequential ASCII or external.
__ testb(FieldOperand(rbx, Map::kInstanceTypeOffset),
Immediate(kStringRepresentationMask));
- __ j(not_zero, &runtime);
+ __ j(not_zero, &external_string);
__ bind(&seq_ascii_string);
- // rdi: subject string (sequential ascii)
+ // rdi: subject string (sequential ASCII)
// rax: RegExp data (FixedArray)
__ movq(r11, FieldOperand(rax, JSRegExp::kDataAsciiCodeOffset));
- __ Set(rcx, 1); // Type is ascii.
+ __ Set(rcx, 1); // Type is ASCII.
__ jmp(&check_code, Label::kNear);
__ bind(&seq_two_byte_string);
@@ -2642,7 +2849,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ JumpIfSmi(r11, &runtime);
// rdi: subject string
- // rcx: encoding of subject string (1 if ascii, 0 if two_byte);
+ // rcx: encoding of subject string (1 if ASCII, 0 if two_byte);
// r11: code
// Load used arguments before starting to push arguments for call to native
// RegExp code to avoid handling changing stack height.
@@ -2650,7 +2857,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// rdi: subject string
// rbx: previous index
- // rcx: encoding of subject string (1 if ascii 0 if two_byte);
+ // rcx: encoding of subject string (1 if ASCII 0 if two_byte);
// r11: code
// All checks done. Now push arguments for native regexp code.
Counters* counters = masm->isolate()->counters();
@@ -2707,7 +2914,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Keep track on aliasing between argX defined above and the registers used.
// rdi: subject string
// rbx: previous index
- // rcx: encoding of subject string (1 if ascii 0 if two_byte);
+ // rcx: encoding of subject string (1 if ASCII 0 if two_byte);
// r11: code
// r14: slice offset
// r15: original subject string
@@ -2852,7 +3059,28 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ Throw(rax);
__ bind(&termination_exception);
- __ ThrowUncatchable(TERMINATION, rax);
+ __ ThrowUncatchable(rax);
+
+ // External string. Short external strings have already been ruled out.
+ // rdi: subject string (expected to be external)
+ // rbx: scratch
+ __ bind(&external_string);
+ __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
+ __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
+ if (FLAG_debug_code) {
+ // Assert that we do not have a cons or slice (indirect strings) here.
+ // Sequential strings have already been ruled out.
+ __ testb(rbx, Immediate(kIsIndirectStringMask));
+ __ Assert(zero, "external string expected, but not found");
+ }
+ __ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
+ // Move the pointer so that offset-wise, it looks like a sequential string.
+ STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqAsciiString::kHeaderSize);
+ __ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
+ STATIC_ASSERT(kTwoByteStringTag == 0);
+ __ testb(rbx, Immediate(kStringEncodingMask));
+ __ j(not_zero, &seq_ascii_string);
+ __ jmp(&seq_two_byte_string);
// Do the runtime call to execute the regexp.
__ bind(&runtime);
@@ -3092,37 +3320,6 @@ void CompareStub::Generate(MacroAssembler* masm) {
// NOTICE! This code is only reached after a smi-fast-case check, so
// it is certain that at least one operand isn't a smi.
- {
- Label not_user_equal, user_equal;
- __ JumpIfSmi(rax, &not_user_equal);
- __ JumpIfSmi(rdx, &not_user_equal);
-
- __ CmpObjectType(rax, JS_OBJECT_TYPE, rbx);
- __ j(not_equal, &not_user_equal);
-
- __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
- __ j(not_equal, &not_user_equal);
-
- __ testb(FieldOperand(rbx, Map::kBitField2Offset),
- Immediate(1 << Map::kUseUserObjectComparison));
- __ j(not_zero, &user_equal);
- __ testb(FieldOperand(rcx, Map::kBitField2Offset),
- Immediate(1 << Map::kUseUserObjectComparison));
- __ j(not_zero, &user_equal);
-
- __ jmp(&not_user_equal);
-
- __ bind(&user_equal);
-
- __ pop(rbx); // Return address.
- __ push(rax);
- __ push(rdx);
- __ push(rbx);
- __ TailCallRuntime(Runtime::kUserObjectEquals, 2, 1);
-
- __ bind(&not_user_equal);
- }
-
// Two identical objects are equal unless they are both NaN or undefined.
{
Label not_identical;
@@ -3288,7 +3485,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
__ JumpIfNotBothSequentialAsciiStrings(
rdx, rax, rcx, rbx, &check_unequal_objects);
- // Inline comparison of ascii strings.
+ // Inline comparison of ASCII strings.
if (cc_ == equal) {
StringCompareStub::GenerateFlatAsciiStringEquals(masm,
rdx,
@@ -3387,23 +3584,52 @@ void StackCheckStub::Generate(MacroAssembler* masm) {
}
-void CallFunctionStub::FinishCode(Code* code) {
- code->set_has_function_cache(false);
+void InterruptStub::Generate(MacroAssembler* masm) {
+ __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
}
-void CallFunctionStub::Clear(Heap* heap, Address address) {
- UNREACHABLE();
-}
+static void GenerateRecordCallTarget(MacroAssembler* masm) {
+ // Cache the called function in a global property cell. Cache states
+ // are uninitialized, monomorphic (indicated by a JSFunction), and
+ // megamorphic.
+ // rbx : cache cell for call target
+ // rdi : the function to call
+ Isolate* isolate = masm->isolate();
+ Label initialize, done;
+
+ // Load the cache state into rcx.
+ __ movq(rcx, FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset));
+
+ // A monomorphic cache hit or an already megamorphic state: invoke the
+ // function without changing the state.
+ __ cmpq(rcx, rdi);
+ __ j(equal, &done, Label::kNear);
+ __ Cmp(rcx, TypeFeedbackCells::MegamorphicSentinel(isolate));
+ __ j(equal, &done, Label::kNear);
+
+ // A monomorphic miss (i.e, here the cache is not uninitialized) goes
+ // megamorphic.
+ __ Cmp(rcx, TypeFeedbackCells::UninitializedSentinel(isolate));
+ __ j(equal, &initialize, Label::kNear);
+ // MegamorphicSentinel is an immortal immovable object (undefined) so no
+ // write-barrier is needed.
+ __ Move(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
+ TypeFeedbackCells::MegamorphicSentinel(isolate));
+ __ jmp(&done, Label::kNear);
+ // An uninitialized cache is patched with the function.
+ __ bind(&initialize);
+ __ movq(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset), rdi);
+ // No need for a write barrier here - cells are rescanned.
-Object* CallFunctionStub::GetCachedValue(Address address) {
- UNREACHABLE();
- return NULL;
+ __ bind(&done);
}
void CallFunctionStub::Generate(MacroAssembler* masm) {
+ // rdi : the function to call
+ // rbx : cache cell for call target
Label slow, non_function;
// The receiver might implicitly be the global object. This is
@@ -3424,10 +3650,6 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ bind(&call);
}
- // Get the function to call from the stack.
- // +2 ~ receiver, return address
- __ movq(rdi, Operand(rsp, (argc_ + 2) * kPointerSize));
-
// Check that the function really is a JavaScript function.
__ JumpIfSmi(rdi, &non_function);
// Goto slow case if we do not have a function.
@@ -3464,7 +3686,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ push(rcx);
__ Set(rax, argc_ + 1);
__ Set(rbx, 0);
- __ SetCallKind(rcx, CALL_AS_FUNCTION);
+ __ SetCallKind(rcx, CALL_AS_METHOD);
__ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
{
Handle<Code> adaptor =
@@ -3486,6 +3708,49 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
}
+void CallConstructStub::Generate(MacroAssembler* masm) {
+ // rax : number of arguments
+ // rbx : cache cell for call target
+ // rdi : constructor function
+ Label slow, non_function_call;
+
+ // Check that function is not a smi.
+ __ JumpIfSmi(rdi, &non_function_call);
+ // Check that function is a JSFunction.
+ __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
+ __ j(not_equal, &slow);
+
+ if (RecordCallTarget()) {
+ GenerateRecordCallTarget(masm);
+ }
+
+ // Jump to the function-specific construct stub.
+ __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+ __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kConstructStubOffset));
+ __ lea(rbx, FieldOperand(rbx, Code::kHeaderSize));
+ __ jmp(rbx);
+
+ // rdi: called object
+ // rax: number of arguments
+ // rcx: object map
+ Label do_call;
+ __ bind(&slow);
+ __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
+ __ j(not_equal, &non_function_call);
+ __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
+ __ jmp(&do_call);
+
+ __ bind(&non_function_call);
+ __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
+ __ bind(&do_call);
+ // Set expected number of arguments to zero (not changing rax).
+ __ Set(rbx, 0);
+ __ SetCallKind(rcx, CALL_AS_METHOD);
+ __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+ RelocInfo::CODE_TARGET);
+}
+
+
bool CEntryStub::NeedsImmovableCode() {
return false;
}
@@ -3520,12 +3785,6 @@ void CEntryStub::GenerateAheadOfTime() {
}
-void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
- // Throw exception in eax.
- __ Throw(rax);
-}
-
-
void CEntryStub::GenerateCore(MacroAssembler* masm,
Label* throw_normal_exception,
Label* throw_termination_exception,
@@ -3666,12 +3925,6 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
}
-void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm,
- UncatchableExceptionType type) {
- __ ThrowUncatchable(type, rax);
-}
-
-
void CEntryStub::Generate(MacroAssembler* masm) {
// rax: number of arguments including receiver
// rbx: pointer to C function (C callee-saved)
@@ -3735,22 +3988,34 @@ void CEntryStub::Generate(MacroAssembler* masm) {
true);
__ bind(&throw_out_of_memory_exception);
- GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
+ // Set external caught exception to false.
+ Isolate* isolate = masm->isolate();
+ ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
+ isolate);
+ __ Set(rax, static_cast<int64_t>(false));
+ __ Store(external_caught, rax);
+
+ // Set pending exception and rax to out of memory exception.
+ ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
+ isolate);
+ __ movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
+ __ Store(pending_exception, rax);
+ // Fall through to the next label.
__ bind(&throw_termination_exception);
- GenerateThrowUncatchable(masm, TERMINATION);
+ __ ThrowUncatchable(rax);
__ bind(&throw_normal_exception);
- GenerateThrowTOS(masm);
+ __ Throw(rax);
}
void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
- Label invoke, exit;
+ Label invoke, handler_entry, exit;
Label not_outermost_js, not_outermost_js_2;
{ // NOLINT. Scope block confuses linter.
MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
- // Setup frame.
+ // Set up frame.
__ push(rbp);
__ movq(rbp, rsp);
@@ -3806,20 +4071,23 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME));
__ bind(&cont);
- // Call a faked try-block that does the invoke.
- __ call(&invoke);
-
- // Caught exception: Store result (exception) in the pending
- // exception field in the JSEnv and return a failure sentinel.
+ // Jump to a faked try block that does the invoke, with a faked catch
+ // block that sets the pending exception.
+ __ jmp(&invoke);
+ __ bind(&handler_entry);
+ handler_offset_ = handler_entry.pos();
+ // Caught exception: Store result (exception) in the pending exception
+ // field in the JSEnv and return a failure sentinel.
ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
isolate);
__ Store(pending_exception, rax);
__ movq(rax, Failure::Exception(), RelocInfo::NONE);
__ jmp(&exit);
- // Invoke: Link this frame into the handler chain.
+ // Invoke: Link this frame into the handler chain. There's only one
+ // handler block in this code object, so its index is 0.
__ bind(&invoke);
- __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
+ __ PushTryHandler(StackHandler::JS_ENTRY, 0);
// Clear any pending exceptions.
__ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
@@ -3828,11 +4096,11 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// Fake a receiver (NULL).
__ push(Immediate(0)); // receiver
- // Invoke the function by calling through JS entry trampoline
- // builtin and pop the faked function when we return. We load the address
- // from an external reference instead of inlining the call target address
- // directly in the code, because the builtin stubs may not have been
- // generated yet at the time this code is generated.
+ // Invoke the function by calling through JS entry trampoline builtin and
+ // pop the faked function when we return. We load the address from an
+ // external reference instead of inlining the call target address directly
+ // in the code, because the builtin stubs may not have been generated yet
+ // at the time this code is generated.
if (is_construct) {
ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
isolate);
@@ -3961,12 +4229,14 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
// Get return address and delta to inlined map check.
__ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
__ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
- __ movq(Operand(kScratchRegister, kOffsetToMapCheckValue), rax);
if (FLAG_debug_code) {
__ movl(rdi, Immediate(kWordBeforeMapCheckValue));
__ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
__ Assert(equal, "InstanceofStub unexpected call site cache (check).");
}
+ __ movq(kScratchRegister,
+ Operand(kScratchRegister, kOffsetToMapCheckValue));
+ __ movq(Operand(kScratchRegister, 0), rax);
}
__ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));
@@ -4110,79 +4380,17 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
// If the index is non-smi trigger the non-smi case.
__ JumpIfNotSmi(index_, &index_not_smi_);
-
- // Put smi-tagged index into scratch register.
- __ movq(scratch_, index_);
__ bind(&got_smi_index_);
// Check for index out of range.
- __ SmiCompare(scratch_, FieldOperand(object_, String::kLengthOffset));
+ __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset));
__ j(above_equal, index_out_of_range_);
- // We need special handling for non-flat strings.
- STATIC_ASSERT(kSeqStringTag == 0);
- __ testb(result_, Immediate(kStringRepresentationMask));
- __ j(zero, &flat_string);
-
- // Handle non-flat strings.
- __ and_(result_, Immediate(kStringRepresentationMask));
- STATIC_ASSERT(kConsStringTag < kExternalStringTag);
- STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
- __ cmpb(result_, Immediate(kExternalStringTag));
- __ j(greater, &sliced_string);
- __ j(equal, &call_runtime_);
-
- // ConsString.
- // Check whether the right hand side is the empty string (i.e. if
- // this is really a flat string in a cons string). If that is not
- // the case we would rather go to the runtime system now to flatten
- // the string.
- Label assure_seq_string;
- __ CompareRoot(FieldOperand(object_, ConsString::kSecondOffset),
- Heap::kEmptyStringRootIndex);
- __ j(not_equal, &call_runtime_);
- // Get the first of the two strings and load its instance type.
- ASSERT(!kScratchRegister.is(scratch_));
- __ movq(kScratchRegister, FieldOperand(object_, ConsString::kFirstOffset));
- __ jmp(&assure_seq_string, Label::kNear);
-
- // SlicedString, unpack and add offset.
- __ bind(&sliced_string);
- __ addq(scratch_, FieldOperand(object_, SlicedString::kOffsetOffset));
- __ movq(kScratchRegister, FieldOperand(object_, SlicedString::kParentOffset));
+ __ SmiToInteger32(index_, index_);
- __ bind(&assure_seq_string);
- __ movq(result_, FieldOperand(kScratchRegister, HeapObject::kMapOffset));
- __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
- // If the first cons component is also non-flat, then go to runtime.
- STATIC_ASSERT(kSeqStringTag == 0);
- __ testb(result_, Immediate(kStringRepresentationMask));
- __ j(not_zero, &call_runtime_);
- __ movq(object_, kScratchRegister);
+ StringCharLoadGenerator::Generate(
+ masm, object_, index_, result_, &call_runtime_);
- // Check for 1-byte or 2-byte string.
- __ bind(&flat_string);
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
- STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
- __ testb(result_, Immediate(kStringEncodingMask));
- __ j(not_zero, &ascii_string);
-
- // 2-byte string.
- // Load the 2-byte character code into the result register.
- __ SmiToInteger32(scratch_, scratch_);
- __ movzxwl(result_, FieldOperand(object_,
- scratch_, times_2,
- SeqTwoByteString::kHeaderSize));
- __ jmp(&got_char_code);
-
- // ASCII string.
- // Load the byte into the result register.
- __ bind(&ascii_string);
- __ SmiToInteger32(scratch_, scratch_);
- __ movzxbl(result_, FieldOperand(object_,
- scratch_, times_1,
- SeqAsciiString::kHeaderSize));
- __ bind(&got_char_code);
__ Integer32ToSmi(result_, result_);
__ bind(&exit_);
}
@@ -4203,7 +4411,6 @@ void StringCharCodeAtGenerator::GenerateSlow(
DONT_DO_SMI_CHECK);
call_helper.BeforeCall(masm);
__ push(object_);
- __ push(index_);
__ push(index_); // Consumed by runtime conversion function.
if (index_flags_ == STRING_INDEX_IS_NUMBER) {
__ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
@@ -4212,19 +4419,18 @@ void StringCharCodeAtGenerator::GenerateSlow(
// NumberToSmi discards numbers that are not exact integers.
__ CallRuntime(Runtime::kNumberToSmi, 1);
}
- if (!scratch_.is(rax)) {
+ if (!index_.is(rax)) {
// Save the conversion result before the pop instructions below
// have a chance to overwrite it.
- __ movq(scratch_, rax);
+ __ movq(index_, rax);
}
- __ pop(index_);
__ pop(object_);
// Reload the instance type.
__ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
__ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
call_helper.AfterCall(masm);
// If index is still not a smi, it must be out of range.
- __ JumpIfNotSmi(scratch_, index_out_of_range_);
+ __ JumpIfNotSmi(index_, index_out_of_range_);
// Otherwise, return to the fast path.
__ jmp(&got_smi_index_);
@@ -4234,6 +4440,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
__ bind(&call_runtime_);
call_helper.BeforeCall(masm);
__ push(object_);
+ __ Integer32ToSmi(index_, index_);
__ push(index_);
__ CallRuntime(Runtime::kStringCharCodeAt, 2);
if (!result_.is(rax)) {
@@ -4302,7 +4509,7 @@ void StringCharAtGenerator::GenerateSlow(
void StringAddStub::Generate(MacroAssembler* masm) {
- Label string_add_runtime, call_builtin;
+ Label call_runtime, call_builtin;
Builtins::JavaScript builtin_id = Builtins::ADD;
// Load the two arguments.
@@ -4311,14 +4518,14 @@ void StringAddStub::Generate(MacroAssembler* masm) {
// Make sure that both arguments are strings if not known in advance.
if (flags_ == NO_STRING_ADD_FLAGS) {
- __ JumpIfSmi(rax, &string_add_runtime);
+ __ JumpIfSmi(rax, &call_runtime);
__ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8);
- __ j(above_equal, &string_add_runtime);
+ __ j(above_equal, &call_runtime);
// First argument is a a string, test second.
- __ JumpIfSmi(rdx, &string_add_runtime);
+ __ JumpIfSmi(rdx, &call_runtime);
__ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9);
- __ j(above_equal, &string_add_runtime);
+ __ j(above_equal, &call_runtime);
} else {
// Here at least one of the arguments is definitely a string.
// We convert the one that is not known to be a string.
@@ -4384,9 +4591,9 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ SmiCompare(rbx, Smi::FromInt(2));
__ j(not_equal, &longer_than_two);
- // Check that both strings are non-external ascii strings.
+ // Check that both strings are non-external ASCII strings.
__ JumpIfBothInstanceTypesAreNotSequentialAscii(r8, r9, rbx, rcx,
- &string_add_runtime);
+ &call_runtime);
// Get the two characters forming the sub string.
__ movzxbq(rbx, FieldOperand(rax, SeqAsciiString::kHeaderSize));
@@ -4401,20 +4608,30 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ ret(2 * kPointerSize);
__ bind(&make_two_character_string);
- __ Set(rbx, 2);
- __ jmp(&make_flat_ascii_string);
+ __ Set(rdi, 2);
+ __ AllocateAsciiString(rax, rdi, r8, r9, r11, &call_runtime);
+ // rbx - first byte: first character
+ // rbx - second byte: *maybe* second character
+ // Make sure that the second byte of rbx contains the second character.
+ __ movzxbq(rcx, FieldOperand(rdx, SeqAsciiString::kHeaderSize));
+ __ shll(rcx, Immediate(kBitsPerByte));
+ __ orl(rbx, rcx);
+ // Write both characters to the new string.
+ __ movw(FieldOperand(rax, SeqAsciiString::kHeaderSize), rbx);
+ __ IncrementCounter(counters->string_add_native(), 1);
+ __ ret(2 * kPointerSize);
__ bind(&longer_than_two);
// Check if resulting string will be flat.
- __ SmiCompare(rbx, Smi::FromInt(String::kMinNonFlatLength));
+ __ SmiCompare(rbx, Smi::FromInt(ConsString::kMinLength));
__ j(below, &string_add_flat_result);
// Handle exceptionally long strings in the runtime system.
STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0);
__ SmiCompare(rbx, Smi::FromInt(String::kMaxLength));
- __ j(above, &string_add_runtime);
+ __ j(above, &call_runtime);
// If result is not supposed to be flat, allocate a cons string object. If
- // both strings are ascii the result is an ascii cons string.
+ // both strings are ASCII the result is an ASCII cons string.
// rax: first string
// rbx: length of resulting flat string
// rdx: second string
@@ -4428,8 +4645,8 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ testl(rcx, Immediate(kStringEncodingMask));
__ j(zero, &non_ascii);
__ bind(&ascii_data);
- // Allocate an acsii cons string.
- __ AllocateAsciiConsString(rcx, rdi, no_reg, &string_add_runtime);
+ // Allocate an ASCII cons string.
+ __ AllocateAsciiConsString(rcx, rdi, no_reg, &call_runtime);
__ bind(&allocated);
// Fill the fields of the cons string.
__ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx);
@@ -4442,7 +4659,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ ret(2 * kPointerSize);
__ bind(&non_ascii);
// At least one of the strings is two-byte. Check whether it happens
- // to contain only ascii characters.
+ // to contain only ASCII characters.
// rcx: first instance type AND second instance type.
// r8: first instance type.
// r9: second instance type.
@@ -4454,111 +4671,103 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ cmpb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag));
__ j(equal, &ascii_data);
// Allocate a two byte cons string.
- __ AllocateTwoByteConsString(rcx, rdi, no_reg, &string_add_runtime);
+ __ AllocateTwoByteConsString(rcx, rdi, no_reg, &call_runtime);
__ jmp(&allocated);
- // Handle creating a flat result. First check that both strings are not
- // external strings.
+ // We cannot encounter sliced strings or cons strings here since:
+ STATIC_ASSERT(SlicedString::kMinLength >= ConsString::kMinLength);
+ // Handle creating a flat result from either external or sequential strings.
+ // Locate the first characters' locations.
// rax: first string
// rbx: length of resulting flat string as smi
// rdx: second string
// r8: instance type of first string
// r9: instance type of first string
+ Label first_prepared, second_prepared;
+ Label first_is_sequential, second_is_sequential;
__ bind(&string_add_flat_result);
- __ SmiToInteger32(rbx, rbx);
- __ movl(rcx, r8);
- __ and_(rcx, Immediate(kStringRepresentationMask));
- __ cmpl(rcx, Immediate(kExternalStringTag));
- __ j(equal, &string_add_runtime);
- __ movl(rcx, r9);
- __ and_(rcx, Immediate(kStringRepresentationMask));
- __ cmpl(rcx, Immediate(kExternalStringTag));
- __ j(equal, &string_add_runtime);
- // We cannot encounter sliced strings here since:
- STATIC_ASSERT(SlicedString::kMinLength >= String::kMinNonFlatLength);
- // Now check if both strings are ascii strings.
- // rax: first string
- // rbx: length of resulting flat string
- // rdx: second string
- // r8: instance type of first string
- // r9: instance type of second string
+
+ __ SmiToInteger32(r14, FieldOperand(rax, SeqString::kLengthOffset));
+ // r14: length of first string
+ STATIC_ASSERT(kSeqStringTag == 0);
+ __ testb(r8, Immediate(kStringRepresentationMask));
+ __ j(zero, &first_is_sequential, Label::kNear);
+ // Rule out short external string and load string resource.
+ STATIC_ASSERT(kShortExternalStringTag != 0);
+ __ testb(r8, Immediate(kShortExternalStringMask));
+ __ j(not_zero, &call_runtime);
+ __ movq(rcx, FieldOperand(rax, ExternalString::kResourceDataOffset));
+ __ jmp(&first_prepared, Label::kNear);
+ __ bind(&first_is_sequential);
+ STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize);
+ __ lea(rcx, FieldOperand(rax, SeqAsciiString::kHeaderSize));
+ __ bind(&first_prepared);
+
+ // Check whether both strings have same encoding.
+ __ xorl(r8, r9);
+ __ testb(r8, Immediate(kStringEncodingMask));
+ __ j(not_zero, &call_runtime);
+
+ __ SmiToInteger32(r15, FieldOperand(rdx, SeqString::kLengthOffset));
+ // r15: length of second string
+ STATIC_ASSERT(kSeqStringTag == 0);
+ __ testb(r9, Immediate(kStringRepresentationMask));
+ __ j(zero, &second_is_sequential, Label::kNear);
+ // Rule out short external string and load string resource.
+ STATIC_ASSERT(kShortExternalStringTag != 0);
+ __ testb(r9, Immediate(kShortExternalStringMask));
+ __ j(not_zero, &call_runtime);
+ __ movq(rdx, FieldOperand(rdx, ExternalString::kResourceDataOffset));
+ __ jmp(&second_prepared, Label::kNear);
+ __ bind(&second_is_sequential);
+ STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize);
+ __ lea(rdx, FieldOperand(rdx, SeqAsciiString::kHeaderSize));
+ __ bind(&second_prepared);
+
Label non_ascii_string_add_flat_result;
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
- STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
- __ testl(r8, Immediate(kStringEncodingMask));
+ // r9: instance type of second string
+ // First string and second string have the same encoding.
+ STATIC_ASSERT(kTwoByteStringTag == 0);
+ __ SmiToInteger32(rbx, rbx);
+ __ testb(r9, Immediate(kStringEncodingMask));
__ j(zero, &non_ascii_string_add_flat_result);
- __ testl(r9, Immediate(kStringEncodingMask));
- __ j(zero, &string_add_runtime);
__ bind(&make_flat_ascii_string);
- // Both strings are ascii strings. As they are short they are both flat.
- __ AllocateAsciiString(rcx, rbx, rdi, r14, r11, &string_add_runtime);
- // rcx: result string
- __ movq(rbx, rcx);
+ // Both strings are ASCII strings. As they are short they are both flat.
+ __ AllocateAsciiString(rax, rbx, rdi, r8, r9, &call_runtime);
+ // rax: result string
// Locate first character of result.
- __ addq(rcx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- // Locate first character of first argument
- __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
- __ addq(rax, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- // rax: first char of first argument
- // rbx: result string
- // rcx: first character of result
- // rdx: second string
- // rdi: length of first argument
- StringHelper::GenerateCopyCharacters(masm, rcx, rax, rdi, true);
- // Locate first character of second argument.
- __ SmiToInteger32(rdi, FieldOperand(rdx, String::kLengthOffset));
- __ addq(rdx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- // rbx: result string
- // rcx: next character of result
- // rdx: first char of second argument
- // rdi: length of second argument
- StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, true);
- __ movq(rax, rbx);
+ __ lea(rbx, FieldOperand(rax, SeqAsciiString::kHeaderSize));
+ // rcx: first char of first string
+ // rbx: first character of result
+ // r14: length of first string
+ StringHelper::GenerateCopyCharacters(masm, rbx, rcx, r14, true);
+ // rbx: next character of result
+ // rdx: first char of second string
+ // r15: length of second string
+ StringHelper::GenerateCopyCharacters(masm, rbx, rdx, r15, true);
__ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
- // Handle creating a flat two byte result.
- // rax: first string - known to be two byte
- // rbx: length of resulting flat string
- // rdx: second string
- // r8: instance type of first string
- // r9: instance type of first string
__ bind(&non_ascii_string_add_flat_result);
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
- STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
- __ and_(r9, Immediate(kStringEncodingMask));
- __ j(not_zero, &string_add_runtime);
- // Both strings are two byte strings. As they are short they are both
- // flat.
- __ AllocateTwoByteString(rcx, rbx, rdi, r14, r11, &string_add_runtime);
- // rcx: result string
- __ movq(rbx, rcx);
+ // Both strings are ASCII strings. As they are short they are both flat.
+ __ AllocateTwoByteString(rax, rbx, rdi, r8, r9, &call_runtime);
+ // rax: result string
// Locate first character of result.
- __ addq(rcx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
- // Locate first character of first argument.
- __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset));
- __ addq(rax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
- // rax: first char of first argument
- // rbx: result string
- // rcx: first character of result
- // rdx: second argument
- // rdi: length of first argument
- StringHelper::GenerateCopyCharacters(masm, rcx, rax, rdi, false);
- // Locate first character of second argument.
- __ SmiToInteger32(rdi, FieldOperand(rdx, String::kLengthOffset));
- __ addq(rdx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
- // rbx: result string
- // rcx: next character of result
- // rdx: first char of second argument
- // rdi: length of second argument
- StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, false);
- __ movq(rax, rbx);
+ __ lea(rbx, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
+ // rcx: first char of first string
+ // rbx: first character of result
+ // r14: length of first string
+ StringHelper::GenerateCopyCharacters(masm, rbx, rcx, r14, false);
+ // rbx: next character of result
+ // rdx: first char of second string
+ // r15: length of second string
+ StringHelper::GenerateCopyCharacters(masm, rbx, rdx, r15, false);
__ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
// Just jump to runtime to add the two strings.
- __ bind(&string_add_runtime);
+ __ bind(&call_runtime);
__ TailCallRuntime(Runtime::kStringAdd, 2, 1);
if (call_builtin.is_linked()) {
@@ -4752,6 +4961,7 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
static const int kProbes = 4;
Label found_in_symbol_table;
Label next_probe[kProbes];
+ Register candidate = scratch; // Scratch register contains candidate.
for (int i = 0; i < kProbes; i++) {
// Calculate entry in symbol table.
__ movl(scratch, hash);
@@ -4761,7 +4971,6 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
__ andl(scratch, mask);
// Load the entry from the symbol table.
- Register candidate = scratch; // Scratch register contains candidate.
STATIC_ASSERT(SymbolTable::kEntrySize == 1);
__ movq(candidate,
FieldOperand(symbol_table,
@@ -4776,7 +4985,12 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
__ CompareRoot(candidate, Heap::kUndefinedValueRootIndex);
__ j(equal, not_found);
- // Must be null (deleted entry).
+ // Must be the hole (deleted entry).
+ if (FLAG_debug_code) {
+ __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
+ __ cmpq(kScratchRegister, candidate);
+ __ Assert(equal, "oddball in symbol table is not undefined or the hole");
+ }
__ jmp(&next_probe[i]);
__ bind(&is_string);
@@ -4790,7 +5004,7 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
// JumpIfInstanceTypeIsNotSequentialAscii does not use it implicitly
Register temp = kScratchRegister;
- // Check that the candidate is a non-external ascii string.
+ // Check that the candidate is a non-external ASCII string.
__ movzxbl(temp, FieldOperand(map, Map::kInstanceTypeOffset));
__ JumpIfInstanceTypeIsNotSequentialAscii(
temp, temp, &next_probe[i]);
@@ -4807,7 +5021,7 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
__ jmp(not_found);
// Scratch register contains result when we fall through to here.
- Register result = scratch;
+ Register result = candidate;
__ bind(&found_in_symbol_table);
if (!result.is(rax)) {
__ movq(rax, result);
@@ -4819,13 +5033,16 @@ void StringHelper::GenerateHashInit(MacroAssembler* masm,
Register hash,
Register character,
Register scratch) {
- // hash = character + (character << 10);
- __ movl(hash, character);
- __ shll(hash, Immediate(10));
- __ addl(hash, character);
+ // hash = (seed + character) + ((seed + character) << 10);
+ __ LoadRoot(scratch, Heap::kHashSeedRootIndex);
+ __ SmiToInteger32(scratch, scratch);
+ __ addl(scratch, character);
+ __ movl(hash, scratch);
+ __ shll(scratch, Immediate(10));
+ __ addl(hash, scratch);
// hash ^= hash >> 6;
__ movl(scratch, hash);
- __ sarl(scratch, Immediate(6));
+ __ shrl(scratch, Immediate(6));
__ xorl(hash, scratch);
}
@@ -4842,7 +5059,7 @@ void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
__ addl(hash, scratch);
// hash ^= hash >> 6;
__ movl(scratch, hash);
- __ sarl(scratch, Immediate(6));
+ __ shrl(scratch, Immediate(6));
__ xorl(hash, scratch);
}
@@ -4854,17 +5071,19 @@ void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
__ leal(hash, Operand(hash, hash, times_8, 0));
// hash ^= hash >> 11;
__ movl(scratch, hash);
- __ sarl(scratch, Immediate(11));
+ __ shrl(scratch, Immediate(11));
__ xorl(hash, scratch);
// hash += hash << 15;
__ movl(scratch, hash);
__ shll(scratch, Immediate(15));
__ addl(hash, scratch);
+ __ andl(hash, Immediate(String::kHashBitMask));
+
// if (hash == 0) hash = 27;
Label hash_not_zero;
__ j(not_zero, &hash_not_zero);
- __ Set(hash, 27);
+ __ Set(hash, StringHasher::kZeroHash);
__ bind(&hash_not_zero);
}
@@ -4900,8 +5119,12 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen.
__ cmpq(FieldOperand(rax, String::kLengthOffset), rcx);
- Label return_rax;
- __ j(equal, &return_rax);
+ Label not_original_string;
+ __ j(not_equal, &not_original_string, Label::kNear);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->sub_string_native(), 1);
+ __ ret(kArgumentsSize);
+ __ bind(&not_original_string);
// Special handling of sub-strings of length 1 and 2. One character strings
// are handled in the runtime system (looked up in the single character
// cache). Two character strings are looked for in the symbol cache.
@@ -4920,71 +5143,77 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// Get the two characters forming the sub string.
__ SmiToInteger32(rdx, rdx); // From index is no longer smi.
__ movzxbq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize));
- __ movzxbq(rcx,
+ __ movzxbq(rdi,
FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize + 1));
// Try to lookup two character string in symbol table.
Label make_two_character_string;
StringHelper::GenerateTwoCharacterSymbolTableProbe(
- masm, rbx, rcx, rax, rdx, rdi, r14, &make_two_character_string);
+ masm, rbx, rdi, r9, r11, r14, r15, &make_two_character_string);
+ __ IncrementCounter(counters->sub_string_native(), 1);
__ ret(3 * kPointerSize);
__ bind(&make_two_character_string);
- // Setup registers for allocating the two character string.
- __ movq(rax, Operand(rsp, kStringOffset));
- __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
+ // Set up registers for allocating the two character string.
+ __ movzxwq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize));
+ __ AllocateAsciiString(rax, rcx, r11, r14, r15, &runtime);
+ __ movw(FieldOperand(rax, SeqAsciiString::kHeaderSize), rbx);
+ __ IncrementCounter(counters->sub_string_native(), 1);
+ __ ret(3 * kPointerSize);
+
+ __ bind(&result_longer_than_two);
+ // rax: string
+ // rbx: instance type
+ // rcx: sub string length
+ // rdx: from index (smi)
+ // Deal with different string types: update the index if necessary
+ // and put the underlying string into edi.
+ Label underlying_unpacked, sliced_string, seq_or_external_string;
+ // If the string is not indirect, it can only be sequential or external.
+ STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
+ STATIC_ASSERT(kIsIndirectStringMask != 0);
+ __ testb(rbx, Immediate(kIsIndirectStringMask));
+ __ j(zero, &seq_or_external_string, Label::kNear);
+
+ __ testb(rbx, Immediate(kSlicedNotConsMask));
+ __ j(not_zero, &sliced_string, Label::kNear);
+ // Cons string. Check whether it is flat, then fetch first part.
+ // Flat cons strings have an empty second part.
+ __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset),
+ Heap::kEmptyStringRootIndex);
+ __ j(not_equal, &runtime);
+ __ movq(rdi, FieldOperand(rax, ConsString::kFirstOffset));
+ // Update instance type.
+ __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
+ __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
+ __ jmp(&underlying_unpacked, Label::kNear);
+
+ __ bind(&sliced_string);
+ // Sliced string. Fetch parent and correct start index by offset.
+ __ addq(rdx, FieldOperand(rax, SlicedString::kOffsetOffset));
+ __ movq(rdi, FieldOperand(rax, SlicedString::kParentOffset));
+ // Update instance type.
+ __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
__ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
- __ Set(rcx, 2);
+ __ jmp(&underlying_unpacked, Label::kNear);
+
+ __ bind(&seq_or_external_string);
+ // Sequential or external string. Just move string to the correct register.
+ __ movq(rdi, rax);
+
+ __ bind(&underlying_unpacked);
if (FLAG_string_slices) {
Label copy_routine;
+ // rdi: underlying subject string
+ // rbx: instance type of underlying subject string
+ // rdx: adjusted start index (smi)
+ // rcx: length
// If coming from the make_two_character_string path, the string
// is too short to be sliced anyways.
- STATIC_ASSERT(2 < SlicedString::kMinLength);
- __ jmp(&copy_routine);
- __ bind(&result_longer_than_two);
-
- // rax: string
- // rbx: instance type
- // rcx: sub string length
- // rdx: from index (smi)
- Label allocate_slice, sliced_string, seq_string;
__ cmpq(rcx, Immediate(SlicedString::kMinLength));
// Short slice. Copy instead of slicing.
__ j(less, &copy_routine);
- STATIC_ASSERT(kSeqStringTag == 0);
- __ testb(rbx, Immediate(kStringRepresentationMask));
- __ j(zero, &seq_string, Label::kNear);
- STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
- STATIC_ASSERT(kIsIndirectStringMask != 0);
- __ testb(rbx, Immediate(kIsIndirectStringMask));
- // External string. Jump to runtime.
- __ j(zero, &runtime);
-
- __ testb(rbx, Immediate(kSlicedNotConsMask));
- __ j(not_zero, &sliced_string, Label::kNear);
- // Cons string. Check whether it is flat, then fetch first part.
- __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset),
- Heap::kEmptyStringRootIndex);
- __ j(not_equal, &runtime);
- __ movq(rdi, FieldOperand(rax, ConsString::kFirstOffset));
- __ jmp(&allocate_slice, Label::kNear);
-
- __ bind(&sliced_string);
- // Sliced string. Fetch parent and correct start index by offset.
- __ addq(rdx, FieldOperand(rax, SlicedString::kOffsetOffset));
- __ movq(rdi, FieldOperand(rax, SlicedString::kParentOffset));
- __ jmp(&allocate_slice, Label::kNear);
-
- __ bind(&seq_string);
- // Sequential string. Just move string to the right register.
- __ movq(rdi, rax);
-
- __ bind(&allocate_slice);
- // edi: underlying subject string
- // ebx: instance type of original subject string
- // edx: offset
- // ecx: length
// Allocate new sliced string. At this point we do not reload the instance
// type including the string encoding because we simply rely on the info
// provided by the original string. It does not matter if the original
@@ -4995,93 +5224,96 @@ void SubStringStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
__ testb(rbx, Immediate(kStringEncodingMask));
__ j(zero, &two_byte_slice, Label::kNear);
- __ AllocateAsciiSlicedString(rax, rbx, no_reg, &runtime);
+ __ AllocateAsciiSlicedString(rax, rbx, r14, &runtime);
__ jmp(&set_slice_header, Label::kNear);
__ bind(&two_byte_slice);
- __ AllocateTwoByteSlicedString(rax, rbx, no_reg, &runtime);
+ __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime);
__ bind(&set_slice_header);
- __ movq(FieldOperand(rax, SlicedString::kOffsetOffset), rdx);
__ Integer32ToSmi(rcx, rcx);
__ movq(FieldOperand(rax, SlicedString::kLengthOffset), rcx);
- __ movq(FieldOperand(rax, SlicedString::kParentOffset), rdi);
__ movq(FieldOperand(rax, SlicedString::kHashFieldOffset),
Immediate(String::kEmptyHashField));
- __ jmp(&return_rax);
+ __ movq(FieldOperand(rax, SlicedString::kParentOffset), rdi);
+ __ movq(FieldOperand(rax, SlicedString::kOffsetOffset), rdx);
+ __ IncrementCounter(counters->sub_string_native(), 1);
+ __ ret(kArgumentsSize);
__ bind(&copy_routine);
- } else {
- __ bind(&result_longer_than_two);
}
- // rax: string
- // rbx: instance type
- // rcx: result string length
- // Check for flat ascii string
- Label non_ascii_flat;
- __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &non_ascii_flat);
+ // rdi: underlying subject string
+ // rbx: instance type of underlying subject string
+ // rdx: adjusted start index (smi)
+ // rcx: length
+ // The subject string can only be external or sequential string of either
+ // encoding at this point.
+ Label two_byte_sequential, sequential_string;
+ STATIC_ASSERT(kExternalStringTag != 0);
+ STATIC_ASSERT(kSeqStringTag == 0);
+ __ testb(rbx, Immediate(kExternalStringTag));
+ __ j(zero, &sequential_string);
+
+ // Handle external string.
+ // Rule out short external strings.
+ STATIC_CHECK(kShortExternalStringTag != 0);
+ __ testb(rbx, Immediate(kShortExternalStringMask));
+ __ j(not_zero, &runtime);
+ __ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
+ // Move the pointer so that offset-wise, it looks like a sequential string.
+ STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqAsciiString::kHeaderSize);
+ __ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
+
+ __ bind(&sequential_string);
+ STATIC_ASSERT((kAsciiStringTag & kStringEncodingMask) != 0);
+ __ testb(rbx, Immediate(kStringEncodingMask));
+ __ j(zero, &two_byte_sequential);
// Allocate the result.
- __ AllocateAsciiString(rax, rcx, rbx, rdx, rdi, &runtime);
+ __ AllocateAsciiString(rax, rcx, r11, r14, r15, &runtime);
// rax: result string
// rcx: result string length
- __ movq(rdx, rsi); // esi used by following code.
- // Locate first character of result.
- __ lea(rdi, FieldOperand(rax, SeqAsciiString::kHeaderSize));
- // Load string argument and locate character of sub string start.
- __ movq(rsi, Operand(rsp, kStringOffset));
- __ movq(rbx, Operand(rsp, kFromOffset));
- {
- SmiIndex smi_as_index = masm->SmiToIndex(rbx, rbx, times_1);
- __ lea(rsi, Operand(rsi, smi_as_index.reg, smi_as_index.scale,
+ __ movq(r14, rsi); // rsi used by following code.
+ { // Locate character of sub string start.
+ SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1);
+ __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
SeqAsciiString::kHeaderSize - kHeapObjectTag));
}
+ // Locate first character of result.
+ __ lea(rdi, FieldOperand(rax, SeqAsciiString::kHeaderSize));
// rax: result string
// rcx: result length
- // rdx: original value of rsi
// rdi: first character of result
// rsi: character of sub string start
+ // r14: original value of rsi
StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, true);
- __ movq(rsi, rdx); // Restore rsi.
- Counters* counters = masm->isolate()->counters();
+ __ movq(rsi, r14); // Restore rsi.
__ IncrementCounter(counters->sub_string_native(), 1);
__ ret(kArgumentsSize);
- __ bind(&non_ascii_flat);
- // rax: string
- // rbx: instance type & kStringRepresentationMask | kStringEncodingMask
- // rcx: result string length
- // Check for sequential two byte string
- __ cmpb(rbx, Immediate(kSeqStringTag | kTwoByteStringTag));
- __ j(not_equal, &runtime);
-
+ __ bind(&two_byte_sequential);
// Allocate the result.
- __ AllocateTwoByteString(rax, rcx, rbx, rdx, rdi, &runtime);
+ __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime);
// rax: result string
// rcx: result string length
- __ movq(rdx, rsi); // esi used by following code.
- // Locate first character of result.
- __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
- // Load string argument and locate character of sub string start.
- __ movq(rsi, Operand(rsp, kStringOffset));
- __ movq(rbx, Operand(rsp, kFromOffset));
- {
- SmiIndex smi_as_index = masm->SmiToIndex(rbx, rbx, times_2);
- __ lea(rsi, Operand(rsi, smi_as_index.reg, smi_as_index.scale,
+ __ movq(r14, rsi); // rsi used by following code.
+ { // Locate character of sub string start.
+ SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
+ __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
SeqAsciiString::kHeaderSize - kHeapObjectTag));
}
+ // Locate first character of result.
+ __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
// rax: result string
// rcx: result length
- // rdx: original value of rsi
// rdi: first character of result
// rsi: character of sub string start
+ // r14: original value of rsi
StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, false);
- __ movq(rsi, rdx); // Restore esi.
-
- __ bind(&return_rax);
+ __ movq(rsi, r14); // Restore rsi.
__ IncrementCounter(counters->sub_string_native(), 1);
__ ret(kArgumentsSize);
@@ -5222,7 +5454,7 @@ void StringCompareStub::GenerateAsciiCharsCompareLoop(
__ movb(scratch, Operand(left, index, times_1, 0));
__ cmpb(scratch, Operand(right, index, times_1, 0));
__ j(not_equal, chars_not_equal, near_jump);
- __ addq(index, Immediate(1));
+ __ incq(index);
__ j(not_zero, &loop);
}
@@ -5252,7 +5484,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
// Check that both are sequential ASCII strings.
__ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);
- // Inline comparison of ascii strings.
+ // Inline comparison of ASCII strings.
__ IncrementCounter(counters->string_compare_native(), 1);
// Drop arguments from the stack
__ pop(rcx);
@@ -5295,15 +5527,15 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
ASSERT(state_ == CompareIC::HEAP_NUMBERS);
Label generic_stub;
- Label unordered;
+ Label unordered, maybe_undefined1, maybe_undefined2;
Label miss;
Condition either_smi = masm->CheckEitherSmi(rax, rdx);
__ j(either_smi, &generic_stub, Label::kNear);
__ CmpObjectType(rax, HEAP_NUMBER_TYPE, rcx);
- __ j(not_equal, &miss, Label::kNear);
+ __ j(not_equal, &maybe_undefined1, Label::kNear);
__ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
- __ j(not_equal, &miss, Label::kNear);
+ __ j(not_equal, &maybe_undefined2, Label::kNear);
// Load left and right operand
__ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
@@ -5324,11 +5556,25 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) {
__ ret(0);
__ bind(&unordered);
-
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
__ bind(&generic_stub);
__ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
+ __ bind(&maybe_undefined1);
+ if (Token::IsOrderedRelationalCompareOp(op_)) {
+ __ Cmp(rax, masm->isolate()->factory()->undefined_value());
+ __ j(not_equal, &miss);
+ __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
+ __ j(not_equal, &maybe_undefined2, Label::kNear);
+ __ jmp(&unordered);
+ }
+
+ __ bind(&maybe_undefined2);
+ if (Token::IsOrderedRelationalCompareOp(op_)) {
+ __ Cmp(rdx, masm->isolate()->factory()->undefined_value());
+ __ j(equal, &unordered);
+ }
+
__ bind(&miss);
GenerateMiss(masm);
}
@@ -5379,9 +5625,10 @@ void ICCompareStub::GenerateSymbols(MacroAssembler* masm) {
void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
ASSERT(state_ == CompareIC::STRINGS);
- ASSERT(GetCondition() == equal);
Label miss;
+ bool equality = Token::IsEqualityOp(op_);
+
// Registers containing left and right operands respectively.
Register left = rdx;
Register right = rax;
@@ -5419,24 +5666,31 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
// Check that both strings are symbols. If they are, we're done
// because we already know they are not identical.
- Label do_compare;
- STATIC_ASSERT(kSymbolTag != 0);
- __ and_(tmp1, tmp2);
- __ testb(tmp1, Immediate(kIsSymbolMask));
- __ j(zero, &do_compare, Label::kNear);
- // Make sure rax is non-zero. At this point input operands are
- // guaranteed to be non-zero.
- ASSERT(right.is(rax));
- __ ret(0);
+ if (equality) {
+ Label do_compare;
+ STATIC_ASSERT(kSymbolTag != 0);
+ __ and_(tmp1, tmp2);
+ __ testb(tmp1, Immediate(kIsSymbolMask));
+ __ j(zero, &do_compare, Label::kNear);
+ // Make sure rax is non-zero. At this point input operands are
+ // guaranteed to be non-zero.
+ ASSERT(right.is(rax));
+ __ ret(0);
+ __ bind(&do_compare);
+ }
// Check that both strings are sequential ASCII.
Label runtime;
- __ bind(&do_compare);
__ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);
// Compare flat ASCII strings. Returns when done.
- StringCompareStub::GenerateFlatAsciiStringEquals(
- masm, left, right, tmp1, tmp2);
+ if (equality) {
+ StringCompareStub::GenerateFlatAsciiStringEquals(
+ masm, left, right, tmp1, tmp2);
+ } else {
+ StringCompareStub::GenerateCompareFlatAsciiStrings(
+ masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
+ }
// Handle more complex cases in runtime.
__ bind(&runtime);
@@ -5444,7 +5698,11 @@ void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
__ push(left);
__ push(right);
__ push(tmp1);
- __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+ if (equality) {
+ __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
+ } else {
+ __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
+ }
__ bind(&miss);
GenerateMiss(masm);
@@ -5459,14 +5717,8 @@ void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
__ CmpObjectType(rax, JS_OBJECT_TYPE, rcx);
__ j(not_equal, &miss, Label::kNear);
- __ testb(FieldOperand(rcx, Map::kBitField2Offset),
- Immediate(1 << Map::kUseUserObjectComparison));
- __ j(not_zero, &miss, Label::kNear);
__ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
__ j(not_equal, &miss, Label::kNear);
- __ testb(FieldOperand(rcx, Map::kBitField2Offset),
- Immediate(1 << Map::kUseUserObjectComparison));
- __ j(not_zero, &miss, Label::kNear);
ASSERT(GetCondition() == equal);
__ subq(rax, rdx);
@@ -5477,32 +5729,45 @@ void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
}
-void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
- // Save the registers.
- __ pop(rcx);
- __ push(rdx);
- __ push(rax);
- __ push(rcx);
+void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
+ Label miss;
+ Condition either_smi = masm->CheckEitherSmi(rdx, rax);
+ __ j(either_smi, &miss, Label::kNear);
+
+ __ movq(rcx, FieldOperand(rax, HeapObject::kMapOffset));
+ __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
+ __ Cmp(rcx, known_map_);
+ __ j(not_equal, &miss, Label::kNear);
+ __ Cmp(rbx, known_map_);
+ __ j(not_equal, &miss, Label::kNear);
+
+ __ subq(rax, rdx);
+ __ ret(0);
+
+ __ bind(&miss);
+ GenerateMiss(masm);
+}
+
- // Call the runtime system in a fresh internal frame.
- ExternalReference miss =
- ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
+void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
{
+ // Call the runtime system in a fresh internal frame.
+ ExternalReference miss =
+ ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
+
FrameScope scope(masm, StackFrame::INTERNAL);
__ push(rdx);
__ push(rax);
+ __ push(rdx);
+ __ push(rax);
__ Push(Smi::FromInt(op_));
__ CallExternalReference(miss, 3);
- }
-
- // Compute the entry point of the rewritten stub.
- __ lea(rdi, FieldOperand(rax, Code::kHeaderSize));
- // Restore registers.
- __ pop(rcx);
- __ pop(rax);
- __ pop(rdx);
- __ push(rcx);
+ // Compute the entry point of the rewritten stub.
+ __ lea(rdi, FieldOperand(rax, Code::kHeaderSize));
+ __ pop(rax);
+ __ pop(rdx);
+ }
// Do a tail call to the rewritten stub.
__ jmp(rdi);
@@ -5519,7 +5784,7 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
// not equal to the name and kProbes-th slot is not used (its name is the
// undefined value), it guarantees the hash table doesn't contain the
// property. It's true even if some slots represent deleted properties
- // (their names are the null value).
+ // (their names are the hole value).
for (int i = 0; i < kInlinedProbes; i++) {
// r0 points to properties hash.
// Compute the masked index: (hash + i + i * i) & mask.
@@ -5548,11 +5813,18 @@ void StringDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
__ Cmp(entity_name, Handle<String>(name));
__ j(equal, miss);
+ Label the_hole;
+ // Check for the hole and skip.
+ __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
+ __ j(equal, &the_hole, Label::kNear);
+
// Check if the entry name is not a symbol.
__ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
__ testb(FieldOperand(entity_name, Map::kInstanceTypeOffset),
Immediate(kIsSymbolMask));
__ j(zero, miss);
+
+ __ bind(&the_hole);
}
StringDictionaryLookupStub stub(properties,
@@ -5719,38 +5991,45 @@ struct AheadOfTimeWriteBarrierStubList {
};
+#define REG(Name) { kRegister_ ## Name ## _Code }
+
struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
// Used in RegExpExecStub.
- { rbx, rax, rdi, EMIT_REMEMBERED_SET },
+ { REG(rbx), REG(rax), REG(rdi), EMIT_REMEMBERED_SET },
// Used in CompileArrayPushCall.
- { rbx, rcx, rdx, EMIT_REMEMBERED_SET },
+ { REG(rbx), REG(rcx), REG(rdx), EMIT_REMEMBERED_SET },
// Used in CompileStoreGlobal.
- { rbx, rcx, rdx, OMIT_REMEMBERED_SET },
+ { REG(rbx), REG(rcx), REG(rdx), OMIT_REMEMBERED_SET },
// Used in StoreStubCompiler::CompileStoreField and
// KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
- { rdx, rcx, rbx, EMIT_REMEMBERED_SET },
+ { REG(rdx), REG(rcx), REG(rbx), EMIT_REMEMBERED_SET },
// GenerateStoreField calls the stub with two different permutations of
// registers. This is the second.
- { rbx, rcx, rdx, EMIT_REMEMBERED_SET },
+ { REG(rbx), REG(rcx), REG(rdx), EMIT_REMEMBERED_SET },
// StoreIC::GenerateNormal via GenerateDictionaryStore.
- { rbx, r8, r9, EMIT_REMEMBERED_SET },
+ { REG(rbx), REG(r8), REG(r9), EMIT_REMEMBERED_SET },
// KeyedStoreIC::GenerateGeneric.
- { rbx, rdx, rcx, EMIT_REMEMBERED_SET},
+ { REG(rbx), REG(rdx), REG(rcx), EMIT_REMEMBERED_SET},
// KeyedStoreStubCompiler::GenerateStoreFastElement.
- { rdi, rdx, rcx, EMIT_REMEMBERED_SET},
+ { REG(rdi), REG(rbx), REG(rcx), EMIT_REMEMBERED_SET},
+ { REG(rdx), REG(rdi), REG(rbx), EMIT_REMEMBERED_SET},
// ElementsTransitionGenerator::GenerateSmiOnlyToObject
// and ElementsTransitionGenerator::GenerateSmiOnlyToObject
// and ElementsTransitionGenerator::GenerateDoubleToObject
- { rdx, rbx, rdi, EMIT_REMEMBERED_SET},
+ { REG(rdx), REG(rbx), REG(rdi), EMIT_REMEMBERED_SET},
+ { REG(rdx), REG(rbx), REG(rdi), OMIT_REMEMBERED_SET},
// ElementsTransitionGenerator::GenerateSmiOnlyToDouble
// and ElementsTransitionGenerator::GenerateDoubleToObject
- { rdx, r11, r15, EMIT_REMEMBERED_SET},
+ { REG(rdx), REG(r11), REG(r15), EMIT_REMEMBERED_SET},
// ElementsTransitionGenerator::GenerateDoubleToObject
- { r11, rax, r15, EMIT_REMEMBERED_SET},
+ { REG(r11), REG(rax), REG(r15), EMIT_REMEMBERED_SET},
+ // StoreArrayLiteralElementStub::Generate
+ { REG(rbx), REG(rax), REG(rcx), EMIT_REMEMBERED_SET},
// Null termination.
- { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET}
+ { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
};
+#undef REG
bool RecordWriteStub::IsPregenerated() {
for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
@@ -5994,6 +6273,81 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
// Fall through when we need to inform the incremental marker.
}
+
+void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- rax : element value to store
+ // -- rbx : array literal
+ // -- rdi : map of array literal
+ // -- rcx : element index as smi
+ // -- rdx : array literal index in function
+ // -- rsp[0] : return address
+ // -----------------------------------
+
+ Label element_done;
+ Label double_elements;
+ Label smi_element;
+ Label slow_elements;
+ Label fast_elements;
+
+ __ CheckFastElements(rdi, &double_elements);
+
+ // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
+ __ JumpIfSmi(rax, &smi_element);
+ __ CheckFastSmiOnlyElements(rdi, &fast_elements);
+
+ // Store into the array literal requires a elements transition. Call into
+ // the runtime.
+
+ __ bind(&slow_elements);
+ __ pop(rdi); // Pop return address and remember to put back later for tail
+ // call.
+ __ push(rbx);
+ __ push(rcx);
+ __ push(rax);
+ __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
+ __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
+ __ push(rdx);
+ __ push(rdi); // Return return address so that tail call returns to right
+ // place.
+ __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
+
+ // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
+ __ bind(&fast_elements);
+ __ SmiToInteger32(kScratchRegister, rcx);
+ __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
+ __ lea(rcx, FieldOperand(rbx, kScratchRegister, times_pointer_size,
+ FixedArrayBase::kHeaderSize));
+ __ movq(Operand(rcx, 0), rax);
+ // Update the write barrier for the array store.
+ __ RecordWrite(rbx, rcx, rax,
+ kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ __ ret(0);
+
+ // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
+ // FAST_ELEMENTS, and value is Smi.
+ __ bind(&smi_element);
+ __ SmiToInteger32(kScratchRegister, rcx);
+ __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
+ __ movq(FieldOperand(rbx, kScratchRegister, times_pointer_size,
+ FixedArrayBase::kHeaderSize), rax);
+ __ ret(0);
+
+ // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
+ __ bind(&double_elements);
+
+ __ movq(r9, FieldOperand(rbx, JSObject::kElementsOffset));
+ __ SmiToInteger32(r11, rcx);
+ __ StoreNumberToDoubleElements(rax,
+ r9,
+ r11,
+ xmm0,
+ &slow_elements);
+ __ ret(0);
+}
+
#undef __
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/x64/code-stubs-x64.h b/src/3rdparty/v8/src/x64/code-stubs-x64.h
index 34435d7..6a1a18f 100644
--- a/src/3rdparty/v8/src/x64/code-stubs-x64.h
+++ b/src/3rdparty/v8/src/x64/code-stubs-x64.h
@@ -48,6 +48,8 @@ class TranscendentalCacheStub: public CodeStub {
ArgumentType argument_type)
: type_(type), argument_type_(argument_type) {}
void Generate(MacroAssembler* masm);
+ static void GenerateOperation(MacroAssembler* masm,
+ TranscendentalCache::Type type);
private:
TranscendentalCache::Type type_;
ArgumentType argument_type_;
@@ -55,7 +57,6 @@ class TranscendentalCacheStub: public CodeStub {
Major MajorKey() { return TranscendentalCache; }
int MinorKey() { return type_ | argument_type_; }
Runtime::FunctionId RuntimeFunction();
- void GenerateOperation(MacroAssembler* masm);
};
@@ -150,7 +151,7 @@ class UnaryOpStub: public CodeStub {
return UnaryOpIC::ToState(operand_type_);
}
- virtual void FinishCode(Code* code) {
+ virtual void FinishCode(Handle<Code> code) {
code->set_unary_op_type(operand_type_);
}
};
@@ -236,7 +237,7 @@ class BinaryOpStub: public CodeStub {
return BinaryOpIC::ToState(operands_type_);
}
- virtual void FinishCode(Code* code) {
+ virtual void FinishCode(Handle<Code> code) {
code->set_binary_op_type(operands_type_);
code->set_binary_op_result_type(result_type_);
}
@@ -693,13 +694,6 @@ class RecordWriteStub: public CodeStub {
SaveFPRegsModeBits::encode(save_fp_regs_mode_);
}
- bool MustBeInStubCache() {
- // All stubs must be registered in the stub cache
- // otherwise IncrementalMarker would not be able to find
- // and patch it.
- return true;
- }
-
void Activate(Code* code) {
code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code);
}
diff --git a/src/3rdparty/v8/src/x64/codegen-x64.cc b/src/3rdparty/v8/src/x64/codegen-x64.cc
index a4880d2..a8d39b2 100644
--- a/src/3rdparty/v8/src/x64/codegen-x64.cc
+++ b/src/3rdparty/v8/src/x64/codegen-x64.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -54,6 +54,75 @@ void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
#define __ masm.
+
+UnaryMathFunction CreateTranscendentalFunction(TranscendentalCache::Type type) {
+ size_t actual_size;
+ // Allocate buffer in executable space.
+ byte* buffer = static_cast<byte*>(OS::Allocate(1 * KB,
+ &actual_size,
+ true));
+ if (buffer == NULL) {
+ // Fallback to library function if function cannot be created.
+ switch (type) {
+ case TranscendentalCache::SIN: return &sin;
+ case TranscendentalCache::COS: return &cos;
+ case TranscendentalCache::TAN: return &tan;
+ case TranscendentalCache::LOG: return &log;
+ default: UNIMPLEMENTED();
+ }
+ }
+
+ MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
+ // xmm0: raw double input.
+ // Move double input into registers.
+ __ push(rbx);
+ __ push(rdi);
+ __ movq(rbx, xmm0);
+ __ push(rbx);
+ __ fld_d(Operand(rsp, 0));
+ TranscendentalCacheStub::GenerateOperation(&masm, type);
+ // The return value is expected to be in xmm0.
+ __ fstp_d(Operand(rsp, 0));
+ __ pop(rbx);
+ __ movq(xmm0, rbx);
+ __ pop(rdi);
+ __ pop(rbx);
+ __ Ret();
+
+ CodeDesc desc;
+ masm.GetCode(&desc);
+ ASSERT(desc.reloc_size == 0);
+
+ CPU::FlushICache(buffer, actual_size);
+ OS::ProtectCode(buffer, actual_size);
+ return FUNCTION_CAST<UnaryMathFunction>(buffer);
+}
+
+
+UnaryMathFunction CreateSqrtFunction() {
+ size_t actual_size;
+ // Allocate buffer in executable space.
+ byte* buffer = static_cast<byte*>(OS::Allocate(1 * KB,
+ &actual_size,
+ true));
+ if (buffer == NULL) return &sqrt;
+
+ MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
+ // xmm0: raw double input.
+ // Move double input into registers.
+ __ sqrtsd(xmm0, xmm0);
+ __ Ret();
+
+ CodeDesc desc;
+ masm.GetCode(&desc);
+ ASSERT(desc.reloc_size == 0);
+
+ CPU::FlushICache(buffer, actual_size);
+ OS::ProtectCode(buffer, actual_size);
+ return FUNCTION_CAST<UnaryMathFunction>(buffer);
+}
+
+
#ifdef _WIN64
typedef double (*ModuloFunction)(double, double);
// Define custom fmod implementation.
@@ -182,19 +251,28 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
// -- rsp[0] : return address
// -----------------------------------
// The fail label is not actually used since we do not allocate.
- Label allocated, cow_array;
+ Label allocated, new_backing_store, only_change_map, done;
- // Check backing store for COW-ness. If the negative case, we do not have to
- // allocate a new array, since FixedArray and FixedDoubleArray do not differ
- // in size.
+ // Check for empty arrays, which only require a map transition and no changes
+ // to the backing store.
__ movq(r8, FieldOperand(rdx, JSObject::kElementsOffset));
+ __ CompareRoot(r8, Heap::kEmptyFixedArrayRootIndex);
+ __ j(equal, &only_change_map);
+
+ // Check backing store for COW-ness. For COW arrays we have to
+ // allocate a new backing store.
__ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset));
__ CompareRoot(FieldOperand(r8, HeapObject::kMapOffset),
Heap::kFixedCOWArrayMapRootIndex);
- __ j(equal, &cow_array);
+ __ j(equal, &new_backing_store);
+ // Check if the backing store is in new-space. If not, we need to allocate
+ // a new one since the old one is in pointer-space.
+ // If in new space, we can reuse the old backing store because it is
+ // the same size.
+ __ JumpIfNotInNewSpace(r8, rdi, &new_backing_store);
+
__ movq(r14, r8); // Destination array equals source array.
- __ bind(&allocated);
// r8 : source FixedArray
// r9 : elements array length
// r14: destination FixedDoubleArray
@@ -202,6 +280,7 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
__ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
__ movq(FieldOperand(r14, HeapObject::kMapOffset), rdi);
+ __ bind(&allocated);
// Set transitioned map.
__ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
__ RecordWriteField(rdx,
@@ -222,10 +301,13 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
// r15: the-hole NaN
__ jmp(&entry);
- // Allocate new array if the source array is a COW array.
- __ bind(&cow_array);
+ // Allocate new backing store.
+ __ bind(&new_backing_store);
__ lea(rdi, Operand(r9, times_pointer_size, FixedArray::kHeaderSize));
__ AllocateInNewSpace(rdi, r14, r11, r15, fail, TAG_OBJECT);
+ // Set backing store's map
+ __ LoadRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
+ __ movq(FieldOperand(r14, HeapObject::kMapOffset), rdi);
// Set receiver's backing store.
__ movq(FieldOperand(rdx, JSObject::kElementsOffset), r14);
__ movq(r11, r14);
@@ -241,6 +323,18 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
__ movq(FieldOperand(r14, FixedDoubleArray::kLengthOffset), r11);
__ jmp(&allocated);
+ __ bind(&only_change_map);
+ // Set transitioned map.
+ __ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
+ __ RecordWriteField(rdx,
+ HeapObject::kMapOffset,
+ rbx,
+ rdi,
+ kDontSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+ __ jmp(&done);
+
// Conversion loop.
__ bind(&loop);
__ movq(rbx,
@@ -254,10 +348,18 @@ void ElementsTransitionGenerator::GenerateSmiOnlyToDouble(
xmm0);
__ jmp(&entry);
__ bind(&convert_hole);
+
+ if (FLAG_debug_code) {
+ __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
+ __ Assert(equal, "object found in smi-only array");
+ }
+
__ movq(FieldOperand(r14, r9, times_8, FixedDoubleArray::kHeaderSize), r15);
__ bind(&entry);
__ decq(r9);
__ j(not_sign, &loop);
+
+ __ bind(&done);
}
@@ -270,7 +372,14 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
// -- rdx : receiver
// -- rsp[0] : return address
// -----------------------------------
- Label loop, entry, convert_hole, gc_required;
+ Label loop, entry, convert_hole, gc_required, only_change_map;
+
+ // Check for empty arrays, which only require a map transition and no changes
+ // to the backing store.
+ __ movq(r8, FieldOperand(rdx, JSObject::kElementsOffset));
+ __ CompareRoot(r8, Heap::kEmptyFixedArrayRootIndex);
+ __ j(equal, &only_change_map);
+
__ push(rax);
__ movq(r8, FieldOperand(rdx, JSObject::kElementsOffset));
@@ -339,15 +448,6 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
__ decq(r9);
__ j(not_sign, &loop);
- // Set transitioned map.
- __ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
- __ RecordWriteField(rdx,
- HeapObject::kMapOffset,
- rbx,
- rdi,
- kDontSaveFPRegs,
- EMIT_REMEMBERED_SET,
- OMIT_SMI_CHECK);
// Replace receiver's backing store with newly created and filled FixedArray.
__ movq(FieldOperand(rdx, JSObject::kElementsOffset), r11);
__ RecordWriteField(rdx,
@@ -359,6 +459,120 @@ void ElementsTransitionGenerator::GenerateDoubleToObject(
OMIT_SMI_CHECK);
__ pop(rax);
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+
+ __ bind(&only_change_map);
+ // Set transitioned map.
+ __ movq(FieldOperand(rdx, HeapObject::kMapOffset), rbx);
+ __ RecordWriteField(rdx,
+ HeapObject::kMapOffset,
+ rbx,
+ rdi,
+ kDontSaveFPRegs,
+ OMIT_REMEMBERED_SET,
+ OMIT_SMI_CHECK);
+}
+
+
+void StringCharLoadGenerator::Generate(MacroAssembler* masm,
+ Register string,
+ Register index,
+ Register result,
+ Label* call_runtime) {
+ // Fetch the instance type of the receiver into result register.
+ __ movq(result, FieldOperand(string, HeapObject::kMapOffset));
+ __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
+
+ // We need special handling for indirect strings.
+ Label check_sequential;
+ __ testb(result, Immediate(kIsIndirectStringMask));
+ __ j(zero, &check_sequential, Label::kNear);
+
+ // Dispatch on the indirect string shape: slice or cons.
+ Label cons_string;
+ __ testb(result, Immediate(kSlicedNotConsMask));
+ __ j(zero, &cons_string, Label::kNear);
+
+ // Handle slices.
+ Label indirect_string_loaded;
+ __ SmiToInteger32(result, FieldOperand(string, SlicedString::kOffsetOffset));
+ __ addq(index, result);
+ __ movq(string, FieldOperand(string, SlicedString::kParentOffset));
+ __ jmp(&indirect_string_loaded, Label::kNear);
+
+ // Handle cons strings.
+ // Check whether the right hand side is the empty string (i.e. if
+ // this is really a flat string in a cons string). If that is not
+ // the case we would rather go to the runtime system now to flatten
+ // the string.
+ __ bind(&cons_string);
+ __ CompareRoot(FieldOperand(string, ConsString::kSecondOffset),
+ Heap::kEmptyStringRootIndex);
+ __ j(not_equal, call_runtime);
+ __ movq(string, FieldOperand(string, ConsString::kFirstOffset));
+
+ __ bind(&indirect_string_loaded);
+ __ movq(result, FieldOperand(string, HeapObject::kMapOffset));
+ __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
+
+ // Distinguish sequential and external strings. Only these two string
+ // representations can reach here (slices and flat cons strings have been
+ // reduced to the underlying sequential or external string).
+ Label seq_string;
+ __ bind(&check_sequential);
+ STATIC_ASSERT(kSeqStringTag == 0);
+ __ testb(result, Immediate(kStringRepresentationMask));
+ __ j(zero, &seq_string, Label::kNear);
+
+ // Handle external strings.
+ Label ascii_external, done;
+ if (FLAG_debug_code) {
+ // Assert that we do not have a cons or slice (indirect strings) here.
+ // Sequential strings have already been ruled out.
+ __ testb(result, Immediate(kIsIndirectStringMask));
+ __ Assert(zero, "external string expected, but not found");
+ }
+ // Rule out short external strings.
+ STATIC_CHECK(kShortExternalStringTag != 0);
+ __ testb(result, Immediate(kShortExternalStringTag));
+ __ j(not_zero, call_runtime);
+ // Check encoding.
+ STATIC_ASSERT(kTwoByteStringTag == 0);
+ __ testb(result, Immediate(kStringEncodingMask));
+ __ movq(result, FieldOperand(string, ExternalString::kResourceDataOffset));
+ __ j(not_equal, &ascii_external, Label::kNear);
+ // Two-byte string.
+ __ movzxwl(result, Operand(result, index, times_2, 0));
+ __ jmp(&done, Label::kNear);
+ __ bind(&ascii_external);
+ // Ascii string.
+ __ movzxbl(result, Operand(result, index, times_1, 0));
+ __ jmp(&done, Label::kNear);
+
+ // Dispatch on the encoding: ASCII or two-byte.
+ Label ascii;
+ __ bind(&seq_string);
+ STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+ STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
+ __ testb(result, Immediate(kStringEncodingMask));
+ __ j(not_zero, &ascii, Label::kNear);
+
+ // Two-byte string.
+ // Load the two-byte character code into the result register.
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
+ __ movzxwl(result, FieldOperand(string,
+ index,
+ times_2,
+ SeqTwoByteString::kHeaderSize));
+ __ jmp(&done, Label::kNear);
+
+ // ASCII string.
+ // Load the byte into the result register.
+ __ bind(&ascii);
+ __ movzxbl(result, FieldOperand(string,
+ index,
+ times_1,
+ SeqAsciiString::kHeaderSize));
+ __ bind(&done);
}
#undef __
diff --git a/src/3rdparty/v8/src/x64/codegen-x64.h b/src/3rdparty/v8/src/x64/codegen-x64.h
index a0648ce..2e80751 100644
--- a/src/3rdparty/v8/src/x64/codegen-x64.h
+++ b/src/3rdparty/v8/src/x64/codegen-x64.h
@@ -69,6 +69,21 @@ class CodeGenerator: public AstVisitor {
};
+class StringCharLoadGenerator : public AllStatic {
+ public:
+ // Generates the code for handling different string types and loading the
+ // indexed character into |result|. We expect |index| as untagged input and
+ // |result| as untagged output.
+ static void Generate(MacroAssembler* masm,
+ Register string,
+ Register index,
+ Register result,
+ Label* call_runtime);
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(StringCharLoadGenerator);
+};
+
} } // namespace v8::internal
#endif // V8_X64_CODEGEN_X64_H_
diff --git a/src/3rdparty/v8/src/x64/cpu-x64.cc b/src/3rdparty/v8/src/x64/cpu-x64.cc
index 9dc3b4f..80e22c6 100644
--- a/src/3rdparty/v8/src/x64/cpu-x64.cc
+++ b/src/3rdparty/v8/src/x64/cpu-x64.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -41,7 +41,7 @@
namespace v8 {
namespace internal {
-void CPU::Setup() {
+void CPU::SetUp() {
CpuFeatures::Probe();
}
diff --git a/src/3rdparty/v8/src/x64/debug-x64.cc b/src/3rdparty/v8/src/x64/debug-x64.cc
index 2149fc2..eec83d9 100644
--- a/src/3rdparty/v8/src/x64/debug-x64.cc
+++ b/src/3rdparty/v8/src/x64/debug-x64.cc
@@ -229,33 +229,56 @@ void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) {
}
-void Debug::GenerateConstructCallDebugBreak(MacroAssembler* masm) {
+void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) {
// Register state just before return from JS function (from codegen-x64.cc).
- // rax is the actual number of arguments not encoded as a smi, see comment
- // above IC call.
// ----------- S t a t e -------------
- // -- rax: number of arguments
+ // -- rax: return value
// -----------------------------------
- // The number of arguments in rax is not smi encoded.
- Generate_DebugBreakCallHelper(masm, rdi.bit(), rax.bit(), false);
+ Generate_DebugBreakCallHelper(masm, rax.bit(), 0, true);
}
-void Debug::GenerateReturnDebugBreak(MacroAssembler* masm) {
- // Register state just before return from JS function (from codegen-x64.cc).
+void Debug::GenerateCallFunctionStubDebugBreak(MacroAssembler* masm) {
+ // Register state for CallFunctionStub (from code-stubs-x64.cc).
// ----------- S t a t e -------------
- // -- rax: return value
+ // -- rdi : function
// -----------------------------------
- Generate_DebugBreakCallHelper(masm, rax.bit(), 0, true);
+ Generate_DebugBreakCallHelper(masm, rdi.bit(), 0, false);
+}
+
+
+void Debug::GenerateCallFunctionStubRecordDebugBreak(MacroAssembler* masm) {
+ // Register state for CallFunctionStub (from code-stubs-x64.cc).
+ // ----------- S t a t e -------------
+ // -- rdi : function
+ // -- rbx: cache cell for call target
+ // -----------------------------------
+ Generate_DebugBreakCallHelper(masm, rbx.bit() | rdi.bit(), 0, false);
+}
+
+
+void Debug::GenerateCallConstructStubDebugBreak(MacroAssembler* masm) {
+ // Register state for CallConstructStub (from code-stubs-x64.cc).
+ // rax is the actual number of arguments not encoded as a smi, see comment
+ // above IC call.
+ // ----------- S t a t e -------------
+ // -- rax: number of arguments
+ // -----------------------------------
+ // The number of arguments in rax is not smi encoded.
+ Generate_DebugBreakCallHelper(masm, rdi.bit(), rax.bit(), false);
}
-void Debug::GenerateStubNoRegistersDebugBreak(MacroAssembler* masm) {
- // Register state for stub CallFunction (from CallFunctionStub in ic-x64.cc).
+void Debug::GenerateCallConstructStubRecordDebugBreak(MacroAssembler* masm) {
+ // Register state for CallConstructStub (from code-stubs-x64.cc).
+ // rax is the actual number of arguments not encoded as a smi, see comment
+ // above IC call.
// ----------- S t a t e -------------
- // No registers used on entry.
+ // -- rax: number of arguments
+ // -- rbx: cache cell for call target
// -----------------------------------
- Generate_DebugBreakCallHelper(masm, 0, 0, false);
+ // The number of arguments in rax is not smi encoded.
+ Generate_DebugBreakCallHelper(masm, rbx.bit() | rdi.bit(), rax.bit(), false);
}
@@ -264,9 +287,7 @@ void Debug::GenerateSlot(MacroAssembler* masm) {
Label check_codesize;
__ bind(&check_codesize);
__ RecordDebugBreakSlot();
- for (int i = 0; i < Assembler::kDebugBreakSlotLength; i++) {
- __ nop();
- }
+ __ Nop(Assembler::kDebugBreakSlotLength);
ASSERT_EQ(Assembler::kDebugBreakSlotLength,
masm->SizeOfCodeGeneratedSince(&check_codesize));
}
diff --git a/src/3rdparty/v8/src/x64/deoptimizer-x64.cc b/src/3rdparty/v8/src/x64/deoptimizer-x64.cc
index d0a052b..40b9a1c 100644
--- a/src/3rdparty/v8/src/x64/deoptimizer-x64.cc
+++ b/src/3rdparty/v8/src/x64/deoptimizer-x64.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -42,67 +42,7 @@ const int Deoptimizer::table_entry_size_ = 10;
int Deoptimizer::patch_size() {
- return MacroAssembler::kCallInstructionLength;
-}
-
-
-#ifdef DEBUG
-// Overwrites code with int3 instructions.
-static void ZapCodeRange(Address from, Address to) {
- CHECK(from <= to);
- int length = static_cast<int>(to - from);
- CodePatcher destroyer(from, length);
- while (length-- > 0) {
- destroyer.masm()->int3();
- }
-}
-#endif
-
-
-// Iterate through the entries of a SafepointTable that corresponds to
-// deoptimization points.
-class SafepointTableDeoptimiztionEntryIterator {
- public:
- explicit SafepointTableDeoptimiztionEntryIterator(Code* code)
- : code_(code), table_(code), index_(-1), limit_(table_.length()) {
- FindNextIndex();
- }
-
- SafepointEntry Next(Address* pc) {
- if (index_ >= limit_) {
- *pc = NULL;
- return SafepointEntry(); // Invalid entry.
- }
- *pc = code_->instruction_start() + table_.GetPcOffset(index_);
- SafepointEntry entry = table_.GetEntry(index_);
- FindNextIndex();
- return entry;
- }
-
- private:
- void FindNextIndex() {
- ASSERT(index_ < limit_);
- while (++index_ < limit_) {
- if (table_.GetEntry(index_).deoptimization_index() !=
- Safepoint::kNoDeoptimizationIndex) {
- return;
- }
- }
- }
-
- Code* code_;
- SafepointTable table_;
- // Index of next deoptimization entry. If negative after calling
- // FindNextIndex, there are no more, and Next will return an invalid
- // SafepointEntry.
- int index_;
- // Table length.
- int limit_;
-};
-
-
-void Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code) {
- // TODO(1276): Implement.
+ return Assembler::kCallInstructionLength;
}
@@ -119,84 +59,34 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
// code patching below, and is not needed any more.
code->InvalidateRelocation();
- // For each return after a safepoint insert a absolute call to the
+ // For each LLazyBailout instruction insert a absolute call to the
// corresponding deoptimization entry, or a short call to an absolute
// jump if space is short. The absolute jumps are put in a table just
// before the safepoint table (space was allocated there when the Code
// object was created, if necessary).
Address instruction_start = function->code()->instruction_start();
- Address jump_table_address =
- instruction_start + function->code()->safepoint_table_offset();
-#ifdef DEBUG
- Address previous_pc = instruction_start;
-#endif
-
- SafepointTableDeoptimiztionEntryIterator deoptimizations(function->code());
- Address entry_pc = NULL;
-
- SafepointEntry current_entry = deoptimizations.Next(&entry_pc);
- while (current_entry.is_valid()) {
- int gap_code_size = current_entry.gap_code_size();
- unsigned deoptimization_index = current_entry.deoptimization_index();
-
#ifdef DEBUG
- // Destroy the code which is not supposed to run again.
- ZapCodeRange(previous_pc, entry_pc);
+ Address prev_call_address = NULL;
#endif
+ DeoptimizationInputData* deopt_data =
+ DeoptimizationInputData::cast(code->deoptimization_data());
+ for (int i = 0; i < deopt_data->DeoptCount(); i++) {
+ if (deopt_data->Pc(i)->value() == -1) continue;
// Position where Call will be patched in.
- Address call_address = entry_pc + gap_code_size;
- // End of call instruction, if using a direct call to a 64-bit address.
- Address call_end_address =
- call_address + MacroAssembler::kCallInstructionLength;
-
- // Find next deoptimization entry, if any.
- Address next_pc = NULL;
- SafepointEntry next_entry = deoptimizations.Next(&next_pc);
-
- if (!next_entry.is_valid() || next_pc >= call_end_address) {
- // Room enough to write a long call instruction.
- CodePatcher patcher(call_address, Assembler::kCallInstructionLength);
- patcher.masm()->Call(GetDeoptimizationEntry(deoptimization_index, LAZY),
- RelocInfo::NONE);
+ Address call_address = instruction_start + deopt_data->Pc(i)->value();
+ // There is room enough to write a long call instruction because we pad
+ // LLazyBailout instructions with nops if necessary.
+ CodePatcher patcher(call_address, Assembler::kCallInstructionLength);
+ patcher.masm()->Call(GetDeoptimizationEntry(i, LAZY), RelocInfo::NONE);
+ ASSERT(prev_call_address == NULL ||
+ call_address >= prev_call_address + patch_size());
+ ASSERT(call_address + patch_size() <= code->instruction_end());
#ifdef DEBUG
- previous_pc = call_end_address;
+ prev_call_address = call_address;
#endif
- } else {
- // Not room enough for a long Call instruction. Write a short call
- // instruction to a long jump placed elsewhere in the code.
-#ifdef DEBUG
- Address short_call_end_address =
- call_address + MacroAssembler::kShortCallInstructionLength;
-#endif
- ASSERT(next_pc >= short_call_end_address);
-
- // Write jump in jump-table.
- jump_table_address -= MacroAssembler::kJumpInstructionLength;
- CodePatcher jump_patcher(jump_table_address,
- MacroAssembler::kJumpInstructionLength);
- jump_patcher.masm()->Jump(
- GetDeoptimizationEntry(deoptimization_index, LAZY),
- RelocInfo::NONE);
-
- // Write call to jump at call_offset.
- CodePatcher call_patcher(call_address,
- MacroAssembler::kShortCallInstructionLength);
- call_patcher.masm()->call(jump_table_address);
-#ifdef DEBUG
- previous_pc = short_call_end_address;
-#endif
- }
-
- // Continue with next deoptimization entry.
- current_entry = next_entry;
- entry_pc = next_pc;
}
-#ifdef DEBUG
- // Destroy the code which is not supposed to run again.
- ZapCodeRange(previous_pc, jump_table_address);
-#endif
Isolate* isolate = code->GetIsolate();
// Add the deoptimizing code to the list.
@@ -217,22 +107,25 @@ void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
PrintF("[forced deoptimization: ");
function->PrintName();
PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
-#ifdef DEBUG
- if (FLAG_print_code) {
- code->PrintLn();
- }
-#endif
}
}
+static const byte kJnsInstruction = 0x79;
+static const byte kJnsOffset = 0x1f;
+static const byte kJaeInstruction = 0x73;
+static const byte kJaeOffset = 0x07;
+static const byte kCallInstruction = 0xe8;
+static const byte kNopByteOne = 0x66;
+static const byte kNopByteTwo = 0x90;
+
void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
Address pc_after,
Code* check_code,
Code* replacement_code) {
Address call_target_address = pc_after - kIntSize;
- ASSERT(check_code->entry() ==
- Assembler::target_address_at(call_target_address));
+ ASSERT_EQ(check_code->entry(),
+ Assembler::target_address_at(call_target_address));
// The stack check code matches the pattern:
//
// cmp rsp, <limit>
@@ -250,11 +143,16 @@ void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
// test rax, <loop nesting depth>
// ok:
//
- ASSERT(*(call_target_address - 3) == 0x73 && // jae
- *(call_target_address - 2) == 0x07 && // offset
- *(call_target_address - 1) == 0xe8); // call
- *(call_target_address - 3) = 0x90; // nop
- *(call_target_address - 2) = 0x90; // nop
+ if (FLAG_count_based_interrupts) {
+ ASSERT_EQ(kJnsInstruction, *(call_target_address - 3));
+ ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
+ } else {
+ ASSERT_EQ(kJaeInstruction, *(call_target_address - 3));
+ ASSERT_EQ(kJaeOffset, *(call_target_address - 2));
+ }
+ ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
+ *(call_target_address - 3) = kNopByteOne;
+ *(call_target_address - 2) = kNopByteTwo;
Assembler::set_target_address_at(call_target_address,
replacement_code->entry());
@@ -272,11 +170,16 @@ void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
Assembler::target_address_at(call_target_address));
// Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to
// restore the conditional branch.
- ASSERT(*(call_target_address - 3) == 0x90 && // nop
- *(call_target_address - 2) == 0x90 && // nop
- *(call_target_address - 1) == 0xe8); // call
- *(call_target_address - 3) = 0x73; // jae
- *(call_target_address - 2) = 0x07; // offset
+ ASSERT_EQ(kNopByteOne, *(call_target_address - 3));
+ ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));
+ ASSERT_EQ(kCallInstruction, *(call_target_address - 1));
+ if (FLAG_count_based_interrupts) {
+ *(call_target_address - 3) = kJnsInstruction;
+ *(call_target_address - 2) = kJnsOffset;
+ } else {
+ *(call_target_address - 3) = kJaeInstruction;
+ *(call_target_address - 2) = kJaeOffset;
+ }
Assembler::set_target_address_at(call_target_address,
check_code->entry());
@@ -321,12 +224,13 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
ASSERT(Translation::BEGIN == opcode);
USE(opcode);
int count = iterator.Next();
+ iterator.Skip(1); // Drop JS frame count.
ASSERT(count == 1);
USE(count);
opcode = static_cast<Translation::Opcode>(iterator.Next());
USE(opcode);
- ASSERT(Translation::FRAME == opcode);
+ ASSERT(Translation::JS_FRAME == opcode);
unsigned node_id = iterator.Next();
USE(node_id);
ASSERT(node_id == ast_id);
@@ -362,9 +266,7 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
output_ = new FrameDescription*[1];
output_[0] = new(output_frame_size) FrameDescription(
output_frame_size, function_);
-#ifdef DEBUG
- output_[0]->SetKind(Code::OPTIMIZED_FUNCTION);
-#endif
+ output_[0]->SetFrameType(StackFrame::JAVA_SCRIPT);
// Clear the incoming parameters in the optimized frame to avoid
// confusing the garbage collector.
@@ -429,7 +331,7 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
output_[0] = input_;
output_[0]->SetPc(reinterpret_cast<intptr_t>(from_));
} else {
- // Setup the frame pointer and the context pointer.
+ // Set up the frame pointer and the context pointer.
output_[0]->SetRegister(rbp.code(), input_->GetRegister(rbp.code()));
output_[0]->SetRegister(rsi.code(), input_->GetRegister(rsi.code()));
@@ -453,13 +355,219 @@ void Deoptimizer::DoComputeOsrOutputFrame() {
}
-void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
- int frame_index) {
- // Read the ast node id, function, and frame height for this output frame.
- Translation::Opcode opcode =
- static_cast<Translation::Opcode>(iterator->Next());
- USE(opcode);
- ASSERT(Translation::FRAME == opcode);
+void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
+ int frame_index) {
+ JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ unsigned height = iterator->Next();
+ unsigned height_in_bytes = height * kPointerSize;
+ if (FLAG_trace_deopt) {
+ PrintF(" translating arguments adaptor => height=%d\n", height_in_bytes);
+ }
+
+ unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
+ unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+ // Allocate and store the output frame description.
+ FrameDescription* output_frame =
+ new(output_frame_size) FrameDescription(output_frame_size, function);
+ output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);
+
+ // Arguments adaptor can not be topmost or bottommost.
+ ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+ ASSERT(output_[frame_index] == NULL);
+ output_[frame_index] = output_frame;
+
+ // The top address of the frame is computed from the previous
+ // frame's top and this frame's size.
+ intptr_t top_address;
+ top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+ output_frame->SetTop(top_address);
+
+ // Compute the incoming parameter translation.
+ int parameter_count = height;
+ unsigned output_offset = output_frame_size;
+ for (int i = 0; i < parameter_count; ++i) {
+ output_offset -= kPointerSize;
+ DoTranslateCommand(iterator, frame_index, output_offset);
+ }
+
+ // Read caller's PC from the previous frame.
+ output_offset -= kPointerSize;
+ intptr_t callers_pc = output_[frame_index - 1]->GetPc();
+ output_frame->SetFrameSlot(output_offset, callers_pc);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; caller's pc\n",
+ top_address + output_offset, output_offset, callers_pc);
+ }
+
+ // Read caller's FP from the previous frame, and set this frame's FP.
+ output_offset -= kPointerSize;
+ intptr_t value = output_[frame_index - 1]->GetFp();
+ output_frame->SetFrameSlot(output_offset, value);
+ intptr_t fp_value = top_address + output_offset;
+ output_frame->SetFp(fp_value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; caller's fp\n",
+ fp_value, output_offset, value);
+ }
+
+ // A marker value is used in place of the context.
+ output_offset -= kPointerSize;
+ intptr_t context = reinterpret_cast<intptr_t>(
+ Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+ output_frame->SetFrameSlot(output_offset, context);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; context (adaptor sentinel)\n",
+ top_address + output_offset, output_offset, context);
+ }
+
+ // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(function);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; function\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // Number of incoming arguments.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; argc (%d)\n",
+ top_address + output_offset, output_offset, value, height - 1);
+ }
+
+ ASSERT(0 == output_offset);
+
+ Builtins* builtins = isolate_->builtins();
+ Code* adaptor_trampoline =
+ builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
+ intptr_t pc_value = reinterpret_cast<intptr_t>(
+ adaptor_trampoline->instruction_start() +
+ isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
+ output_frame->SetPc(pc_value);
+}
+
+
+void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
+ int frame_index) {
+ JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
+ unsigned height = iterator->Next();
+ unsigned height_in_bytes = height * kPointerSize;
+ if (FLAG_trace_deopt) {
+ PrintF(" translating construct stub => height=%d\n", height_in_bytes);
+ }
+
+ unsigned fixed_frame_size = 6 * kPointerSize;
+ unsigned output_frame_size = height_in_bytes + fixed_frame_size;
+
+ // Allocate and store the output frame description.
+ FrameDescription* output_frame =
+ new(output_frame_size) FrameDescription(output_frame_size, function);
+ output_frame->SetFrameType(StackFrame::CONSTRUCT);
+
+ // Construct stub can not be topmost or bottommost.
+ ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
+ ASSERT(output_[frame_index] == NULL);
+ output_[frame_index] = output_frame;
+
+ // The top address of the frame is computed from the previous
+ // frame's top and this frame's size.
+ intptr_t top_address;
+ top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
+ output_frame->SetTop(top_address);
+
+ // Compute the incoming parameter translation.
+ int parameter_count = height;
+ unsigned output_offset = output_frame_size;
+ for (int i = 0; i < parameter_count; ++i) {
+ output_offset -= kPointerSize;
+ DoTranslateCommand(iterator, frame_index, output_offset);
+ }
+
+ // Read caller's PC from the previous frame.
+ output_offset -= kPointerSize;
+ intptr_t callers_pc = output_[frame_index - 1]->GetPc();
+ output_frame->SetFrameSlot(output_offset, callers_pc);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; caller's pc\n",
+ top_address + output_offset, output_offset, callers_pc);
+ }
+
+ // Read caller's FP from the previous frame, and set this frame's FP.
+ output_offset -= kPointerSize;
+ intptr_t value = output_[frame_index - 1]->GetFp();
+ output_frame->SetFrameSlot(output_offset, value);
+ intptr_t fp_value = top_address + output_offset;
+ output_frame->SetFp(fp_value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; caller's fp\n",
+ fp_value, output_offset, value);
+ }
+
+ // The context can be gotten from the previous frame.
+ output_offset -= kPointerSize;
+ value = output_[frame_index - 1]->GetContext();
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; context\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // A marker value is used in place of the function.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; function (construct sentinel)\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ // Number of incoming arguments.
+ output_offset -= kPointerSize;
+ value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; argc (%d)\n",
+ top_address + output_offset, output_offset, value, height - 1);
+ }
+
+ // The newly allocated object was passed as receiver in the artificial
+ // constructor stub environment created by HEnvironment::CopyForInlining().
+ output_offset -= kPointerSize;
+ value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
+ output_frame->SetFrameSlot(output_offset, value);
+ if (FLAG_trace_deopt) {
+ PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
+ V8PRIxPTR " ; allocated receiver\n",
+ top_address + output_offset, output_offset, value);
+ }
+
+ ASSERT(0 == output_offset);
+
+ Builtins* builtins = isolate_->builtins();
+ Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
+ intptr_t pc = reinterpret_cast<intptr_t>(
+ construct_stub->instruction_start() +
+ isolate_->heap()->construct_stub_deopt_pc_offset()->value());
+ output_frame->SetPc(pc);
+}
+
+
+void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
+ int frame_index) {
int node_id = iterator->Next();
JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
unsigned height = iterator->Next();
@@ -479,9 +587,7 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
// Allocate and store the output frame description.
FrameDescription* output_frame =
new(output_frame_size) FrameDescription(output_frame_size, function);
-#ifdef DEBUG
- output_frame->SetKind(Code::FUNCTION);
-#endif
+ output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);
bool is_bottommost = (0 == frame_index);
bool is_topmost = (output_count_ - 1 == frame_index);
@@ -569,6 +675,7 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator,
value = reinterpret_cast<intptr_t>(function->context());
}
output_frame->SetFrameSlot(output_offset, value);
+ output_frame->SetContext(value);
if (is_topmost) output_frame->SetRegister(rsi.code(), value);
if (FLAG_trace_deopt) {
PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
diff --git a/src/3rdparty/v8/src/x64/disasm-x64.cc b/src/3rdparty/v8/src/x64/disasm-x64.cc
index 1b8871f..7ed81b4 100644
--- a/src/3rdparty/v8/src/x64/disasm-x64.cc
+++ b/src/3rdparty/v8/src/x64/disasm-x64.cc
@@ -34,6 +34,7 @@
#if defined(V8_TARGET_ARCH_X64)
#include "disasm.h"
+#include "lazy-instance.h"
namespace disasm {
@@ -109,6 +110,7 @@ static const ByteMnemonic zero_operands_instr[] = {
{ 0xC3, UNSET_OP_ORDER, "ret" },
{ 0xC9, UNSET_OP_ORDER, "leave" },
{ 0xF4, UNSET_OP_ORDER, "hlt" },
+ { 0xFC, UNSET_OP_ORDER, "cld" },
{ 0xCC, UNSET_OP_ORDER, "int3" },
{ 0x60, UNSET_OP_ORDER, "pushad" },
{ 0x61, UNSET_OP_ORDER, "popad" },
@@ -268,7 +270,8 @@ void InstructionTable::AddJumpConditionalShort() {
}
-static InstructionTable instruction_table;
+static v8::internal::LazyInstance<InstructionTable>::type instruction_table =
+ LAZY_INSTANCE_INITIALIZER;
static InstructionDesc cmov_instructions[16] = {
@@ -312,7 +315,8 @@ class DisassemblerX64 {
rex_(0),
operand_size_(0),
group_1_prefix_(0),
- byte_size_operand_(false) {
+ byte_size_operand_(false),
+ instruction_table_(instruction_table.Pointer()) {
tmp_buffer_[0] = '\0';
}
@@ -341,6 +345,7 @@ class DisassemblerX64 {
byte group_1_prefix_; // 0xF2, 0xF3, or (if no group 1 prefix is present) 0.
// Byte size operand override.
bool byte_size_operand_;
+ const InstructionTable* const instruction_table_;
void setRex(byte rex) {
ASSERT_EQ(0x40, rex & 0xF0);
@@ -910,15 +915,19 @@ int DisassemblerX64::RegisterFPUInstruction(int escape_opcode,
switch (modrm_byte) {
case 0xE0: mnem = "fchs"; break;
case 0xE1: mnem = "fabs"; break;
+ case 0xE3: mnem = "fninit"; break;
case 0xE4: mnem = "ftst"; break;
case 0xE8: mnem = "fld1"; break;
case 0xEB: mnem = "fldpi"; break;
case 0xED: mnem = "fldln2"; break;
case 0xEE: mnem = "fldz"; break;
+ case 0xF0: mnem = "f2xm1"; break;
case 0xF1: mnem = "fyl2x"; break;
+ case 0xF2: mnem = "fptan"; break;
case 0xF5: mnem = "fprem1"; break;
case 0xF7: mnem = "fincstp"; break;
case 0xF8: mnem = "fprem"; break;
+ case 0xFD: mnem = "fscale"; break;
case 0xFE: mnem = "fsin"; break;
case 0xFF: mnem = "fcos"; break;
default: UnimplementedInstruction();
@@ -1034,7 +1043,18 @@ int DisassemblerX64::TwoByteOpcodeInstruction(byte* data) {
}
} else {
get_modrm(*current, &mod, &regop, &rm);
- if (opcode == 0x28) {
+ if (opcode == 0x1f) {
+ current++;
+ if (rm == 4) { // SIB byte present.
+ current++;
+ }
+ if (mod == 1) { // Byte displacement.
+ current += 1;
+ } else if (mod == 2) { // 32-bit displacement.
+ current += 4;
+ } // else no immediate displacement.
+ AppendToBuffer("nop");
+ } else if (opcode == 0x28) {
AppendToBuffer("movapd %s, ", NameOfXMMRegister(regop));
current += PrintRightXMMOperand(current);
} else if (opcode == 0x29) {
@@ -1178,7 +1198,7 @@ int DisassemblerX64::TwoByteOpcodeInstruction(byte* data) {
int mod, regop, rm;
get_modrm(*current, &mod, &regop, &rm);
current++;
- if (regop == 4) { // SIB byte present.
+ if (rm == 4) { // SIB byte present.
current++;
}
if (mod == 1) { // Byte displacement.
@@ -1322,7 +1342,7 @@ int DisassemblerX64::InstructionDecode(v8::internal::Vector<char> out_buffer,
data++;
}
- const InstructionDesc& idesc = instruction_table.Get(current);
+ const InstructionDesc& idesc = instruction_table_->Get(current);
byte_size_operand_ = idesc.byte_size_operation;
switch (idesc.type) {
case ZERO_OPERANDS_INSTR:
diff --git a/src/3rdparty/v8/src/x64/frames-x64.h b/src/3rdparty/v8/src/x64/frames-x64.h
index 7012c76..3e3d63d 100644
--- a/src/3rdparty/v8/src/x64/frames-x64.h
+++ b/src/3rdparty/v8/src/x64/frames-x64.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -31,32 +31,32 @@
namespace v8 {
namespace internal {
-static const int kNumRegs = 16;
-static const RegList kJSCallerSaved =
+const int kNumRegs = 16;
+const RegList kJSCallerSaved =
1 << 0 | // rax
1 << 1 | // rcx
1 << 2 | // rdx
1 << 3 | // rbx - used as a caller-saved register in JavaScript code
1 << 7; // rdi - callee function
-static const int kNumJSCallerSaved = 5;
+const int kNumJSCallerSaved = 5;
typedef Object* JSCallerSavedBuffer[kNumJSCallerSaved];
// Number of registers for which space is reserved in safepoints.
-static const int kNumSafepointRegisters = 16;
+const int kNumSafepointRegisters = 16;
// ----------------------------------------------------
class StackHandlerConstants : public AllStatic {
public:
- static const int kNextOffset = 0 * kPointerSize;
- static const int kContextOffset = 1 * kPointerSize;
- static const int kFPOffset = 2 * kPointerSize;
- static const int kStateOffset = 3 * kPointerSize;
- static const int kPCOffset = 4 * kPointerSize;
+ static const int kNextOffset = 0 * kPointerSize;
+ static const int kCodeOffset = 1 * kPointerSize;
+ static const int kStateOffset = 2 * kPointerSize;
+ static const int kContextOffset = 3 * kPointerSize;
+ static const int kFPOffset = 4 * kPointerSize;
- static const int kSize = kPCOffset + kPointerSize;
+ static const int kSize = kFPOffset + kPointerSize;
};
@@ -87,6 +87,9 @@ class ExitFrameConstants : public AllStatic {
class StandardFrameConstants : public AllStatic {
public:
+ // Fixed part of the frame consists of return address, caller fp,
+ // context and function.
+ static const int kFixedFrameSize = 4 * kPointerSize;
static const int kExpressionsOffset = -3 * kPointerSize;
static const int kMarkerOffset = -2 * kPointerSize;
static const int kContextOffset = -1 * kPointerSize;
@@ -112,6 +115,8 @@ class JavaScriptFrameConstants : public AllStatic {
class ArgumentsAdaptorFrameConstants : public AllStatic {
public:
static const int kLengthOffset = StandardFrameConstants::kExpressionsOffset;
+ static const int kFrameSize =
+ StandardFrameConstants::kFixedFrameSize + kPointerSize;
};
diff --git a/src/3rdparty/v8/src/x64/full-codegen-x64.cc b/src/3rdparty/v8/src/x64/full-codegen-x64.cc
index 815e3b4..4138a16 100644
--- a/src/3rdparty/v8/src/x64/full-codegen-x64.cc
+++ b/src/3rdparty/v8/src/x64/full-codegen-x64.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -34,6 +34,7 @@
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
+#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"
@@ -106,17 +107,19 @@ class JumpPatchSite BASE_EMBEDDED {
// formal parameter count expected by the function.
//
// The live registers are:
-// o rdi: the JS function object being called (ie, ourselves)
+// o rdi: the JS function object being called (i.e. ourselves)
// o rsi: our context
// o rbp: our caller's frame pointer
// o rsp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-x64.h for its layout.
-void FullCodeGenerator::Generate(CompilationInfo* info) {
- ASSERT(info_ == NULL);
- info_ = info;
- scope_ = info->scope();
+void FullCodeGenerator::Generate() {
+ CompilationInfo* info = info_;
+ handler_table_ =
+ isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
+ profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
+ Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
SetFunctionPosition(function());
Comment cmnt(masm_, "[ function compiled by full code generator");
@@ -131,7 +134,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// with undefined when called as functions (without an explicit
// receiver object). rcx is zero for method calls and non-zero for
// function calls.
- if (info->is_strict_mode() || info->is_native()) {
+ if (!info->is_classic_mode() || info->is_native()) {
Label ok;
__ testq(rcx, rcx);
__ j(zero, &ok, Label::kNear);
@@ -168,13 +171,12 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// Possibly allocate a local context.
int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
- if (heap_slots > 0 ||
- (scope()->is_qml_mode() && scope()->is_global_scope())) {
+ if (heap_slots > 0) {
Comment cmnt(masm_, "[ Allocate local context");
// Argument to NewContext is the function, which is still in rdi.
__ push(rdi);
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub((heap_slots < 0)?0:heap_slots);
+ FastNewContextStub stub(heap_slots);
__ CallStub(&stub);
} else {
__ CallRuntime(Runtime::kNewFunctionContext, 1);
@@ -225,9 +227,15 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// function, receiver address, parameter count.
// The stub will rewrite receiver and parameter count if the previous
// stack frame was an arguments adapter frame.
- ArgumentsAccessStub stub(
- is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT
- : ArgumentsAccessStub::NEW_NON_STRICT_SLOW);
+ ArgumentsAccessStub::Type type;
+ if (!is_classic_mode()) {
+ type = ArgumentsAccessStub::NEW_STRICT;
+ } else if (function()->has_duplicate_parameters()) {
+ type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
+ } else {
+ type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
+ }
+ ArgumentsAccessStub stub(type);
__ CallStub(&stub);
SetVar(arguments, rax, rbx, rdx);
@@ -249,11 +257,11 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
- int ignored = 0;
VariableProxy* proxy = scope()->function();
ASSERT(proxy->var()->mode() == CONST ||
proxy->var()->mode() == CONST_HARMONY);
- EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
+ ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
+ EmitDeclaration(proxy, proxy->var()->mode(), NULL);
}
VisitDeclarations(scope()->declarations());
}
@@ -289,13 +297,60 @@ void FullCodeGenerator::ClearAccumulator() {
}
-void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
+void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
+ __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
+ __ SmiAddConstant(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
+ Smi::FromInt(-delta));
+}
+
+
+void FullCodeGenerator::EmitProfilingCounterReset() {
+ int reset_value = FLAG_interrupt_budget;
+ if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
+ // Self-optimization is a one-off thing; if it fails, don't try again.
+ reset_value = Smi::kMaxValue;
+ }
+ if (isolate()->IsDebuggerActive()) {
+ // Detect debug break requests as soon as possible.
+ reset_value = 10;
+ }
+ __ movq(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
+ __ movq(kScratchRegister,
+ reinterpret_cast<uint64_t>(Smi::FromInt(reset_value)),
+ RelocInfo::NONE);
+ __ movq(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
+ kScratchRegister);
+}
+
+
+static const int kMaxBackEdgeWeight = 127;
+static const int kBackEdgeDistanceDivisor = 162;
+
+
+void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
+ Label* back_edge_target) {
Comment cmnt(masm_, "[ Stack check");
Label ok;
- __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
- __ j(above_equal, &ok, Label::kNear);
- StackCheckStub stub;
- __ CallStub(&stub);
+
+ if (FLAG_count_based_interrupts) {
+ int weight = 1;
+ if (FLAG_weighted_back_edges) {
+ ASSERT(back_edge_target->is_bound());
+ int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
+ weight = Min(kMaxBackEdgeWeight,
+ Max(1, distance / kBackEdgeDistanceDivisor));
+ }
+ EmitProfilingCounterDecrement(weight);
+ __ j(positive, &ok, Label::kNear);
+ InterruptStub stub;
+ __ CallStub(&stub);
+ } else {
+ __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
+ __ j(above_equal, &ok, Label::kNear);
+ StackCheckStub stub;
+ __ CallStub(&stub);
+ }
+
// Record a mapping of this PC offset to the OSR id. This is used to find
// the AST id from the unoptimized code in order to use it as a key into
// the deoptimization input data found in the optimized code.
@@ -308,6 +363,10 @@ void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
ASSERT(loop_depth() > 0);
__ testl(rax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
+ if (FLAG_count_based_interrupts) {
+ EmitProfilingCounterReset();
+ }
+
__ bind(&ok);
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
// Record a mapping of the OSR id to this PC. This is used if the OSR
@@ -327,6 +386,31 @@ void FullCodeGenerator::EmitReturnSequence() {
__ push(rax);
__ CallRuntime(Runtime::kTraceExit, 1);
}
+ if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
+ // Pretend that the exit is a backwards jump to the entry.
+ int weight = 1;
+ if (info_->ShouldSelfOptimize()) {
+ weight = FLAG_interrupt_budget / FLAG_self_opt_count;
+ } else if (FLAG_weighted_back_edges) {
+ int distance = masm_->pc_offset();
+ weight = Min(kMaxBackEdgeWeight,
+ Max(1, distance = kBackEdgeDistanceDivisor));
+ }
+ EmitProfilingCounterDecrement(weight);
+ Label ok;
+ __ j(positive, &ok, Label::kNear);
+ __ push(rax);
+ if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
+ __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
+ __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
+ } else {
+ InterruptStub stub;
+ __ CallStub(&stub);
+ }
+ __ pop(rax);
+ EmitProfilingCounterReset();
+ __ bind(&ok);
+ }
#ifdef DEBUG
// Add a label for checking the size of the code used for returning.
Label check_exit_codesize;
@@ -671,17 +755,16 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
VariableMode mode,
- FunctionLiteral* function,
- int* global_count) {
+ FunctionLiteral* function) {
// If it was not possible to allocate the variable at compile time, we
// need to "declare" it at runtime to make sure it actually exists in the
// local context.
Variable* variable = proxy->var();
- bool binding_needs_init =
- mode == CONST || mode == CONST_HARMONY || mode == LET;
+ bool binding_needs_init = (function == NULL) &&
+ (mode == CONST || mode == CONST_HARMONY || mode == LET);
switch (variable->location()) {
case Variable::UNALLOCATED:
- ++(*global_count);
+ ++global_count_;
break;
case Variable::PARAMETER:
@@ -762,9 +845,6 @@ void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
}
-void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
-
-
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals.
__ push(rsi); // The context is the first argument.
@@ -826,7 +906,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
- __ call(ic, RelocInfo::CODE_TARGET, clause->CompareId());
+ CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
patch_site.EmitPatchInfo();
__ testq(rax, rax);
@@ -878,6 +958,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ cmpq(rax, null_value);
__ j(equal, &exit);
+ PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
+
// Convert the object to a JS object.
Label convert, done_convert;
__ JumpIfSmi(rax, &convert);
@@ -899,47 +981,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// the JSObject::IsSimpleEnum cache validity checks. If we cannot
// guarantee cache validity, call the runtime system to check cache
// validity or get the property names in a fixed array.
- Label next;
- Register empty_fixed_array_value = r8;
- __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
- Register empty_descriptor_array_value = r9;
- __ LoadRoot(empty_descriptor_array_value,
- Heap::kEmptyDescriptorArrayRootIndex);
- __ movq(rcx, rax);
- __ bind(&next);
-
- // Check that there are no elements. Register rcx contains the
- // current JS object we've reached through the prototype chain.
- __ cmpq(empty_fixed_array_value,
- FieldOperand(rcx, JSObject::kElementsOffset));
- __ j(not_equal, &call_runtime);
-
- // Check that instance descriptors are not empty so that we can
- // check for an enum cache. Leave the map in rbx for the subsequent
- // prototype load.
- __ movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
- __ movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOrBitField3Offset));
- __ JumpIfSmi(rdx, &call_runtime);
-
- // Check that there is an enum cache in the non-empty instance
- // descriptors (rdx). This is the case if the next enumeration
- // index field does not contain a smi.
- __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset));
- __ JumpIfSmi(rdx, &call_runtime);
-
- // For all objects but the receiver, check that the cache is empty.
- Label check_prototype;
- __ cmpq(rcx, rax);
- __ j(equal, &check_prototype, Label::kNear);
- __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset));
- __ cmpq(rdx, empty_fixed_array_value);
- __ j(not_equal, &call_runtime);
-
- // Load the prototype from the map and loop if non-null.
- __ bind(&check_prototype);
- __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
- __ cmpq(rcx, null_value);
- __ j(not_equal, &next);
+ __ CheckEnumCache(null_value, &call_runtime);
// The enum cache is valid. Load the map of the object being
// iterated over and use the cache for the iteration.
@@ -966,7 +1008,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
__ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
- // Setup the four remaining stack slots.
+ // Set up the four remaining stack slots.
__ push(rax); // Map.
__ push(rdx); // Enumeration cache.
__ movq(rax, FieldOperand(rdx, FixedArray::kLengthOffset));
@@ -977,6 +1019,16 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// We got a fixed array in register rax. Iterate through that.
Label non_proxy;
__ bind(&fixed_array);
+
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(
+ Handle<Object>(
+ Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
+ RecordTypeFeedbackCell(stmt->PrepareId(), cell);
+ __ LoadHeapObject(rbx, cell);
+ __ Move(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
+ Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker));
+
__ Move(rbx, Smi::FromInt(1)); // Smi indicates slow check
__ movq(rcx, Operand(rsp, 0 * kPointerSize)); // Get enumerated object
STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
@@ -991,6 +1043,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ Push(Smi::FromInt(0)); // Initial index.
// Generate code for doing the condition check.
+ PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
__ bind(&loop);
__ movq(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index.
__ cmpq(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length.
@@ -1036,7 +1089,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ movq(result_register(), rbx);
// Perform the assignment as if via '='.
{ EffectContext context(this);
- EmitAssignment(stmt->each(), stmt->AssignmentId());
+ EmitAssignment(stmt->each());
}
// Generate code for the body of the loop.
@@ -1047,7 +1100,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ bind(loop_statement.continue_label());
__ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
- EmitStackCheck(stmt);
+ EmitStackCheck(stmt, &loop);
__ jmp(&loop);
// Remove the pointers stored on the stack.
@@ -1055,6 +1108,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ addq(rsp, Immediate(5 * kPointerSize));
// Exit and decrement the loop depth.
+ PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
__ bind(&exit);
decrement_loop_depth();
}
@@ -1073,7 +1127,7 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
- FastNewClosureStub stub(info->strict_mode_flag());
+ FastNewClosureStub stub(info->language_mode());
__ Push(info);
__ CallStub(&stub);
} else {
@@ -1145,13 +1199,13 @@ void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
// All extension objects were empty and it is safe to use a global
// load IC call.
- __ movq(rax, var->is_qml_global() ? QmlGlobalObjectOperand() : GlobalObjectOperand());
+ __ movq(rax, GlobalObjectOperand());
__ Move(rcx, var->name());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
? RelocInfo::CODE_TARGET
: RelocInfo::CODE_TARGET_CONTEXT;
- __ call(ic, mode);
+ CallIC(ic, mode);
}
@@ -1230,9 +1284,9 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
// Use inline caching. Variable name is passed in rcx and the global
// object on the stack.
__ Move(rcx, var->name());
- __ movq(rax, var->is_qml_global() ? QmlGlobalObjectOperand() : GlobalObjectOperand());
+ __ movq(rax, GlobalObjectOperand());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
context()->Plug(rax);
break;
}
@@ -1241,27 +1295,63 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
case Variable::LOCAL:
case Variable::CONTEXT: {
Comment cmnt(masm_, var->IsContextSlot() ? "Context slot" : "Stack slot");
- if (!var->binding_needs_init()) {
- context()->Plug(var);
- } else {
- // Let and const need a read barrier.
- Label done;
- GetVar(rax, var);
- __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
- __ j(not_equal, &done, Label::kNear);
- if (var->mode() == LET || var->mode() == CONST_HARMONY) {
- // Throw a reference error when using an uninitialized let/const
- // binding in harmony mode.
- __ Push(var->name());
- __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ if (var->binding_needs_init()) {
+ // var->scope() may be NULL when the proxy is located in eval code and
+ // refers to a potential outside binding. Currently those bindings are
+ // always looked up dynamically, i.e. in that case
+ // var->location() == LOOKUP.
+ // always holds.
+ ASSERT(var->scope() != NULL);
+
+ // Check if the binding really needs an initialization check. The check
+ // can be skipped in the following situation: we have a LET or CONST
+ // binding in harmony mode, both the Variable and the VariableProxy have
+ // the same declaration scope (i.e. they are both in global code, in the
+ // same function or in the same eval code) and the VariableProxy is in
+ // the source physically located after the initializer of the variable.
+ //
+ // We cannot skip any initialization checks for CONST in non-harmony
+ // mode because const variables may be declared but never initialized:
+ // if (false) { const x; }; var y = x;
+ //
+ // The condition on the declaration scopes is a conservative check for
+ // nested functions that access a binding and are called before the
+ // binding is initialized:
+ // function() { f(); let x = 1; function f() { x = 2; } }
+ //
+ bool skip_init_check;
+ if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
+ skip_init_check = false;
} else {
- // Uninitalized const bindings outside of harmony mode are unholed.
- ASSERT(var->mode() == CONST);
- __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
+ // Check that we always have valid source position.
+ ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
+ ASSERT(proxy->position() != RelocInfo::kNoPosition);
+ skip_init_check = var->mode() != CONST &&
+ var->initializer_position() < proxy->position();
+ }
+
+ if (!skip_init_check) {
+ // Let and const need a read barrier.
+ Label done;
+ GetVar(rax, var);
+ __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
+ __ j(not_equal, &done, Label::kNear);
+ if (var->mode() == LET || var->mode() == CONST_HARMONY) {
+ // Throw a reference error when using an uninitialized let/const
+ // binding in harmony mode.
+ __ Push(var->name());
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ } else {
+ // Uninitalized const bindings outside of harmony mode are unholed.
+ ASSERT(var->mode() == CONST);
+ __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
+ }
+ __ bind(&done);
+ context()->Plug(rax);
+ break;
}
- __ bind(&done);
- context()->Plug(rax);
}
+ context()->Plug(var);
break;
}
@@ -1337,12 +1427,22 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
}
+void FullCodeGenerator::EmitAccessor(Expression* expression) {
+ if (expression == NULL) {
+ __ PushRoot(Heap::kNullValueRootIndex);
+ } else {
+ VisitForStackValue(expression);
+ }
+}
+
+
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Comment cmnt(masm_, "[ ObjectLiteral");
+ Handle<FixedArray> constant_properties = expr->constant_properties();
__ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
__ Push(Smi::FromInt(expr->literal_index()));
- __ Push(expr->constant_properties());
+ __ Push(constant_properties);
int flags = expr->fast_elements()
? ObjectLiteral::kFastElements
: ObjectLiteral::kNoFlags;
@@ -1350,10 +1450,15 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
? ObjectLiteral::kHasFunction
: ObjectLiteral::kNoFlags;
__ Push(Smi::FromInt(flags));
+ int properties_count = constant_properties->length() / 2;
if (expr->depth() > 1) {
__ CallRuntime(Runtime::kCreateObjectLiteral, 4);
- } else {
+ } else if (flags != ObjectLiteral::kFastElements ||
+ properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
__ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
+ } else {
+ FastCloneShallowObjectStub stub(properties_count);
+ __ CallStub(&stub);
}
// If result_saved is true the result is on top of the stack. If
@@ -1365,6 +1470,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// marked expressions, no store code is emitted.
expr->CalculateEmitStore();
+ AccessorTable accessor_table(isolate()->zone());
for (int i = 0; i < expr->properties()->length(); i++) {
ObjectLiteral::Property* property = expr->properties()->at(i);
if (property->IsCompileTimeValue()) continue;
@@ -1387,10 +1493,10 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
VisitForAccumulatorValue(value);
__ Move(rcx, key->handle());
__ movq(rdx, Operand(rsp, 0));
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, key->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, key->id());
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
@@ -1409,19 +1515,28 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ Drop(3);
}
break;
- case ObjectLiteral::Property::SETTER:
case ObjectLiteral::Property::GETTER:
- __ push(Operand(rsp, 0)); // Duplicate receiver.
- VisitForStackValue(key);
- __ Push(property->kind() == ObjectLiteral::Property::SETTER ?
- Smi::FromInt(1) :
- Smi::FromInt(0));
- VisitForStackValue(value);
- __ CallRuntime(Runtime::kDefineAccessor, 4);
+ accessor_table.lookup(key)->second->getter = value;
+ break;
+ case ObjectLiteral::Property::SETTER:
+ accessor_table.lookup(key)->second->setter = value;
break;
}
}
+ // Emit code to define accessors, using only a single call to the runtime for
+ // each pair of corresponding getters and setters.
+ for (AccessorTable::Iterator it = accessor_table.begin();
+ it != accessor_table.end();
+ ++it) {
+ __ push(Operand(rsp, 0)); // Duplicate receiver.
+ VisitForStackValue(it->first);
+ EmitAccessor(it->second->getter);
+ EmitAccessor(it->second->setter);
+ __ Push(Smi::FromInt(NONE));
+ __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
+ }
+
if (expr->has_function()) {
ASSERT(result_saved);
__ push(Operand(rsp, 0));
@@ -1445,6 +1560,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
ASSERT_EQ(2, constant_elements->length());
ElementsKind constant_elements_kind =
static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
+ bool has_constant_fast_elements = constant_elements_kind == FAST_ELEMENTS;
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
@@ -1452,12 +1568,16 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Push(Smi::FromInt(expr->literal_index()));
__ Push(constant_elements);
- if (constant_elements_values->map() ==
- isolate()->heap()->fixed_cow_array_map()) {
+ Heap* heap = isolate()->heap();
+ if (has_constant_fast_elements &&
+ constant_elements_values->map() == heap->fixed_cow_array_map()) {
+ // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+ // change, so it's possible to specialize the stub in advance.
+ __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
FastCloneShallowArrayStub stub(
- FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
+ FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
+ length);
__ CallStub(&stub);
- __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
} else if (expr->depth() > 1) {
__ CallRuntime(Runtime::kCreateArrayLiteral, 3);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
@@ -1466,10 +1586,11 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
ASSERT(constant_elements_kind == FAST_ELEMENTS ||
constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
FLAG_smi_only_arrays);
- FastCloneShallowArrayStub::Mode mode =
- constant_elements_kind == FAST_DOUBLE_ELEMENTS
- ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
- : FastCloneShallowArrayStub::CLONE_ELEMENTS;
+ // If the elements are already FAST_ELEMENTS, the boilerplate cannot
+ // change, so it's possible to specialize the stub in advance.
+ FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
+ ? FastCloneShallowArrayStub::CLONE_ELEMENTS
+ : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
FastCloneShallowArrayStub stub(mode, length);
__ CallStub(&stub);
}
@@ -1493,61 +1614,28 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
}
VisitForAccumulatorValue(subexpr);
- // Store the subexpression value in the array's elements.
- __ movq(r8, Operand(rsp, 0)); // Copy of array literal.
- __ movq(rdi, FieldOperand(r8, JSObject::kMapOffset));
- __ movq(rbx, FieldOperand(r8, JSObject::kElementsOffset));
- int offset = FixedArray::kHeaderSize + (i * kPointerSize);
-
- Label element_done;
- Label double_elements;
- Label smi_element;
- Label slow_elements;
- Label fast_elements;
- __ CheckFastElements(rdi, &double_elements);
-
- // FAST_SMI_ONLY_ELEMENTS or FAST_ELEMENTS
- __ JumpIfSmi(result_register(), &smi_element);
- __ CheckFastSmiOnlyElements(rdi, &fast_elements);
-
- // Store into the array literal requires a elements transition. Call into
- // the runtime.
- __ bind(&slow_elements);
- __ push(r8); // Copy of array literal.
- __ Push(Smi::FromInt(i));
- __ push(result_register());
- __ Push(Smi::FromInt(NONE)); // PropertyAttributes
- __ Push(Smi::FromInt(strict_mode_flag())); // Strict mode.
- __ CallRuntime(Runtime::kSetProperty, 5);
- __ jmp(&element_done);
-
- // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
- __ bind(&double_elements);
- __ movq(rcx, Immediate(i));
- __ StoreNumberToDoubleElements(result_register(),
- rbx,
- rcx,
- xmm0,
- &slow_elements);
- __ jmp(&element_done);
-
- // Array literal has ElementsKind of FAST_ELEMENTS and value is an object.
- __ bind(&fast_elements);
- __ movq(FieldOperand(rbx, offset), result_register());
- // Update the write barrier for the array store.
- __ RecordWriteField(rbx, offset, result_register(), rcx,
- kDontSaveFPRegs,
- EMIT_REMEMBERED_SET,
- OMIT_SMI_CHECK);
- __ jmp(&element_done);
-
- // Array literal has ElementsKind of FAST_SMI_ONLY_ELEMENTS or
- // FAST_ELEMENTS, and value is Smi.
- __ bind(&smi_element);
- __ movq(FieldOperand(rbx, offset), result_register());
- // Fall through
-
- __ bind(&element_done);
+ if (constant_elements_kind == FAST_ELEMENTS) {
+ // Fast-case array literal with ElementsKind of FAST_ELEMENTS, they cannot
+ // transition and don't need to call the runtime stub.
+ int offset = FixedArray::kHeaderSize + (i * kPointerSize);
+ __ movq(rbx, Operand(rsp, 0)); // Copy of array literal.
+ __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
+ // Store the subexpression value in the array's elements.
+ __ movq(FieldOperand(rbx, offset), result_register());
+ // Update the write barrier for the array store.
+ __ RecordWriteField(rbx, offset, result_register(), rcx,
+ kDontSaveFPRegs,
+ EMIT_REMEMBERED_SET,
+ INLINE_SMI_CHECK);
+ } else {
+ // Store the subexpression value in the array's elements.
+ __ movq(rbx, Operand(rsp, 0)); // Copy of array literal.
+ __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset));
+ __ Move(rcx, Smi::FromInt(i));
+ __ Move(rdx, Smi::FromInt(expr->literal_index()));
+ StoreArrayLiteralElementStub stub;
+ __ CallStub(&stub);
+ }
PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
}
@@ -1678,14 +1766,14 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
Literal* key = prop->key()->AsLiteral();
__ Move(rcx, key->handle());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, prop->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}
@@ -1707,7 +1795,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ bind(&stub_call);
__ movq(rax, rcx);
BinaryOpStub stub(op, mode);
- __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
__ jmp(&done, Label::kNear);
@@ -1756,13 +1844,13 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
__ pop(rdx);
BinaryOpStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
- __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
context()->Plug(rax);
}
-void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
+void FullCodeGenerator::EmitAssignment(Expression* expr) {
// Invalid left-hand sides are rewritten to have a 'throw
// ReferenceError' on the left-hand side.
if (!expr->IsValidLeftHandSide()) {
@@ -1794,10 +1882,10 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
__ movq(rdx, rax);
__ pop(rax); // Restore value.
__ Move(rcx, prop->key()->AsLiteral()->handle());
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ call(ic);
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic);
break;
}
case KEYED_PROPERTY: {
@@ -1807,14 +1895,13 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
__ movq(rcx, rax);
__ pop(rdx);
__ pop(rax); // Restore value.
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
- : isolate()->builtins()->KeyedStoreIC_Initialize();
- __ call(ic);
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize()
+ : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
+ CallIC(ic);
break;
}
}
- PrepareForBailoutForId(bailout_ast_id, TOS_REG);
context()->Plug(rax);
}
@@ -1824,11 +1911,11 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
if (var->IsUnallocated()) {
// Global var, const, or let.
__ Move(rcx, var->name());
- __ movq(rdx, var->is_qml_global() ? QmlGlobalObjectOperand() : GlobalObjectOperand());
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ __ movq(rdx, GlobalObjectOperand());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (op == Token::INIT_CONST) {
// Const initializers need a write barrier.
ASSERT(!var->IsParameter()); // No const parameters.
@@ -1858,7 +1945,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
__ push(rax); // Value.
__ push(rsi); // Context.
__ Push(var->name());
- __ Push(Smi::FromInt(strict_mode_flag()));
+ __ Push(Smi::FromInt(language_mode()));
__ CallRuntime(Runtime::kStoreContextSlot, 4);
} else {
ASSERT(var->IsStackAllocated() || var->IsContextSlot());
@@ -1901,7 +1988,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
__ push(rax); // Value.
__ push(rsi); // Context.
__ Push(var->name());
- __ Push(Smi::FromInt(strict_mode_flag()));
+ __ Push(Smi::FromInt(language_mode()));
__ CallRuntime(Runtime::kStoreContextSlot, 4);
}
}
@@ -1933,10 +2020,10 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
} else {
__ pop(rdx);
}
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, expr->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
@@ -1973,10 +2060,10 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
}
// Record source code position before IC call.
SetSourcePosition(expr->position());
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
- : isolate()->builtins()->KeyedStoreIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, expr->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize()
+ : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
@@ -2010,6 +2097,14 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
}
+void FullCodeGenerator::CallIC(Handle<Code> code,
+ RelocInfo::Mode rmode,
+ unsigned ast_id) {
+ ic_total_count_++;
+ __ call(code, rmode, ast_id);
+}
+
+
void FullCodeGenerator::EmitCallWithIC(Call* expr,
Handle<Object> name,
RelocInfo::Mode mode) {
@@ -2027,7 +2122,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
// Call the IC initialization code.
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- __ call(ic, mode, expr->id());
+ CallIC(ic, mode, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -2060,7 +2155,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
Handle<Code> ic =
isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
__ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key.
- __ call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -2080,6 +2175,7 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
// Record source position for debugger.
SetSourcePosition(expr->position());
CallFunctionStub stub(arg_count, flags);
+ __ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
// Restore context register.
@@ -2100,14 +2196,13 @@ void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
// Push the receiver of the enclosing function and do runtime call.
__ push(Operand(rbp, (2 + info_->scope()->num_parameters()) * kPointerSize));
- // Push the strict mode flag. In harmony mode every eval call
- // is a strict mode eval call.
- StrictModeFlag strict_mode =
- FLAG_harmony_scoping ? kStrictMode : strict_mode_flag();
- __ Push(Smi::FromInt(strict_mode));
+ // Push the language mode.
+ __ Push(Smi::FromInt(language_mode()));
+
+ // Push the start position of the scope the calls resides in.
+ __ Push(Smi::FromInt(scope()->start_position()));
- // Push the qml mode flag
- __ Push(Smi::FromInt(is_qml_mode()));
+ // Do the runtime call.
__ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
}
@@ -2152,6 +2247,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// Record source position for debugger.
SetSourcePosition(expr->position());
CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
+ __ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
// Restore context register.
@@ -2160,7 +2256,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
} else if (proxy != NULL && proxy->var()->IsUnallocated()) {
// Call to a global variable. Push global object as receiver for the
// call IC lookup.
- __ push(proxy->var()->is_qml_global() ? QmlGlobalObjectOperand() : GlobalObjectOperand());
+ __ push(GlobalObjectOperand());
EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
} else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
// Call to a lookup slot (dynamically introduced variable).
@@ -2254,9 +2350,23 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ Set(rax, arg_count);
__ movq(rdi, Operand(rsp, arg_count * kPointerSize));
- Handle<Code> construct_builtin =
- isolate()->builtins()->JSConstructCall();
- __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
+ // Record call targets in unoptimized code, but not in the snapshot.
+ CallFunctionFlags flags;
+ if (!Serializer::enabled()) {
+ flags = RECORD_CALL_TARGET;
+ Handle<Object> uninitialized =
+ TypeFeedbackCells::UninitializedSentinel(isolate());
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
+ RecordTypeFeedbackCell(expr->id(), cell);
+ __ Move(rbx, cell);
+ } else {
+ flags = NO_CALL_FUNCTION_FLAGS;
+ }
+
+ CallConstructStub stub(flags);
+ __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+ PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(rax);
}
@@ -2797,13 +2907,61 @@ void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
}
+void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT(args->length() == 2);
+ ASSERT_NE(NULL, args->at(1)->AsLiteral());
+ Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
+
+ VisitForAccumulatorValue(args->at(0)); // Load the object.
+
+ Label runtime, done;
+ Register object = rax;
+ Register result = rax;
+ Register scratch = rcx;
+
+#ifdef DEBUG
+ __ AbortIfSmi(object);
+ __ CmpObjectType(object, JS_DATE_TYPE, scratch);
+ __ Assert(equal, "Trying to get date field from non-date.");
+#endif
+
+ if (index->value() == 0) {
+ __ movq(result, FieldOperand(object, JSDate::kValueOffset));
+ } else {
+ if (index->value() < JSDate::kFirstUncachedField) {
+ ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
+ __ movq(scratch, stamp);
+ __ cmpq(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
+ __ j(not_equal, &runtime, Label::kNear);
+ __ movq(result, FieldOperand(object, JSDate::kValueOffset +
+ kPointerSize * index->value()));
+ __ jmp(&done);
+ }
+ __ bind(&runtime);
+ __ PrepareCallCFunction(2);
+#ifdef _WIN64
+ __ movq(rcx, object);
+ __ movq(rdx, index, RelocInfo::NONE);
+#else
+ __ movq(rdi, object);
+ __ movq(rsi, index, RelocInfo::NONE);
+#endif
+ __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ __ bind(&done);
+ }
+ context()->Plug(rax);
+}
+
+
void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
// Load the arguments on the stack and call the runtime function.
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- MathPowStub stub;
+ MathPowStub stub(MathPowStub::ON_STACK);
__ CallStub(&stub);
context()->Plug(rax);
}
@@ -2878,7 +3036,6 @@ void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
Register object = rbx;
Register index = rax;
- Register scratch = rcx;
Register result = rdx;
__ pop(object);
@@ -2888,7 +3045,6 @@ void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
Label done;
StringCharCodeAtGenerator generator(object,
index,
- scratch,
result,
&need_conversion,
&need_conversion,
@@ -2926,8 +3082,7 @@ void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
Register object = rbx;
Register index = rax;
- Register scratch1 = rcx;
- Register scratch2 = rdx;
+ Register scratch = rdx;
Register result = rax;
__ pop(object);
@@ -2937,8 +3092,7 @@ void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
Label done;
StringCharAtGenerator generator(object,
index,
- scratch1,
- scratch2,
+ scratch,
result,
&need_conversion,
&need_conversion,
@@ -3017,6 +3171,18 @@ void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
}
+void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
+ // Load the argument on the stack and call the stub.
+ TranscendentalCacheStub stub(TranscendentalCache::TAN,
+ TranscendentalCacheStub::TAGGED);
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT(args->length() == 1);
+ VisitForStackValue(args->at(0));
+ __ CallStub(&stub);
+ context()->Plug(rax);
+}
+
+
void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
// Load the argument on the stack and call the stub.
TranscendentalCacheStub stub(TranscendentalCache::LOG,
@@ -3049,12 +3215,24 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
}
VisitForAccumulatorValue(args->last()); // Function.
+ // Check for proxy.
+ Label proxy, done;
+ __ CmpObjectType(rax, JS_FUNCTION_PROXY_TYPE, rbx);
+ __ j(equal, &proxy);
+
// InvokeFunction requires the function in rdi. Move it in there.
__ movq(rdi, result_register());
ParameterCount count(arg_count);
__ InvokeFunction(rdi, count, CALL_FUNCTION,
NullCallWrapper(), CALL_AS_METHOD);
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ __ jmp(&done);
+
+ __ bind(&proxy);
+ __ push(rax);
+ __ CallRuntime(Runtime::kCall, args->length());
+ __ bind(&done);
+
context()->Plug(rax);
}
@@ -3493,7 +3671,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// One-character separator case
__ bind(&one_char_separator);
- // Get the separator ascii character value.
+ // Get the separator ASCII character value.
// Register "string" holds the separator.
__ movzxbl(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
__ Set(index, 0);
@@ -3617,7 +3795,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
- __ call(ic, mode, expr->id());
+ CallIC(ic, mode, expr->id());
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
} else {
@@ -3637,16 +3815,18 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
if (property != NULL) {
VisitForStackValue(property->obj());
VisitForStackValue(property->key());
- __ Push(Smi::FromInt(strict_mode_flag()));
+ StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
+ ? kNonStrictMode : kStrictMode;
+ __ Push(Smi::FromInt(strict_mode_flag));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(rax);
} else if (proxy != NULL) {
Variable* var = proxy->var();
// Delete of an unqualified identifier is disallowed in strict mode
// but "delete this" is allowed.
- ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
+ ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
if (var->IsUnallocated()) {
- __ push(var->is_qml_global() ? QmlGlobalObjectOperand() : GlobalObjectOperand());
+ __ push(GlobalObjectOperand());
__ Push(var->name());
__ Push(Smi::FromInt(kNonStrictMode));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
@@ -3773,7 +3953,7 @@ void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
// accumulator register rax.
VisitForAccumulatorValue(expr->expression());
SetSourcePosition(expr->position());
- __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
context()->Plug(rax);
}
@@ -3894,7 +4074,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ movq(rdx, rax);
__ Move(rax, Smi::FromInt(1));
}
- __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
+ CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
patch_site.EmitPatchInfo();
__ bind(&done);
@@ -3925,10 +4105,10 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
case NAMED_PROPERTY: {
__ Move(rcx, prop->key()->AsLiteral()->handle());
__ pop(rdx);
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
- : isolate()->builtins()->StoreIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, expr->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->StoreIC_Initialize()
+ : isolate()->builtins()->StoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -3942,10 +4122,10 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
case KEYED_PROPERTY: {
__ pop(rcx);
__ pop(rdx);
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
- : isolate()->builtins()->KeyedStoreIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, expr->id());
+ Handle<Code> ic = is_classic_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize()
+ : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -3968,11 +4148,11 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
if (proxy != NULL && proxy->var()->IsUnallocated()) {
Comment cmnt(masm_, "Global variable");
__ Move(rcx, proxy->name());
- __ movq(rax, proxy->var()->is_qml_global() ? QmlGlobalObjectOperand() : GlobalObjectOperand());
+ __ movq(rax, GlobalObjectOperand());
Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
// Use a regular load, not a contextual load, to avoid a reference
// error.
- __ call(ic);
+ CallIC(ic);
PrepareForBailout(expr, TOS_REG);
context()->Plug(rax);
} else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
@@ -4152,7 +4332,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
- __ call(ic, RelocInfo::CODE_TARGET, expr->id());
+ CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
patch_site.EmitPatchInfo();
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
diff --git a/src/3rdparty/v8/src/x64/ic-x64.cc b/src/3rdparty/v8/src/x64/ic-x64.cc
index e8ab06c..0632ce4 100644
--- a/src/3rdparty/v8/src/x64/ic-x64.cc
+++ b/src/3rdparty/v8/src/x64/ic-x64.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -462,30 +462,58 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ movl(rdi, FieldOperand(rax, String::kHashFieldOffset));
__ shr(rdi, Immediate(String::kHashShift));
__ xor_(rcx, rdi);
- __ and_(rcx, Immediate(KeyedLookupCache::kCapacityMask));
+ int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
+ __ and_(rcx, Immediate(mask));
// Load the key (consisting of map and symbol) from the cache and
// check for match.
+ Label load_in_object_property;
+ static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
+ Label hit_on_nth_entry[kEntriesPerBucket];
ExternalReference cache_keys
= ExternalReference::keyed_lookup_cache_keys(masm->isolate());
- __ movq(rdi, rcx);
- __ shl(rdi, Immediate(kPointerSizeLog2 + 1));
- __ LoadAddress(kScratchRegister, cache_keys);
- __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, 0));
+
+ for (int i = 0; i < kEntriesPerBucket - 1; i++) {
+ Label try_next_entry;
+ __ movq(rdi, rcx);
+ __ shl(rdi, Immediate(kPointerSizeLog2 + 1));
+ __ LoadAddress(kScratchRegister, cache_keys);
+ int off = kPointerSize * i * 2;
+ __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, off));
+ __ j(not_equal, &try_next_entry);
+ __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
+ __ j(equal, &hit_on_nth_entry[i]);
+ __ bind(&try_next_entry);
+ }
+
+ int off = kPointerSize * (kEntriesPerBucket - 1) * 2;
+ __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, off));
__ j(not_equal, &slow);
- __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, kPointerSize));
+ __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
__ j(not_equal, &slow);
// Get field offset, which is a 32-bit integer.
ExternalReference cache_field_offsets
= ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
- __ LoadAddress(kScratchRegister, cache_field_offsets);
- __ movl(rdi, Operand(kScratchRegister, rcx, times_4, 0));
- __ movzxbq(rcx, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
- __ subq(rdi, rcx);
- __ j(above_equal, &property_array_property);
+
+ // Hit on nth entry.
+ for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
+ __ bind(&hit_on_nth_entry[i]);
+ if (i != 0) {
+ __ addl(rcx, Immediate(i));
+ }
+ __ LoadAddress(kScratchRegister, cache_field_offsets);
+ __ movl(rdi, Operand(kScratchRegister, rcx, times_4, 0));
+ __ movzxbq(rcx, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
+ __ subq(rdi, rcx);
+ __ j(above_equal, &property_array_property);
+ if (i != 0) {
+ __ jmp(&load_in_object_property);
+ }
+ }
// Load in-object property.
+ __ bind(&load_in_object_property);
__ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
__ addq(rcx, rdi);
__ movq(rax, FieldOperand(rdx, rcx, times_pointer_size, 0));
@@ -531,14 +559,12 @@ void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
Register receiver = rdx;
Register index = rax;
- Register scratch1 = rbx;
- Register scratch2 = rcx;
+ Register scratch = rcx;
Register result = rax;
StringCharAtGenerator char_at_generator(receiver,
index,
- scratch1,
- scratch2,
+ scratch,
result,
&miss, // When not a string.
&miss, // When not a number.
@@ -609,6 +635,8 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
Label slow, slow_with_tagged_index, fast, array, extra, check_extra_double;
Label fast_object_with_map_check, fast_object_without_map_check;
Label fast_double_with_map_check, fast_double_without_map_check;
+ Label transition_smi_elements, finish_object_store, non_double_value;
+ Label transition_double_elements;
// Check that the object isn't a smi.
__ JumpIfSmi(rdx, &slow_with_tagged_index);
@@ -711,7 +739,8 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ bind(&non_smi_value);
// Writing a non-smi, check whether array allows non-smi elements.
// r9: receiver's map
- __ CheckFastObjectElements(r9, &slow, Label::kNear);
+ __ CheckFastObjectElements(r9, &transition_smi_elements);
+ __ bind(&finish_object_store);
__ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
rax);
__ movq(rdx, rax); // Preserve the value which is returned.
@@ -728,8 +757,53 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
__ bind(&fast_double_without_map_check);
// If the value is a number, store it as a double in the FastDoubleElements
// array.
- __ StoreNumberToDoubleElements(rax, rbx, rcx, xmm0, &slow);
+ __ StoreNumberToDoubleElements(rax, rbx, rcx, xmm0,
+ &transition_double_elements);
__ ret(0);
+
+ __ bind(&transition_smi_elements);
+ __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
+
+ // Transition the array appropriately depending on the value type.
+ __ movq(r9, FieldOperand(rax, HeapObject::kMapOffset));
+ __ CompareRoot(r9, Heap::kHeapNumberMapRootIndex);
+ __ j(not_equal, &non_double_value);
+
+ // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
+ // FAST_DOUBLE_ELEMENTS and complete the store.
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_DOUBLE_ELEMENTS,
+ rbx,
+ rdi,
+ &slow);
+ ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
+ __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
+ __ jmp(&fast_double_without_map_check);
+
+ __ bind(&non_double_value);
+ // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ rbx,
+ rdi,
+ &slow);
+ ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
+ __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
+ __ jmp(&finish_object_store);
+
+ __ bind(&transition_double_elements);
+ // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
+ // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
+ // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
+ __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
+ __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
+ FAST_ELEMENTS,
+ rbx,
+ rdi,
+ &slow);
+ ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
+ __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
+ __ jmp(&finish_object_store);
}
@@ -1399,11 +1473,10 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
// -- rsp[0] : return address
// -----------------------------------
//
- // This accepts as a receiver anything JSObject::SetElementsLength accepts
- // (currently anything except for external and pixel arrays which means
- // anything with elements of FixedArray type.), but currently is restricted
- // to JSArray.
- // Value must be a number, but only smis are accepted as the most common case.
+ // This accepts as a receiver anything JSArray::SetElementsLength accepts
+ // (currently anything except for external arrays which means anything with
+ // elements of FixedArray type). Value must be a number, but only smis are
+ // accepted as the most common case.
Label miss;
@@ -1425,6 +1498,13 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
__ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
__ j(not_equal, &miss);
+ // Check that the array has fast properties, otherwise the length
+ // property might have been redefined.
+ __ movq(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset));
+ __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset),
+ Heap::kHashTableMapRootIndex);
+ __ j(equal, &miss);
+
// Check that value is a smi.
__ JumpIfNotSmi(value, &miss);
@@ -1643,6 +1723,9 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
rewritten = stub.GetCode();
} else {
ICCompareStub stub(op_, state);
+ if (state == KNOWN_OBJECTS) {
+ stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
+ }
rewritten = stub.GetCode();
}
set_target(*rewritten);
diff --git a/src/3rdparty/v8/src/x64/lithium-codegen-x64.cc b/src/3rdparty/v8/src/x64/lithium-codegen-x64.cc
index 90e897b..56ba6f3 100644
--- a/src/3rdparty/v8/src/x64/lithium-codegen-x64.cc
+++ b/src/3rdparty/v8/src/x64/lithium-codegen-x64.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -43,42 +43,31 @@ class SafepointGenerator : public CallWrapper {
public:
SafepointGenerator(LCodeGen* codegen,
LPointerMap* pointers,
- int deoptimization_index)
+ Safepoint::DeoptMode mode)
: codegen_(codegen),
pointers_(pointers),
- deoptimization_index_(deoptimization_index) { }
+ deopt_mode_(mode) { }
virtual ~SafepointGenerator() { }
virtual void BeforeCall(int call_size) const {
- ASSERT(call_size >= 0);
- // Ensure that we have enough space after the previous safepoint position
- // for the jump generated there.
- int call_end = codegen_->masm()->pc_offset() + call_size;
- int prev_jump_end = codegen_->LastSafepointEnd() + kMinSafepointSize;
- if (call_end < prev_jump_end) {
- int padding_size = prev_jump_end - call_end;
- STATIC_ASSERT(kMinSafepointSize <= 9); // One multibyte nop is enough.
- codegen_->masm()->nop(padding_size);
- }
+ codegen_->EnsureSpaceForLazyDeopt(Deoptimizer::patch_size() - call_size);
}
virtual void AfterCall() const {
- codegen_->RecordSafepoint(pointers_, deoptimization_index_);
+ codegen_->RecordSafepoint(pointers_, deopt_mode_);
}
private:
- static const int kMinSafepointSize =
- MacroAssembler::kShortCallInstructionLength;
LCodeGen* codegen_;
LPointerMap* pointers_;
- int deoptimization_index_;
+ Safepoint::DeoptMode deopt_mode_;
};
#define __ masm()->
bool LCodeGen::GenerateCode() {
- HPhase phase("Code generation", chunk());
+ HPhase phase("Z_Code generation", chunk());
ASSERT(is_unused());
status_ = GENERATING;
@@ -100,7 +89,6 @@ void LCodeGen::FinishCode(Handle<Code> code) {
code->set_stack_slots(GetStackSlotCount());
code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
PopulateDeoptimizationData(code);
- Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
}
@@ -151,7 +139,7 @@ bool LCodeGen::GeneratePrologue() {
// when called as functions (without an explicit receiver
// object). rcx is zero for method calls and non-zero for function
// calls.
- if (info_->is_strict_mode() || info_->is_native()) {
+ if (!info_->is_classic_mode() || info_->is_native()) {
Label ok;
__ testq(rcx, rcx);
__ j(zero, &ok, Label::kNear);
@@ -196,18 +184,17 @@ bool LCodeGen::GeneratePrologue() {
// Possibly allocate a local context.
int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
- if (heap_slots > 0 ||
- (scope()->is_qml_mode() && scope()->is_global_scope())) {
+ if (heap_slots > 0) {
Comment(";;; Allocate local context");
// Argument to NewContext is the function, which is still in rdi.
__ push(rdi);
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub((heap_slots < 0)?0:heap_slots);
+ FastNewContextStub stub(heap_slots);
__ CallStub(&stub);
} else {
__ CallRuntime(Runtime::kNewFunctionContext, 1);
}
- RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
+ RecordSafepoint(Safepoint::kNoLazyDeopt);
// Context is returned in both rax and rsi. It replaces the context
// passed to us. It's saved in the stack and kept live in rsi.
__ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
@@ -256,19 +243,11 @@ bool LCodeGen::GenerateBody() {
instr->CompileToNative(this);
}
}
+ EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
return !is_aborted();
}
-LInstruction* LCodeGen::GetNextInstruction() {
- if (current_instruction_ < instructions_->length() - 1) {
- return instructions_->at(current_instruction_ + 1);
- } else {
- return NULL;
- }
-}
-
-
bool LCodeGen::GenerateJumpTable() {
for (int i = 0; i < jump_table_.length(); i++) {
__ bind(&jump_table_[i].label);
@@ -290,18 +269,6 @@ bool LCodeGen::GenerateDeferredCode() {
code->Generate();
__ jmp(code->exit());
}
-
- // Pad code to ensure that the last piece of deferred code have
- // room for lazy bailout.
- while ((masm()->pc_offset() - LastSafepointEnd())
- < Deoptimizer::patch_size()) {
- int padding = masm()->pc_offset() - LastSafepointEnd();
- if (padding > 9) {
- __ nop(9);
- } else {
- __ nop(padding);
- }
- }
}
// Deferred code is the last part of the instruction sequence. Mark
@@ -313,20 +280,6 @@ bool LCodeGen::GenerateDeferredCode() {
bool LCodeGen::GenerateSafepointTable() {
ASSERT(is_done());
- // Ensure that there is space at the end of the code to write a number
- // of jump instructions, as well as to afford writing a call near the end
- // of the code.
- // The jumps are used when there isn't room in the code stream to write
- // a long call instruction. Instead it writes a shorter call to a
- // jump instruction in the same code object.
- // The calls are used when lazy deoptimizing a function and calls to a
- // deoptimization function.
- int short_deopts = safepoints_.CountShortDeoptimizationIntervals(
- static_cast<unsigned>(MacroAssembler::kJumpInstructionLength));
- int byte_count = (short_deopts) * MacroAssembler::kJumpInstructionLength;
- while (byte_count-- > 0) {
- __ int3();
- }
safepoints_.Emit(masm(), GetStackSlotCount());
return !is_aborted();
}
@@ -415,7 +368,19 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
WriteTranslation(environment->outer(), translation);
int closure_id = DefineDeoptimizationLiteral(environment->closure());
- translation->BeginFrame(environment->ast_id(), closure_id, height);
+ switch (environment->frame_type()) {
+ case JS_FUNCTION:
+ translation->BeginJSFrame(environment->ast_id(), closure_id, height);
+ break;
+ case JS_CONSTRUCT:
+ translation->BeginConstructStubFrame(closure_id, translation_size);
+ break;
+ case ARGUMENTS_ADAPTOR:
+ translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
+ break;
+ default:
+ UNREACHABLE();
+ }
for (int i = 0; i < translation_size; ++i) {
LOperand* value = environment->values()->at(i);
// spilled_registers_ and spilled_double_registers_ are either
@@ -488,11 +453,12 @@ void LCodeGen::CallCodeGeneric(Handle<Code> code,
LInstruction* instr,
SafepointMode safepoint_mode,
int argc) {
+ EnsureSpaceForLazyDeopt(Deoptimizer::patch_size() - masm()->CallSize(code));
ASSERT(instr != NULL);
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
__ call(code, mode);
- RegisterLazyDeoptimization(instr, safepoint_mode, argc);
+ RecordSafepointWithLazyDeopt(instr, safepoint_mode, argc);
// Signal that we don't inline smi code before these stubs in the
// optimizing code generator.
@@ -519,7 +485,7 @@ void LCodeGen::CallRuntime(const Runtime::Function* function,
RecordPosition(pointers->position());
__ CallRuntime(function, num_arguments);
- RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT, 0);
+ RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
}
@@ -529,39 +495,12 @@ void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
__ CallRuntimeSaveDoubles(id);
RecordSafepointWithRegisters(
- instr->pointer_map(), argc, Safepoint::kNoDeoptimizationIndex);
-}
-
-
-void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr,
- SafepointMode safepoint_mode,
- int argc) {
- // Create the environment to bailout to. If the call has side effects
- // execution has to continue after the call otherwise execution can continue
- // from a previous bailout point repeating the call.
- LEnvironment* deoptimization_environment;
- if (instr->HasDeoptimizationEnvironment()) {
- deoptimization_environment = instr->deoptimization_environment();
- } else {
- deoptimization_environment = instr->environment();
- }
-
- RegisterEnvironmentForDeoptimization(deoptimization_environment);
- if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
- ASSERT(argc == 0);
- RecordSafepoint(instr->pointer_map(),
- deoptimization_environment->deoptimization_index());
- } else {
- ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS);
- RecordSafepointWithRegisters(
- instr->pointer_map(),
- argc,
- deoptimization_environment->deoptimization_index());
- }
+ instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
}
-void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
+void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
+ Safepoint::DeoptMode mode) {
if (!environment->HasBeenRegistered()) {
// Physical stack frame layout:
// -x ............. -4 0 ..................................... y
@@ -577,24 +516,30 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
// |>------------ translation_size ------------<|
int frame_count = 0;
+ int jsframe_count = 0;
for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
++frame_count;
+ if (e->frame_type() == JS_FUNCTION) {
+ ++jsframe_count;
+ }
}
- Translation translation(&translations_, frame_count);
+ Translation translation(&translations_, frame_count, jsframe_count);
WriteTranslation(environment, &translation);
int deoptimization_index = deoptimizations_.length();
- environment->Register(deoptimization_index, translation.index());
+ int pc_offset = masm()->pc_offset();
+ environment->Register(deoptimization_index,
+ translation.index(),
+ (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
deoptimizations_.Add(environment);
}
}
void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
- RegisterEnvironmentForDeoptimization(environment);
+ RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
ASSERT(environment->HasBeenRegistered());
int id = environment->deoptimization_index();
Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
- ASSERT(entry != NULL);
if (entry == NULL) {
Abort("bailout was not prepared");
return;
@@ -617,7 +562,6 @@ void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
int length = deoptimizations_.length();
if (length == 0) return;
- ASSERT(FLAG_deopt);
Handle<DeoptimizationInputData> data =
factory()->NewDeoptimizationInputData(length, TENURED);
@@ -642,6 +586,7 @@ void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
data->SetArgumentsStackHeight(i,
Smi::FromInt(env->arguments_stack_height()));
+ data->SetPc(i, Smi::FromInt(env->pc_offset()));
}
code->set_deoptimization_data(*data);
}
@@ -673,17 +618,29 @@ void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
}
+void LCodeGen::RecordSafepointWithLazyDeopt(
+ LInstruction* instr, SafepointMode safepoint_mode, int argc) {
+ if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
+ RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
+ } else {
+ ASSERT(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS);
+ RecordSafepointWithRegisters(
+ instr->pointer_map(), argc, Safepoint::kLazyDeopt);
+ }
+}
+
+
void LCodeGen::RecordSafepoint(
LPointerMap* pointers,
Safepoint::Kind kind,
int arguments,
- int deoptimization_index) {
+ Safepoint::DeoptMode deopt_mode) {
ASSERT(kind == expected_safepoint_kind_);
const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
- kind, arguments, deoptimization_index);
+ kind, arguments, deopt_mode);
for (int i = 0; i < operands->length(); i++) {
LOperand* pointer = operands->at(i);
if (pointer->IsStackSlot()) {
@@ -700,22 +657,21 @@ void LCodeGen::RecordSafepoint(
void LCodeGen::RecordSafepoint(LPointerMap* pointers,
- int deoptimization_index) {
- RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
+ Safepoint::DeoptMode deopt_mode) {
+ RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);
}
-void LCodeGen::RecordSafepoint(int deoptimization_index) {
+void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
LPointerMap empty_pointers(RelocInfo::kNoPosition);
- RecordSafepoint(&empty_pointers, deoptimization_index);
+ RecordSafepoint(&empty_pointers, deopt_mode);
}
void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
int arguments,
- int deoptimization_index) {
- RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
- deoptimization_index);
+ Safepoint::DeoptMode deopt_mode) {
+ RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
}
@@ -750,12 +706,6 @@ void LCodeGen::DoGap(LGap* gap) {
LParallelMove* move = gap->GetParallelMove(inner_pos);
if (move != NULL) DoParallelMove(move);
}
-
- LInstruction* next = GetNextInstruction();
- if (next != NULL && next->IsLazyBailout()) {
- int pc = masm()->pc_offset();
- safepoints_.SetPcAfterGap(pc);
- }
}
@@ -1049,11 +999,11 @@ void LCodeGen::DoMulI(LMulI* instr) {
DeoptimizeIf(no_condition, instr->environment());
}
} else if (right->IsStackSlot()) {
- __ or_(kScratchRegister, ToOperand(right));
+ __ orl(kScratchRegister, ToOperand(right));
DeoptimizeIf(sign, instr->environment());
} else {
// Test the non-zero operand for negative sign.
- __ or_(kScratchRegister, ToRegister(right));
+ __ orl(kScratchRegister, ToRegister(right));
DeoptimizeIf(sign, instr->environment());
}
__ bind(&done);
@@ -1218,8 +1168,13 @@ void LCodeGen::DoConstantD(LConstantD* instr) {
void LCodeGen::DoConstantT(LConstantT* instr) {
- ASSERT(instr->result()->IsRegister());
- __ Move(ToRegister(instr->result()), instr->value());
+ Handle<Object> value = instr->value();
+ if (value->IsSmi()) {
+ __ Move(ToRegister(instr->result()), value);
+ } else {
+ __ LoadHeapObject(ToRegister(instr->result()),
+ Handle<HeapObject>::cast(value));
+ }
}
@@ -1268,6 +1223,49 @@ void LCodeGen::DoValueOf(LValueOf* instr) {
}
+void LCodeGen::DoDateField(LDateField* instr) {
+ Register object = ToRegister(instr->InputAt(0));
+ Register result = ToRegister(instr->result());
+ Smi* index = instr->index();
+ Label runtime, done;
+ ASSERT(object.is(result));
+ ASSERT(object.is(rax));
+
+#ifdef DEBUG
+ __ AbortIfSmi(object);
+ __ CmpObjectType(object, JS_DATE_TYPE, kScratchRegister);
+ __ Assert(equal, "Trying to get date field from non-date.");
+#endif
+
+ if (index->value() == 0) {
+ __ movq(result, FieldOperand(object, JSDate::kValueOffset));
+ } else {
+ if (index->value() < JSDate::kFirstUncachedField) {
+ ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
+ __ movq(kScratchRegister, stamp);
+ __ cmpq(kScratchRegister, FieldOperand(object,
+ JSDate::kCacheStampOffset));
+ __ j(not_equal, &runtime, Label::kNear);
+ __ movq(result, FieldOperand(object, JSDate::kValueOffset +
+ kPointerSize * index->value()));
+ __ jmp(&done);
+ }
+ __ bind(&runtime);
+ __ PrepareCallCFunction(2);
+#ifdef _WIN64
+ __ movq(rcx, object);
+ __ movq(rdx, index, RelocInfo::NONE);
+#else
+ __ movq(rdi, object);
+ __ movq(rsi, index, RelocInfo::NONE);
+#endif
+ __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ __ bind(&done);
+ }
+}
+
+
void LCodeGen::DoBitNotI(LBitNotI* instr) {
LOperand* input = instr->InputAt(0);
ASSERT(input->Equals(instr->result()));
@@ -1681,6 +1679,30 @@ void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
}
+Condition LCodeGen::EmitIsString(Register input,
+ Register temp1,
+ Label* is_not_string) {
+ __ JumpIfSmi(input, is_not_string);
+ Condition cond = masm_->IsObjectStringType(input, temp1, temp1);
+
+ return cond;
+}
+
+
+void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
+ Register reg = ToRegister(instr->InputAt(0));
+ Register temp = ToRegister(instr->TempAt(0));
+
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
+ Label* false_label = chunk_->GetAssemblyLabel(false_block);
+
+ Condition true_cond = EmitIsString(reg, temp, false_label);
+
+ EmitBranch(true_block, false_block, true_cond);
+}
+
+
void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -1712,6 +1734,21 @@ void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
}
+void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
+ Token::Value op = instr->op();
+ int true_block = chunk_->LookupDestination(instr->true_block_id());
+ int false_block = chunk_->LookupDestination(instr->false_block_id());
+
+ Handle<Code> ic = CompareIC::GetUninitialized(op);
+ CallCode(ic, RelocInfo::CODE_TARGET, instr);
+
+ Condition condition = TokenToCondition(op, false);
+ __ testq(rax, rax);
+
+ EmitBranch(true_block, false_block, condition);
+}
+
+
static InstanceType TestType(HHasInstanceTypeAndBranch* instr) {
InstanceType from = instr->from();
InstanceType to = instr->to();
@@ -1775,13 +1812,17 @@ void LCodeGen::DoHasCachedArrayIndexAndBranch(
// Branches to a label or falls through with the answer in the z flag.
-// Trashes the temp register and possibly input (if it and temp are aliased).
+// Trashes the temp register.
void LCodeGen::EmitClassOfTest(Label* is_true,
Label* is_false,
Handle<String> class_name,
Register input,
Register temp,
- Register scratch) {
+ Register temp2) {
+ ASSERT(!input.is(temp));
+ ASSERT(!input.is(temp2));
+ ASSERT(!temp.is(temp2));
+
__ JumpIfSmi(input, is_false);
if (class_name->IsEqualTo(CStrVector("Function"))) {
@@ -1802,11 +1843,10 @@ void LCodeGen::EmitClassOfTest(Label* is_true,
// Faster code path to avoid two compares: subtract lower bound from the
// actual type and do a signed compare with the width of the type range.
__ movq(temp, FieldOperand(input, HeapObject::kMapOffset));
- __ movq(scratch, FieldOperand(temp, Map::kInstanceTypeOffset));
- __ subb(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
- __ cmpb(scratch,
- Immediate(static_cast<int8_t>(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
- FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)));
+ __ movzxbl(temp2, FieldOperand(temp, Map::kInstanceTypeOffset));
+ __ subq(temp2, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
+ __ cmpq(temp2, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
+ FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
__ j(above, is_false);
}
@@ -1890,7 +1930,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* instr)
: LDeferredCode(codegen), instr_(instr) { }
virtual void Generate() {
- codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
+ codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
}
virtual LInstruction* instr() { return instr_; }
Label* map_check() { return &map_check_; }
@@ -1917,9 +1957,10 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
Register map = ToRegister(instr->TempAt(0));
__ movq(map, FieldOperand(object, HeapObject::kMapOffset));
__ bind(deferred->map_check()); // Label for calculating code patching.
- __ movq(kScratchRegister, factory()->the_hole_value(),
- RelocInfo::EMBEDDED_OBJECT);
- __ cmpq(map, kScratchRegister); // Patched to cached map.
+ Handle<JSGlobalPropertyCell> cache_cell =
+ factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+ __ movq(kScratchRegister, cache_cell, RelocInfo::GLOBAL_PROPERTY_CELL);
+ __ cmpq(map, Operand(kScratchRegister, 0));
__ j(not_equal, &cache_miss, Label::kNear);
// Patched to load either true or false.
__ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
@@ -1948,8 +1989,8 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
}
-void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
- Label* map_check) {
+void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
+ Label* map_check) {
{
PushSafepointRegistersScope scope(this);
InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>(
@@ -1957,7 +1998,7 @@ void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
InstanceofStub stub(flags);
__ push(ToRegister(instr->InputAt(0)));
- __ Push(instr->function());
+ __ PushHeapObject(instr->function());
static const int kAdditionalDelta = 10;
int delta =
@@ -1975,6 +2016,9 @@ void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
RECORD_SAFEPOINT_WITH_REGISTERS,
2);
ASSERT(delta == masm_->SizeOfCodeGeneratedSince(map_check));
+ ASSERT(instr->HasDeoptimizationEnvironment());
+ LEnvironment* env = instr->deoptimization_environment();
+ safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
// Move result to a register that survives the end of the
// PushSafepointRegisterScope.
__ movq(kScratchRegister, rax);
@@ -2024,13 +2068,7 @@ void LCodeGen::DoReturn(LReturn* instr) {
void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
Register result = ToRegister(instr->result());
- if (result.is(rax)) {
- __ load_rax(instr->hydrogen()->cell().location(),
- RelocInfo::GLOBAL_PROPERTY_CELL);
- } else {
- __ movq(result, instr->hydrogen()->cell(), RelocInfo::GLOBAL_PROPERTY_CELL);
- __ movq(result, Operand(result, 0));
- }
+ __ LoadGlobalCell(result, instr->hydrogen()->cell());
if (instr->hydrogen()->RequiresHoleCheck()) {
__ CompareRoot(result, Heap::kTheHoleValueRootIndex);
DeoptimizeIf(equal, instr->environment());
@@ -2051,44 +2089,28 @@ void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
- Register object = ToRegister(instr->TempAt(0));
- Register address = ToRegister(instr->TempAt(1));
- Register value = ToRegister(instr->InputAt(0));
- ASSERT(!value.is(object));
- Handle<JSGlobalPropertyCell> cell_handle(instr->hydrogen()->cell());
-
- __ movq(address, cell_handle, RelocInfo::GLOBAL_PROPERTY_CELL);
+ Register value = ToRegister(instr->value());
+ Handle<JSGlobalPropertyCell> cell_handle = instr->hydrogen()->cell();
// If the cell we are storing to contains the hole it could have
// been deleted from the property dictionary. In that case, we need
// to update the property details in the property dictionary to mark
// it as no longer deleted. We deoptimize in that case.
if (instr->hydrogen()->RequiresHoleCheck()) {
- __ CompareRoot(Operand(address, 0), Heap::kTheHoleValueRootIndex);
+ // We have a temp because CompareRoot might clobber kScratchRegister.
+ Register cell = ToRegister(instr->TempAt(0));
+ ASSERT(!value.is(cell));
+ __ movq(cell, cell_handle, RelocInfo::GLOBAL_PROPERTY_CELL);
+ __ CompareRoot(Operand(cell, 0), Heap::kTheHoleValueRootIndex);
DeoptimizeIf(equal, instr->environment());
+ // Store the value.
+ __ movq(Operand(cell, 0), value);
+ } else {
+ // Store the value.
+ __ movq(kScratchRegister, cell_handle, RelocInfo::GLOBAL_PROPERTY_CELL);
+ __ movq(Operand(kScratchRegister, 0), value);
}
-
- // Store the value.
- __ movq(Operand(address, 0), value);
-
- if (instr->hydrogen()->NeedsWriteBarrier()) {
- Label smi_store;
- HType type = instr->hydrogen()->value()->type();
- if (!type.IsHeapNumber() && !type.IsString() && !type.IsNonPrimitive()) {
- __ JumpIfSmi(value, &smi_store, Label::kNear);
- }
-
- int offset = JSGlobalPropertyCell::kValueOffset - kHeapObjectTag;
- __ lea(object, Operand(address, -offset));
- // Cells are always in the remembered set.
- __ RecordWrite(object,
- address,
- value,
- kSaveFPRegs,
- OMIT_REMEMBERED_SET,
- OMIT_SMI_CHECK);
- __ bind(&smi_store);
- }
+ // Cells are always rescanned, so no write barrier here.
}
@@ -2097,7 +2119,7 @@ void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
ASSERT(ToRegister(instr->value()).is(rax));
__ Move(rcx, instr->name());
- Handle<Code> ic = instr->strict_mode()
+ Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
@@ -2108,13 +2130,37 @@ void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Register context = ToRegister(instr->context());
Register result = ToRegister(instr->result());
__ movq(result, ContextOperand(context, instr->slot_index()));
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
+ if (instr->hydrogen()->DeoptimizesOnHole()) {
+ DeoptimizeIf(equal, instr->environment());
+ } else {
+ Label is_not_hole;
+ __ j(not_equal, &is_not_hole, Label::kNear);
+ __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
+ __ bind(&is_not_hole);
+ }
+ }
}
void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
Register context = ToRegister(instr->context());
Register value = ToRegister(instr->value());
- __ movq(ContextOperand(context, instr->slot_index()), value);
+
+ Operand target = ContextOperand(context, instr->slot_index());
+
+ Label skip_assignment;
+ if (instr->hydrogen()->RequiresHoleCheck()) {
+ __ CompareRoot(target, Heap::kTheHoleValueRootIndex);
+ if (instr->hydrogen()->DeoptimizesOnHole()) {
+ DeoptimizeIf(equal, instr->environment());
+ } else {
+ __ j(not_equal, &skip_assignment);
+ }
+ }
+ __ movq(target, value);
+
if (instr->hydrogen()->NeedsWriteBarrier()) {
HType type = instr->hydrogen()->value()->type();
SmiCheck check_needed =
@@ -2129,6 +2175,8 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
EMIT_REMEMBERED_SET,
check_needed);
}
+
+ __ bind(&skip_assignment);
}
@@ -2150,7 +2198,7 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
Handle<String> name) {
LookupResult lookup(isolate());
type->LookupInDescriptors(NULL, *name, &lookup);
- ASSERT(lookup.IsProperty() &&
+ ASSERT(lookup.IsFound() &&
(lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION));
if (lookup.type() == FIELD) {
int index = lookup.GetLocalFieldIndexFromMap(*type);
@@ -2166,7 +2214,7 @@ void LCodeGen::EmitLoadFieldOrConstantFunction(Register result,
}
} else {
Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type));
- LoadHeapObject(result, Handle<HeapObject>::cast(function));
+ __ LoadHeapObject(result, function);
}
}
@@ -2495,14 +2543,9 @@ void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
}
-void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
+void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
Register receiver = ToRegister(instr->receiver());
Register function = ToRegister(instr->function());
- Register length = ToRegister(instr->length());
- Register elements = ToRegister(instr->elements());
- ASSERT(receiver.is(rax)); // Used for parameter count.
- ASSERT(function.is(rdi)); // Required by InvokeFunction.
- ASSERT(ToRegister(instr->result()).is(rax));
// If the receiver is null or undefined, we have to pass the global
// object as a receiver to normal functions. Values have to be
@@ -2545,6 +2588,17 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
__ movq(receiver,
FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
__ bind(&receiver_ok);
+}
+
+
+void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
+ Register receiver = ToRegister(instr->receiver());
+ Register function = ToRegister(instr->function());
+ Register length = ToRegister(instr->length());
+ Register elements = ToRegister(instr->elements());
+ ASSERT(receiver.is(rax)); // Used for parameter count.
+ ASSERT(function.is(rdi)); // Required by InvokeFunction.
+ ASSERT(ToRegister(instr->result()).is(rax));
// Copy the arguments to this function possibly from the
// adaptor frame below it.
@@ -2570,13 +2624,10 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
__ bind(&invoke);
ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
- SafepointGenerator safepoint_generator(this,
- pointers,
- env->deoptimization_index());
- v8::internal::ParameterCount actual(rax);
+ SafepointGenerator safepoint_generator(
+ this, pointers, Safepoint::kLazyDeopt);
+ ParameterCount actual(rax);
__ InvokeFunction(function, actual, CALL_FUNCTION,
safepoint_generator, CALL_AS_METHOD);
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -2591,7 +2642,7 @@ void LCodeGen::DoPushArgument(LPushArgument* instr) {
void LCodeGen::DoThisFunction(LThisFunction* instr) {
Register result = ToRegister(instr->result());
- LoadHeapObject(result, instr->hydrogen()->closure());
+ __ LoadHeapObject(result, instr->hydrogen()->closure());
}
@@ -2609,9 +2660,17 @@ void LCodeGen::DoOuterContext(LOuterContext* instr) {
}
+void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
+ __ push(rsi); // The context is the first argument.
+ __ PushHeapObject(instr->hydrogen()->pairs());
+ __ Push(Smi::FromInt(instr->hydrogen()->flags()));
+ CallRuntime(Runtime::kDeclareGlobals, 3, instr);
+}
+
+
void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
Register result = ToRegister(instr->result());
- __ movq(result, instr->qml_global()?QmlGlobalObjectOperand():GlobalObjectOperand());
+ __ movq(result, GlobalObjectOperand());
}
@@ -2626,35 +2685,48 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
int arity,
LInstruction* instr,
CallKind call_kind) {
- // Change context if needed.
- bool change_context =
- (info()->closure()->context() != function->context()) ||
- scope()->contains_with() ||
- (scope()->num_heap_slots() > 0);
- if (change_context) {
- __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
- }
-
- // Set rax to arguments count if adaption is not needed. Assumes that rax
- // is available to write to at this point.
- if (!function->NeedsArgumentsAdaption()) {
- __ Set(rax, arity);
- }
+ bool can_invoke_directly = !function->NeedsArgumentsAdaption() ||
+ function->shared()->formal_parameter_count() == arity;
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
- // Invoke function.
- __ SetCallKind(rcx, call_kind);
- if (*function == *info()->closure()) {
- __ CallSelf();
+ if (can_invoke_directly) {
+ __ LoadHeapObject(rdi, function);
+
+ // Change context if needed.
+ bool change_context =
+ (info()->closure()->context() != function->context()) ||
+ scope()->contains_with() ||
+ (scope()->num_heap_slots() > 0);
+ if (change_context) {
+ __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
+ }
+
+ // Set rax to arguments count if adaption is not needed. Assumes that rax
+ // is available to write to at this point.
+ if (!function->NeedsArgumentsAdaption()) {
+ __ Set(rax, arity);
+ }
+
+ // Invoke function.
+ __ SetCallKind(rcx, call_kind);
+ if (*function == *info()->closure()) {
+ __ CallSelf();
+ } else {
+ __ call(FieldOperand(rdi, JSFunction::kCodeEntryOffset));
+ }
+
+ // Set up deoptimization.
+ RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
} else {
- __ call(FieldOperand(rdi, JSFunction::kCodeEntryOffset));
+ // We need to adapt arguments.
+ SafepointGenerator generator(
+ this, pointers, Safepoint::kLazyDeopt);
+ ParameterCount count(arity);
+ __ InvokeFunction(function, count, CALL_FUNCTION, generator, call_kind);
}
- // Setup deoptimization.
- RegisterLazyDeoptimization(instr, RECORD_SIMPLE_SAFEPOINT, 0);
-
// Restore context.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}
@@ -2662,7 +2734,6 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
ASSERT(ToRegister(instr->result()).is(rax));
- __ Move(rdi, instr->function());
CallKnownFunction(instr->function(),
instr->arity(),
instr,
@@ -2836,10 +2907,10 @@ void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
// This addition might give a result that isn't the correct for
// rounding, due to loss of precision, but only for a number that's
// so big that the conversion below will overflow anyway.
- __ addsd(input_reg, xmm_scratch);
+ __ addsd(xmm_scratch, input_reg);
// Compute Math.floor(input).
// Use truncating instruction (OK because input is positive).
- __ cvttsd2si(output_reg, input_reg);
+ __ cvttsd2si(output_reg, xmm_scratch);
// Overflow is signalled with minint.
__ cmpl(output_reg, Immediate(0x80000000));
DeoptimizeIf(equal, instr->environment());
@@ -2877,65 +2948,158 @@ void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
XMMRegister xmm_scratch = xmm0;
XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
+
+ // Note that according to ECMA-262 15.8.2.13:
+ // Math.pow(-Infinity, 0.5) == Infinity
+ // Math.sqrt(-Infinity) == NaN
+ Label done, sqrt;
+ // Check base for -Infinity. According to IEEE-754, double-precision
+ // -Infinity has the highest 12 bits set and the lowest 52 bits cleared.
+ __ movq(kScratchRegister, V8_INT64_C(0xFFF0000000000000), RelocInfo::NONE);
+ __ movq(xmm_scratch, kScratchRegister);
+ __ ucomisd(xmm_scratch, input_reg);
+ // Comparing -Infinity with NaN results in "unordered", which sets the
+ // zero flag as if both were equal. However, it also sets the carry flag.
+ __ j(not_equal, &sqrt, Label::kNear);
+ __ j(carry, &sqrt, Label::kNear);
+ // If input is -Infinity, return Infinity.
+ __ xorps(input_reg, input_reg);
+ __ subsd(input_reg, xmm_scratch);
+ __ jmp(&done, Label::kNear);
+
+ // Square root.
+ __ bind(&sqrt);
__ xorps(xmm_scratch, xmm_scratch);
__ addsd(input_reg, xmm_scratch); // Convert -0 to +0.
__ sqrtsd(input_reg, input_reg);
+ __ bind(&done);
}
void LCodeGen::DoPower(LPower* instr) {
- LOperand* left = instr->InputAt(0);
- XMMRegister left_reg = ToDoubleRegister(left);
- ASSERT(!left_reg.is(xmm1));
- LOperand* right = instr->InputAt(1);
- XMMRegister result_reg = ToDoubleRegister(instr->result());
Representation exponent_type = instr->hydrogen()->right()->representation();
- if (exponent_type.IsDouble()) {
- __ PrepareCallCFunction(2);
- // Move arguments to correct registers
- __ movaps(xmm0, left_reg);
- ASSERT(ToDoubleRegister(right).is(xmm1));
- __ CallCFunction(
- ExternalReference::power_double_double_function(isolate()), 2);
- } else if (exponent_type.IsInteger32()) {
- __ PrepareCallCFunction(2);
- // Move arguments to correct registers: xmm0 and edi (not rdi).
- // On Windows, the registers are xmm0 and edx.
- __ movaps(xmm0, left_reg);
+ // Having marked this as a call, we can use any registers.
+ // Just make sure that the input/output registers are the expected ones.
+
+ // Choose register conforming to calling convention (when bailing out).
#ifdef _WIN64
- ASSERT(ToRegister(right).is(rdx));
+ Register exponent = rdx;
#else
- ASSERT(ToRegister(right).is(rdi));
+ Register exponent = rdi;
#endif
- __ CallCFunction(
- ExternalReference::power_double_int_function(isolate()), 2);
+ ASSERT(!instr->InputAt(1)->IsRegister() ||
+ ToRegister(instr->InputAt(1)).is(exponent));
+ ASSERT(!instr->InputAt(1)->IsDoubleRegister() ||
+ ToDoubleRegister(instr->InputAt(1)).is(xmm1));
+ ASSERT(ToDoubleRegister(instr->InputAt(0)).is(xmm2));
+ ASSERT(ToDoubleRegister(instr->result()).is(xmm3));
+
+ if (exponent_type.IsTagged()) {
+ Label no_deopt;
+ __ JumpIfSmi(exponent, &no_deopt);
+ __ CmpObjectType(exponent, HEAP_NUMBER_TYPE, rcx);
+ DeoptimizeIf(not_equal, instr->environment());
+ __ bind(&no_deopt);
+ MathPowStub stub(MathPowStub::TAGGED);
+ __ CallStub(&stub);
+ } else if (exponent_type.IsInteger32()) {
+ MathPowStub stub(MathPowStub::INTEGER);
+ __ CallStub(&stub);
} else {
- ASSERT(exponent_type.IsTagged());
- Register right_reg = ToRegister(right);
+ ASSERT(exponent_type.IsDouble());
+ MathPowStub stub(MathPowStub::DOUBLE);
+ __ CallStub(&stub);
+ }
+}
- Label non_smi, call;
- __ JumpIfNotSmi(right_reg, &non_smi);
- __ SmiToInteger32(right_reg, right_reg);
- __ cvtlsi2sd(xmm1, right_reg);
- __ jmp(&call);
- __ bind(&non_smi);
- __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE , kScratchRegister);
- DeoptimizeIf(not_equal, instr->environment());
- __ movsd(xmm1, FieldOperand(right_reg, HeapNumber::kValueOffset));
+void LCodeGen::DoRandom(LRandom* instr) {
+ class DeferredDoRandom: public LDeferredCode {
+ public:
+ DeferredDoRandom(LCodeGen* codegen, LRandom* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() { codegen()->DoDeferredRandom(instr_); }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LRandom* instr_;
+ };
- __ bind(&call);
- __ PrepareCallCFunction(2);
- // Move arguments to correct registers xmm0 and xmm1.
- __ movaps(xmm0, left_reg);
- // Right argument is already in xmm1.
- __ CallCFunction(
- ExternalReference::power_double_double_function(isolate()), 2);
- }
- // Return value is in xmm0.
- __ movaps(result_reg, xmm0);
- // Restore context register.
+ DeferredDoRandom* deferred = new DeferredDoRandom(this, instr);
+
+ // Having marked this instruction as a call we can use any
+ // registers.
+ ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
+
+ // Choose the right register for the first argument depending on
+ // calling convention.
+#ifdef _WIN64
+ ASSERT(ToRegister(instr->InputAt(0)).is(rcx));
+ Register global_object = rcx;
+#else
+ ASSERT(ToRegister(instr->InputAt(0)).is(rdi));
+ Register global_object = rdi;
+#endif
+
+ static const int kSeedSize = sizeof(uint32_t);
+ STATIC_ASSERT(kPointerSize == 2 * kSeedSize);
+
+ __ movq(global_object,
+ FieldOperand(global_object, GlobalObject::kGlobalContextOffset));
+ static const int kRandomSeedOffset =
+ FixedArray::kHeaderSize + Context::RANDOM_SEED_INDEX * kPointerSize;
+ __ movq(rbx, FieldOperand(global_object, kRandomSeedOffset));
+ // rbx: FixedArray of the global context's random seeds
+
+ // Load state[0].
+ __ movl(rax, FieldOperand(rbx, ByteArray::kHeaderSize));
+ // If state[0] == 0, call runtime to initialize seeds.
+ __ testl(rax, rax);
+ __ j(zero, deferred->entry());
+ // Load state[1].
+ __ movl(rcx, FieldOperand(rbx, ByteArray::kHeaderSize + kSeedSize));
+
+ // state[0] = 18273 * (state[0] & 0xFFFF) + (state[0] >> 16)
+ // Only operate on the lower 32 bit of rax.
+ __ movl(rdx, rax);
+ __ andl(rdx, Immediate(0xFFFF));
+ __ imull(rdx, rdx, Immediate(18273));
+ __ shrl(rax, Immediate(16));
+ __ addl(rax, rdx);
+ // Save state[0].
+ __ movl(FieldOperand(rbx, ByteArray::kHeaderSize), rax);
+
+ // state[1] = 36969 * (state[1] & 0xFFFF) + (state[1] >> 16)
+ __ movl(rdx, rcx);
+ __ andl(rdx, Immediate(0xFFFF));
+ __ imull(rdx, rdx, Immediate(36969));
+ __ shrl(rcx, Immediate(16));
+ __ addl(rcx, rdx);
+ // Save state[1].
+ __ movl(FieldOperand(rbx, ByteArray::kHeaderSize + kSeedSize), rcx);
+
+ // Random bit pattern = (state[0] << 14) + (state[1] & 0x3FFFF)
+ __ shll(rax, Immediate(14));
+ __ andl(rcx, Immediate(0x3FFFF));
+ __ addl(rax, rcx);
+
+ __ bind(deferred->exit());
+ // Convert 32 random bits in rax to 0.(32 random bits) in a double
+ // by computing:
+ // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
+ __ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
+ __ movd(xmm2, rcx);
+ __ movd(xmm1, rax);
+ __ cvtss2sd(xmm2, xmm2);
+ __ xorps(xmm1, xmm2);
+ __ subsd(xmm1, xmm2);
+}
+
+
+void LCodeGen::DoDeferredRandom(LRandom* instr) {
+ __ PrepareCallCFunction(1);
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ // Return value is in rax.
}
@@ -2947,6 +3111,14 @@ void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
}
+void LCodeGen::DoMathTan(LUnaryMathOperation* instr) {
+ ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
+ TranscendentalCacheStub stub(TranscendentalCache::TAN,
+ TranscendentalCacheStub::UNTAGGED);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+}
+
+
void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
TranscendentalCacheStub stub(TranscendentalCache::COS,
@@ -2986,6 +3158,9 @@ void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
case kMathSin:
DoMathSin(instr);
break;
+ case kMathTan:
+ DoMathTan(instr);
+ break;
case kMathLog:
DoMathLog(instr);
break;
@@ -3001,10 +3176,8 @@ void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
ASSERT(instr->HasPointerMap());
ASSERT(instr->HasDeoptimizationEnvironment());
LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
- SafepointGenerator generator(this, pointers, env->deoptimization_index());
+ SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
ParameterCount count(instr->arity());
__ InvokeFunction(rdi, count, CALL_FUNCTION, generator, CALL_AS_METHOD);
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -3037,13 +3210,13 @@ void LCodeGen::DoCallNamed(LCallNamed* instr) {
void LCodeGen::DoCallFunction(LCallFunction* instr) {
+ ASSERT(ToRegister(instr->function()).is(rdi));
ASSERT(ToRegister(instr->result()).is(rax));
int arity = instr->arity();
CallFunctionStub stub(arity, NO_CALL_FUNCTION_FLAGS);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
- __ Drop(1);
}
@@ -3061,7 +3234,6 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
ASSERT(ToRegister(instr->result()).is(rax));
- __ Move(rdi, instr->target());
CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
}
@@ -3070,9 +3242,9 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
ASSERT(ToRegister(instr->InputAt(0)).is(rdi));
ASSERT(ToRegister(instr->result()).is(rax));
- Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
+ CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
__ Set(rax, instr->arity());
- CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
+ CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
}
@@ -3131,7 +3303,7 @@ void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
ASSERT(ToRegister(instr->value()).is(rax));
__ Move(rcx, instr->hydrogen()->name());
- Handle<Code> ic = instr->strict_mode()
+ Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
CallCode(ic, RelocInfo::CODE_TARGET, instr);
@@ -3180,17 +3352,25 @@ void LCodeGen::DoStoreKeyedSpecializedArrayElement(
void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
- if (instr->index()->IsConstantOperand()) {
- if (instr->length()->IsRegister()) {
- __ cmpq(ToRegister(instr->length()),
+ if (instr->length()->IsRegister()) {
+ Register reg = ToRegister(instr->length());
+ if (FLAG_debug_code) {
+ __ AbortIfNotZeroExtended(reg);
+ }
+ if (instr->index()->IsConstantOperand()) {
+ __ cmpq(reg,
Immediate(ToInteger32(LConstantOperand::cast(instr->index()))));
} else {
- __ cmpq(ToOperand(instr->length()),
- Immediate(ToInteger32(LConstantOperand::cast(instr->index()))));
+ Register reg2 = ToRegister(instr->index());
+ if (FLAG_debug_code) {
+ __ AbortIfNotZeroExtended(reg2);
+ }
+ __ cmpq(reg, reg2);
}
} else {
- if (instr->length()->IsRegister()) {
- __ cmpq(ToRegister(instr->length()), ToRegister(instr->index()));
+ if (instr->index()->IsConstantOperand()) {
+ __ cmpq(ToOperand(instr->length()),
+ Immediate(ToInteger32(LConstantOperand::cast(instr->index()))));
} else {
__ cmpq(ToOperand(instr->length()), ToRegister(instr->index()));
}
@@ -3204,13 +3384,6 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
Register elements = ToRegister(instr->object());
Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
- // This instruction cannot handle the FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
- // conversion, so it deopts in that case.
- if (instr->hydrogen()->ValueNeedsSmiCheck()) {
- Condition cc = masm()->CheckSmi(value);
- DeoptimizeIf(NegateCondition(cc), instr->environment());
- }
-
// Do the store.
if (instr->key()->IsConstantOperand()) {
ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
@@ -3269,7 +3442,7 @@ void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
ASSERT(ToRegister(instr->key()).is(rcx));
ASSERT(ToRegister(instr->value()).is(rax));
- Handle<Code> ic = instr->strict_mode()
+ Handle<Code> ic = (instr->strict_mode_flag() == kStrictMode)
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
CallCode(ic, RelocInfo::CODE_TARGET, instr);
@@ -3336,81 +3509,14 @@ void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
LStringCharCodeAt* instr_;
};
- Register string = ToRegister(instr->string());
- Register index = ToRegister(instr->index());
- Register result = ToRegister(instr->result());
-
DeferredStringCharCodeAt* deferred =
new DeferredStringCharCodeAt(this, instr);
- // Fetch the instance type of the receiver into result register.
- __ movq(result, FieldOperand(string, HeapObject::kMapOffset));
- __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
-
- // We need special handling for indirect strings.
- Label check_sequential;
- __ testb(result, Immediate(kIsIndirectStringMask));
- __ j(zero, &check_sequential, Label::kNear);
-
- // Dispatch on the indirect string shape: slice or cons.
- Label cons_string;
- __ testb(result, Immediate(kSlicedNotConsMask));
- __ j(zero, &cons_string, Label::kNear);
-
- // Handle slices.
- Label indirect_string_loaded;
- __ SmiToInteger32(result, FieldOperand(string, SlicedString::kOffsetOffset));
- __ addq(index, result);
- __ movq(string, FieldOperand(string, SlicedString::kParentOffset));
- __ jmp(&indirect_string_loaded, Label::kNear);
-
- // Handle conses.
- // Check whether the right hand side is the empty string (i.e. if
- // this is really a flat string in a cons string). If that is not
- // the case we would rather go to the runtime system now to flatten
- // the string.
- __ bind(&cons_string);
- __ CompareRoot(FieldOperand(string, ConsString::kSecondOffset),
- Heap::kEmptyStringRootIndex);
- __ j(not_equal, deferred->entry());
- __ movq(string, FieldOperand(string, ConsString::kFirstOffset));
-
- __ bind(&indirect_string_loaded);
- __ movq(result, FieldOperand(string, HeapObject::kMapOffset));
- __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
-
- // Check whether the string is sequential. The only non-sequential
- // shapes we support have just been unwrapped above.
- __ bind(&check_sequential);
- STATIC_ASSERT(kSeqStringTag == 0);
- __ testb(result, Immediate(kStringRepresentationMask));
- __ j(not_zero, deferred->entry());
-
- // Dispatch on the encoding: ASCII or two-byte.
- Label ascii_string;
- STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
- STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
- __ testb(result, Immediate(kStringEncodingMask));
- __ j(not_zero, &ascii_string, Label::kNear);
-
- // Two-byte string.
- // Load the two-byte character code into the result register.
- Label done;
- STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
- __ movzxwl(result, FieldOperand(string,
- index,
- times_2,
- SeqTwoByteString::kHeaderSize));
- __ jmp(&done, Label::kNear);
-
- // ASCII string.
- // Load the byte into the result register.
- __ bind(&ascii_string);
- __ movzxbl(result, FieldOperand(string,
- index,
- times_1,
- SeqAsciiString::kHeaderSize));
- __ bind(&done);
+ StringCharLoadGenerator::Generate(masm(),
+ ToRegister(instr->string()),
+ ToRegister(instr->index()),
+ ToRegister(instr->result()),
+ deferred->entry());
__ bind(deferred->exit());
}
@@ -3588,6 +3694,7 @@ void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
void LCodeGen::EmitNumberUntagD(Register input_reg,
XMMRegister result_reg,
bool deoptimize_on_undefined,
+ bool deoptimize_on_minus_zero,
LEnvironment* env) {
Label load_smi, done;
@@ -3615,6 +3722,15 @@ void LCodeGen::EmitNumberUntagD(Register input_reg,
}
// Heap number to XMM conversion.
__ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
+ if (deoptimize_on_minus_zero) {
+ XMMRegister xmm_scratch = xmm0;
+ __ xorps(xmm_scratch, xmm_scratch);
+ __ ucomisd(xmm_scratch, result_reg);
+ __ j(not_equal, &done, Label::kNear);
+ __ movmskpd(kScratchRegister, result_reg);
+ __ testq(kScratchRegister, Immediate(1));
+ DeoptimizeIf(not_zero, env);
+ }
__ jmp(&done, Label::kNear);
// Smi to XMM conversion
@@ -3706,6 +3822,7 @@ void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
EmitNumberUntagD(input_reg, result_reg,
instr->hydrogen()->deoptimize_on_undefined(),
+ instr->hydrogen()->deoptimize_on_minus_zero(),
instr->environment());
}
@@ -3811,20 +3928,46 @@ void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
- ASSERT(instr->InputAt(0)->IsRegister());
- Register reg = ToRegister(instr->InputAt(0));
- __ Cmp(reg, instr->hydrogen()->target());
+ Register reg = ToRegister(instr->value());
+ Handle<JSFunction> target = instr->hydrogen()->target();
+ if (isolate()->heap()->InNewSpace(*target)) {
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(target);
+ __ movq(kScratchRegister, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
+ __ cmpq(reg, Operand(kScratchRegister, 0));
+ } else {
+ __ Cmp(reg, target);
+ }
DeoptimizeIf(not_equal, instr->environment());
}
-void LCodeGen::DoCheckMap(LCheckMap* instr) {
+void LCodeGen::DoCheckMapCommon(Register reg,
+ Handle<Map> map,
+ CompareMapMode mode,
+ LEnvironment* env) {
+ Label success;
+ __ CompareMap(reg, map, &success, mode);
+ DeoptimizeIf(not_equal, env);
+ __ bind(&success);
+}
+
+
+void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
LOperand* input = instr->InputAt(0);
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
- instr->hydrogen()->map());
- DeoptimizeIf(not_equal, instr->environment());
+
+ Label success;
+ SmallMapList* map_set = instr->hydrogen()->map_set();
+ for (int i = 0; i < map_set->length() - 1; i++) {
+ Handle<Map> map = map_set->at(i);
+ __ CompareMap(reg, map, &success, REQUIRE_EXACT_MAP);
+ __ j(equal, &success);
+ }
+ Handle<Map> map = map_set->last();
+ DoCheckMapCommon(reg, map, REQUIRE_EXACT_MAP, instr->environment());
+ __ bind(&success);
}
@@ -3879,18 +4022,6 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
}
-void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) {
- if (heap()->InNewSpace(*object)) {
- Handle<JSGlobalPropertyCell> cell =
- factory()->NewJSGlobalPropertyCell(object);
- __ movq(result, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
- __ movq(result, Operand(result, 0));
- } else {
- __ Move(result, object);
- }
-}
-
-
void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
Register reg = ToRegister(instr->TempAt(0));
@@ -3898,37 +4029,147 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
Handle<JSObject> current_prototype = instr->prototype();
// Load prototype object.
- LoadHeapObject(reg, current_prototype);
+ __ LoadHeapObject(reg, current_prototype);
// Check prototype maps up to the holder.
while (!current_prototype.is_identical_to(holder)) {
- __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
- Handle<Map>(current_prototype->map()));
- DeoptimizeIf(not_equal, instr->environment());
+ DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
+ ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
current_prototype =
Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
// Load next prototype object.
- LoadHeapObject(reg, current_prototype);
+ __ LoadHeapObject(reg, current_prototype);
}
// Check the holder map.
- __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
- Handle<Map>(current_prototype->map()));
- DeoptimizeIf(not_equal, instr->environment());
+ DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
+ ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
+}
+
+
+void LCodeGen::DoAllocateObject(LAllocateObject* instr) {
+ class DeferredAllocateObject: public LDeferredCode {
+ public:
+ DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr)
+ : LDeferredCode(codegen), instr_(instr) { }
+ virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LAllocateObject* instr_;
+ };
+
+ DeferredAllocateObject* deferred = new DeferredAllocateObject(this, instr);
+
+ Register result = ToRegister(instr->result());
+ Register scratch = ToRegister(instr->TempAt(0));
+ Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+ Handle<Map> initial_map(constructor->initial_map());
+ int instance_size = initial_map->instance_size();
+ ASSERT(initial_map->pre_allocated_property_fields() +
+ initial_map->unused_property_fields() -
+ initial_map->inobject_properties() == 0);
+
+ // Allocate memory for the object. The initial map might change when
+ // the constructor's prototype changes, but instance size and property
+ // counts remain unchanged (if slack tracking finished).
+ ASSERT(!constructor->shared()->IsInobjectSlackTrackingInProgress());
+ __ AllocateInNewSpace(instance_size,
+ result,
+ no_reg,
+ scratch,
+ deferred->entry(),
+ TAG_OBJECT);
+
+ __ bind(deferred->exit());
+ if (FLAG_debug_code) {
+ Label is_in_new_space;
+ __ JumpIfInNewSpace(result, scratch, &is_in_new_space);
+ __ Abort("Allocated object is not in new-space");
+ __ bind(&is_in_new_space);
+ }
+
+ // Load the initial map.
+ Register map = scratch;
+ __ LoadHeapObject(scratch, constructor);
+ __ movq(map, FieldOperand(scratch, JSFunction::kPrototypeOrInitialMapOffset));
+
+ if (FLAG_debug_code) {
+ __ AbortIfSmi(map);
+ __ cmpb(FieldOperand(map, Map::kInstanceSizeOffset),
+ Immediate(instance_size >> kPointerSizeLog2));
+ __ Assert(equal, "Unexpected instance size");
+ __ cmpb(FieldOperand(map, Map::kPreAllocatedPropertyFieldsOffset),
+ Immediate(initial_map->pre_allocated_property_fields()));
+ __ Assert(equal, "Unexpected pre-allocated property fields count");
+ __ cmpb(FieldOperand(map, Map::kUnusedPropertyFieldsOffset),
+ Immediate(initial_map->unused_property_fields()));
+ __ Assert(equal, "Unexpected unused property fields count");
+ __ cmpb(FieldOperand(map, Map::kInObjectPropertiesOffset),
+ Immediate(initial_map->inobject_properties()));
+ __ Assert(equal, "Unexpected in-object property fields count");
+ }
+
+ // Initialize map and fields of the newly allocated object.
+ ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
+ __ movq(FieldOperand(result, JSObject::kMapOffset), map);
+ __ LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
+ __ movq(FieldOperand(result, JSObject::kElementsOffset), scratch);
+ __ movq(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
+ if (initial_map->inobject_properties() != 0) {
+ __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
+ for (int i = 0; i < initial_map->inobject_properties(); i++) {
+ int property_offset = JSObject::kHeaderSize + i * kPointerSize;
+ __ movq(FieldOperand(result, property_offset), scratch);
+ }
+ }
+}
+
+
+void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) {
+ Register result = ToRegister(instr->result());
+ Handle<JSFunction> constructor = instr->hydrogen()->constructor();
+ Handle<Map> initial_map(constructor->initial_map());
+ int instance_size = initial_map->instance_size();
+
+ // TODO(3095996): Get rid of this. For now, we need to make the
+ // result register contain a valid pointer because it is already
+ // contained in the register pointer map.
+ __ Set(result, 0);
+
+ PushSafepointRegistersScope scope(this);
+ __ Push(Smi::FromInt(instance_size));
+ CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
+ __ StoreToSafepointRegisterSlot(result, rax);
}
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
- Handle<FixedArray> constant_elements = instr->hydrogen()->constant_elements();
- ASSERT_EQ(2, constant_elements->length());
- ElementsKind constant_elements_kind =
- static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
+ Heap* heap = isolate()->heap();
+ ElementsKind boilerplate_elements_kind =
+ instr->hydrogen()->boilerplate_elements_kind();
+
+ // Deopt if the array literal boilerplate ElementsKind is of a type different
+ // than the expected one. The check isn't necessary if the boilerplate has
+ // already been converted to FAST_ELEMENTS.
+ if (boilerplate_elements_kind != FAST_ELEMENTS) {
+ __ LoadHeapObject(rax, instr->hydrogen()->boilerplate_object());
+ __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
+ // Load the map's "bit field 2".
+ __ movb(rbx, FieldOperand(rbx, Map::kBitField2Offset));
+ // Retrieve elements_kind from bit field 2.
+ __ and_(rbx, Immediate(Map::kElementsKindMask));
+ __ cmpb(rbx, Immediate(boilerplate_elements_kind <<
+ Map::kElementsKindShift));
+ DeoptimizeIf(not_equal, instr->environment());
+ }
- // Setup the parameters to the stub/runtime call.
+ // Set up the parameters to the stub/runtime call.
__ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(rax, JSFunction::kLiteralsOffset));
__ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
- __ Push(instr->hydrogen()->constant_elements());
+ // Boilerplate already exists, constant elements are never accessed.
+ // Pass an empty fixed array.
+ __ Push(Handle<FixedArray>(heap->empty_fixed_array()));
// Pick the right runtime function or stub to call.
int length = instr->hydrogen()->length();
@@ -3944,28 +4185,160 @@ void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
} else {
FastCloneShallowArrayStub::Mode mode =
- constant_elements_kind == FAST_DOUBLE_ELEMENTS
- ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
- : FastCloneShallowArrayStub::CLONE_ELEMENTS;
+ boilerplate_elements_kind == FAST_DOUBLE_ELEMENTS
+ ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
+ : FastCloneShallowArrayStub::CLONE_ELEMENTS;
FastCloneShallowArrayStub stub(mode, length);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
}
+void LCodeGen::EmitDeepCopy(Handle<JSObject> object,
+ Register result,
+ Register source,
+ int* offset) {
+ ASSERT(!source.is(rcx));
+ ASSERT(!result.is(rcx));
+
+ // Only elements backing stores for non-COW arrays need to be copied.
+ Handle<FixedArrayBase> elements(object->elements());
+ bool has_elements = elements->length() > 0 &&
+ elements->map() != isolate()->heap()->fixed_cow_array_map();
+
+ // Increase the offset so that subsequent objects end up right after
+ // this object and its backing store.
+ int object_offset = *offset;
+ int object_size = object->map()->instance_size();
+ int elements_offset = *offset + object_size;
+ int elements_size = has_elements ? elements->Size() : 0;
+ *offset += object_size + elements_size;
+
+ // Copy object header.
+ ASSERT(object->properties()->length() == 0);
+ int inobject_properties = object->map()->inobject_properties();
+ int header_size = object_size - inobject_properties * kPointerSize;
+ for (int i = 0; i < header_size; i += kPointerSize) {
+ if (has_elements && i == JSObject::kElementsOffset) {
+ __ lea(rcx, Operand(result, elements_offset));
+ } else {
+ __ movq(rcx, FieldOperand(source, i));
+ }
+ __ movq(FieldOperand(result, object_offset + i), rcx);
+ }
+
+ // Copy in-object properties.
+ for (int i = 0; i < inobject_properties; i++) {
+ int total_offset = object_offset + object->GetInObjectPropertyOffset(i);
+ Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i));
+ if (value->IsJSObject()) {
+ Handle<JSObject> value_object = Handle<JSObject>::cast(value);
+ __ lea(rcx, Operand(result, *offset));
+ __ movq(FieldOperand(result, total_offset), rcx);
+ __ LoadHeapObject(source, value_object);
+ EmitDeepCopy(value_object, result, source, offset);
+ } else if (value->IsHeapObject()) {
+ __ LoadHeapObject(rcx, Handle<HeapObject>::cast(value));
+ __ movq(FieldOperand(result, total_offset), rcx);
+ } else {
+ __ movq(rcx, value, RelocInfo::NONE);
+ __ movq(FieldOperand(result, total_offset), rcx);
+ }
+ }
+
+ if (has_elements) {
+ // Copy elements backing store header.
+ __ LoadHeapObject(source, elements);
+ for (int i = 0; i < FixedArray::kHeaderSize; i += kPointerSize) {
+ __ movq(rcx, FieldOperand(source, i));
+ __ movq(FieldOperand(result, elements_offset + i), rcx);
+ }
+
+ // Copy elements backing store content.
+ int elements_length = elements->length();
+ if (elements->IsFixedDoubleArray()) {
+ Handle<FixedDoubleArray> double_array =
+ Handle<FixedDoubleArray>::cast(elements);
+ for (int i = 0; i < elements_length; i++) {
+ int64_t value = double_array->get_representation(i);
+ int total_offset =
+ elements_offset + FixedDoubleArray::OffsetOfElementAt(i);
+ __ movq(rcx, value, RelocInfo::NONE);
+ __ movq(FieldOperand(result, total_offset), rcx);
+ }
+ } else if (elements->IsFixedArray()) {
+ for (int i = 0; i < elements_length; i++) {
+ int total_offset = elements_offset + FixedArray::OffsetOfElementAt(i);
+ Handle<Object> value = JSObject::GetElement(object, i);
+ if (value->IsJSObject()) {
+ Handle<JSObject> value_object = Handle<JSObject>::cast(value);
+ __ lea(rcx, Operand(result, *offset));
+ __ movq(FieldOperand(result, total_offset), rcx);
+ __ LoadHeapObject(source, value_object);
+ EmitDeepCopy(value_object, result, source, offset);
+ } else if (value->IsHeapObject()) {
+ __ LoadHeapObject(rcx, Handle<HeapObject>::cast(value));
+ __ movq(FieldOperand(result, total_offset), rcx);
+ } else {
+ __ movq(rcx, value, RelocInfo::NONE);
+ __ movq(FieldOperand(result, total_offset), rcx);
+ }
+ }
+ } else {
+ UNREACHABLE();
+ }
+ }
+}
+
+
+void LCodeGen::DoFastLiteral(LFastLiteral* instr) {
+ int size = instr->hydrogen()->total_size();
+
+ // Allocate all objects that are part of the literal in one big
+ // allocation. This avoids multiple limit checks.
+ Label allocated, runtime_allocate;
+ __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
+ __ jmp(&allocated);
+
+ __ bind(&runtime_allocate);
+ __ Push(Smi::FromInt(size));
+ CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
+
+ __ bind(&allocated);
+ int offset = 0;
+ __ LoadHeapObject(rbx, instr->hydrogen()->boilerplate());
+ EmitDeepCopy(instr->hydrogen()->boilerplate(), rax, rbx, &offset);
+ ASSERT_EQ(size, offset);
+}
+
+
void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
- // Setup the parameters to the stub/runtime call.
- __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
- __ push(FieldOperand(rax, JSFunction::kLiteralsOffset));
+ Handle<FixedArray> literals(instr->environment()->closure()->literals());
+ Handle<FixedArray> constant_properties =
+ instr->hydrogen()->constant_properties();
+
+ // Set up the parameters to the stub/runtime call.
+ __ PushHeapObject(literals);
__ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
- __ Push(instr->hydrogen()->constant_properties());
- __ Push(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0));
+ __ Push(constant_properties);
+ int flags = instr->hydrogen()->fast_elements()
+ ? ObjectLiteral::kFastElements
+ : ObjectLiteral::kNoFlags;
+ flags |= instr->hydrogen()->has_function()
+ ? ObjectLiteral::kHasFunction
+ : ObjectLiteral::kNoFlags;
+ __ Push(Smi::FromInt(flags));
- // Pick the right runtime function to call.
+ // Pick the right runtime function or stub to call.
+ int properties_count = constant_properties->length() / 2;
if (instr->hydrogen()->depth() > 1) {
CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
- } else {
+ } else if (flags != ObjectLiteral::kFastElements ||
+ properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
+ } else {
+ FastCloneShallowObjectStub stub(properties_count);
+ CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
}
@@ -4035,7 +4408,7 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
Handle<SharedFunctionInfo> shared_info = instr->shared_info();
bool pretenure = instr->hydrogen()->pretenure();
if (!pretenure && shared_info->num_literals() == 0) {
- FastNewClosureStub stub(shared_info->strict_mode_flag());
+ FastNewClosureStub stub(shared_info->language_mode());
__ Push(shared_info);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
} else {
@@ -4059,7 +4432,12 @@ void LCodeGen::DoTypeof(LTypeof* instr) {
void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
ASSERT(!operand->IsDoubleRegister());
if (operand->IsConstantOperand()) {
- __ Push(ToHandle(LConstantOperand::cast(operand)));
+ Handle<Object> object = ToHandle(LConstantOperand::cast(operand));
+ if (object->IsSmi()) {
+ __ Push(Handle<Smi>::cast(object));
+ } else {
+ __ PushHeapObject(Handle<HeapObject>::cast(object));
+ }
} else if (operand->IsRegister()) {
__ push(ToRegister(operand));
} else {
@@ -4182,9 +4560,24 @@ void LCodeGen::EmitIsConstructCall(Register temp) {
}
+void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
+ // Ensure that we have enough space after the previous lazy-bailout
+ // instruction for patching the code here.
+ int current_pc = masm()->pc_offset();
+ if (current_pc < last_lazy_deopt_pc_ + space_needed) {
+ int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
+ __ Nop(padding_size);
+ }
+}
+
+
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
- // No code for lazy bailout instruction. Used to capture environment after a
- // call for populating the safepoint data with deoptimization data.
+ EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
+ last_lazy_deopt_pc_ = masm()->pc_offset();
+ ASSERT(instr->HasEnvironment());
+ LEnvironment* env = instr->environment();
+ RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
+ safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
@@ -4200,15 +4593,12 @@ void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
EmitPushTaggedOperand(key);
ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
// Create safepoint generator that will also ensure enough space in the
// reloc info for patching in deoptimization (since this is invoking a
// builtin)
- SafepointGenerator safepoint_generator(this,
- pointers,
- env->deoptimization_index());
+ SafepointGenerator safepoint_generator(
+ this, pointers, Safepoint::kLazyDeopt);
__ Push(Smi::FromInt(strict_mode_flag()));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, safepoint_generator);
}
@@ -4221,30 +4611,21 @@ void LCodeGen::DoIn(LIn* instr) {
EmitPushTaggedOperand(obj);
ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
LPointerMap* pointers = instr->pointer_map();
- LEnvironment* env = instr->deoptimization_environment();
RecordPosition(pointers->position());
- RegisterEnvironmentForDeoptimization(env);
- // Create safepoint generator that will also ensure enough space in the
- // reloc info for patching in deoptimization (since this is invoking a
- // builtin)
- SafepointGenerator safepoint_generator(this,
- pointers,
- env->deoptimization_index());
+ SafepointGenerator safepoint_generator(
+ this, pointers, Safepoint::kLazyDeopt);
__ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
}
void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
- {
- PushSafepointRegistersScope scope(this);
- __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
- __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
- RegisterLazyDeoptimization(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0);
- }
-
- // The gap code includes the restoring of the safepoint registers.
- int pc = masm()->pc_offset();
- safepoints_.SetPcAfterGap(pc);
+ PushSafepointRegistersScope scope(this);
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
+ RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0);
+ ASSERT(instr->HasEnvironment());
+ LEnvironment* env = instr->environment();
+ safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
@@ -4259,6 +4640,10 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
LStackCheck* instr_;
};
+ ASSERT(instr->HasEnvironment());
+ LEnvironment* env = instr->environment();
+ // There is no LLazyBailout instruction for stack-checks. We have to
+ // prepare for lazy deoptimization explicitly here.
if (instr->hydrogen()->is_function_entry()) {
// Perform stack overflow check.
Label done;
@@ -4266,7 +4651,11 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
__ j(above_equal, &done, Label::kNear);
StackCheckStub stub;
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
+ EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
+ last_lazy_deopt_pc_ = masm()->pc_offset();
__ bind(&done);
+ RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
+ safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
} else {
ASSERT(instr->hydrogen()->is_backwards_branch());
// Perform stack overflow check if this goto needs it before jumping.
@@ -4274,8 +4663,14 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
new DeferredStackCheck(this, instr);
__ CompareRoot(rsp, Heap::kStackLimitRootIndex);
__ j(below, deferred_stack_check->entry());
+ EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
+ last_lazy_deopt_pc_ = masm()->pc_offset();
__ bind(instr->done_label());
deferred_stack_check->SetExit(instr->done_label());
+ RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
+ // Don't record a deoptimization index for the safepoint here.
+ // This will be done explicitly when emitting call and the safepoint in
+ // the deferred code.
}
}
@@ -4291,11 +4686,93 @@ void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
// If the environment were already registered, we would have no way of
// backpatching it with the spill slot operands.
ASSERT(!environment->HasBeenRegistered());
- RegisterEnvironmentForDeoptimization(environment);
+ RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
ASSERT(osr_pc_offset_ == -1);
osr_pc_offset_ = masm()->pc_offset();
}
+
+void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
+ __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
+ DeoptimizeIf(equal, instr->environment());
+
+ Register null_value = rdi;
+ __ LoadRoot(null_value, Heap::kNullValueRootIndex);
+ __ cmpq(rax, null_value);
+ DeoptimizeIf(equal, instr->environment());
+
+ Condition cc = masm()->CheckSmi(rax);
+ DeoptimizeIf(cc, instr->environment());
+
+ STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
+ __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
+ DeoptimizeIf(below_equal, instr->environment());
+
+ Label use_cache, call_runtime;
+ __ CheckEnumCache(null_value, &call_runtime);
+
+ __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
+ __ jmp(&use_cache, Label::kNear);
+
+ // Get the set of properties to enumerate.
+ __ bind(&call_runtime);
+ __ push(rax);
+ CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
+
+ __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
+ Heap::kMetaMapRootIndex);
+ DeoptimizeIf(not_equal, instr->environment());
+ __ bind(&use_cache);
+}
+
+
+void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
+ Register map = ToRegister(instr->map());
+ Register result = ToRegister(instr->result());
+ __ LoadInstanceDescriptors(map, result);
+ __ movq(result,
+ FieldOperand(result, DescriptorArray::kEnumerationIndexOffset));
+ __ movq(result,
+ FieldOperand(result, FixedArray::SizeFor(instr->idx())));
+ Condition cc = masm()->CheckSmi(result);
+ DeoptimizeIf(cc, instr->environment());
+}
+
+
+void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
+ Register object = ToRegister(instr->value());
+ __ cmpq(ToRegister(instr->map()),
+ FieldOperand(object, HeapObject::kMapOffset));
+ DeoptimizeIf(not_equal, instr->environment());
+}
+
+
+void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
+ Register object = ToRegister(instr->object());
+ Register index = ToRegister(instr->index());
+
+ Label out_of_object, done;
+ __ SmiToInteger32(index, index);
+ __ cmpl(index, Immediate(0));
+ __ j(less, &out_of_object);
+ __ movq(object, FieldOperand(object,
+ index,
+ times_pointer_size,
+ JSObject::kHeaderSize));
+ __ jmp(&done, Label::kNear);
+
+ __ bind(&out_of_object);
+ __ movq(object, FieldOperand(object, JSObject::kPropertiesOffset));
+ __ negl(index);
+ // Index is now equal to out of object property index plus 1.
+ __ movq(object, FieldOperand(object,
+ index,
+ times_pointer_size,
+ FixedArray::kHeaderSize - kPointerSize));
+ __ bind(&done);
+}
+
+
#undef __
} } // namespace v8::internal
diff --git a/src/3rdparty/v8/src/x64/lithium-codegen-x64.h b/src/3rdparty/v8/src/x64/lithium-codegen-x64.h
index f3cb667..f5045b6 100644
--- a/src/3rdparty/v8/src/x64/lithium-codegen-x64.h
+++ b/src/3rdparty/v8/src/x64/lithium-codegen-x64.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -60,6 +60,7 @@ class LCodeGen BASE_EMBEDDED {
status_(UNUSED),
deferred_(8),
osr_pc_offset_(-1),
+ last_lazy_deopt_pc_(0),
resolver_(this),
expected_safepoint_kind_(Safepoint::kSimple) {
PopulateDeoptimizationLiteralsWithInlinedFunctions();
@@ -96,12 +97,17 @@ class LCodeGen BASE_EMBEDDED {
void DoDeferredTaggedToI(LTaggedToI* instr);
void DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr);
void DoDeferredStackCheck(LStackCheck* instr);
+ void DoDeferredRandom(LRandom* instr);
void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
- void DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
- Label* map_check);
+ void DoDeferredAllocateObject(LAllocateObject* instr);
+ void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
+ Label* map_check);
- // Parallel move support.
+ void DoCheckMapCommon(Register reg, Handle<Map> map,
+ CompareMapMode mode, LEnvironment* env);
+
+// Parallel move support.
void DoParallelMove(LParallelMove* move);
void DoGap(LGap* instr);
@@ -127,7 +133,7 @@ class LCodeGen BASE_EMBEDDED {
bool is_aborted() const { return status_ == ABORTED; }
StrictModeFlag strict_mode_flag() const {
- return info()->strict_mode_flag();
+ return info()->is_classic_mode() ? kNonStrictMode : kStrictMode;
}
LChunk* chunk() const { return chunk_; }
@@ -135,7 +141,6 @@ class LCodeGen BASE_EMBEDDED {
HGraph* graph() const { return chunk_->graph(); }
int GetNextEmittedBlock(int block);
- LInstruction* GetNextInstruction();
void EmitClassOfTest(Label* if_true,
Label* if_false,
@@ -198,12 +203,12 @@ class LCodeGen BASE_EMBEDDED {
LInstruction* instr,
CallKind call_kind);
- void LoadHeapObject(Register result, Handle<HeapObject> object);
- void RegisterLazyDeoptimization(LInstruction* instr,
- SafepointMode safepoint_mode,
- int argc);
- void RegisterEnvironmentForDeoptimization(LEnvironment* environment);
+ void RecordSafepointWithLazyDeopt(LInstruction* instr,
+ SafepointMode safepoint_mode,
+ int argc);
+ void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
+ Safepoint::DeoptMode mode);
void DeoptimizeIf(Condition cc, LEnvironment* environment);
void AddToTranslation(Translation* translation,
@@ -230,6 +235,7 @@ class LCodeGen BASE_EMBEDDED {
void DoMathSqrt(LUnaryMathOperation* instr);
void DoMathPowHalf(LUnaryMathOperation* instr);
void DoMathLog(LUnaryMathOperation* instr);
+ void DoMathTan(LUnaryMathOperation* instr);
void DoMathCos(LUnaryMathOperation* instr);
void DoMathSin(LUnaryMathOperation* instr);
@@ -237,16 +243,13 @@ class LCodeGen BASE_EMBEDDED {
void RecordSafepoint(LPointerMap* pointers,
Safepoint::Kind kind,
int arguments,
- int deoptimization_index);
- void RecordSafepoint(LPointerMap* pointers, int deoptimization_index);
- void RecordSafepoint(int deoptimization_index);
+ Safepoint::DeoptMode mode);
+ void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
+ void RecordSafepoint(Safepoint::DeoptMode mode);
void RecordSafepointWithRegisters(LPointerMap* pointers,
int arguments,
- int deoptimization_index);
+ Safepoint::DeoptMode mode);
void RecordPosition(int position);
- int LastSafepointEnd() {
- return static_cast<int>(safepoints_.GetPcAfterGap());
- }
static Condition TokenToCondition(Token::Value op, bool is_unsigned);
void EmitGoto(int block);
@@ -254,6 +257,7 @@ class LCodeGen BASE_EMBEDDED {
void EmitNumberUntagD(Register input,
XMMRegister result,
bool deoptimize_on_undefined,
+ bool deoptimize_on_minus_zero,
LEnvironment* env);
// Emits optimized code for typeof x == "y". Modifies input register.
@@ -271,6 +275,13 @@ class LCodeGen BASE_EMBEDDED {
Label* is_not_object,
Label* is_object);
+ // Emits optimized code for %_IsString(x). Preserves input register.
+ // Returns the condition on which a final split to
+ // true and false label should be made, to optimize fallthrough.
+ Condition EmitIsString(Register input,
+ Register temp1,
+ Label* is_not_string);
+
// Emits optimized code for %_IsConstructCall().
// Caller should branch on equal condition.
void EmitIsConstructCall(Register temp);
@@ -284,6 +295,13 @@ class LCodeGen BASE_EMBEDDED {
// register, or a stack slot operand.
void EmitPushTaggedOperand(LOperand* operand);
+ // Emits optimized code to deep-copy the contents of statically known
+ // object graphs (e.g. object literal boilerplate).
+ void EmitDeepCopy(Handle<JSObject> object,
+ Register result,
+ Register source,
+ int* offset);
+
struct JumpTableEntry {
explicit inline JumpTableEntry(Address entry)
: label(),
@@ -292,6 +310,8 @@ class LCodeGen BASE_EMBEDDED {
Address address;
};
+ void EnsureSpaceForLazyDeopt(int space_needed);
+
LChunk* const chunk_;
MacroAssembler* const masm_;
CompilationInfo* const info_;
@@ -308,6 +328,7 @@ class LCodeGen BASE_EMBEDDED {
TranslationBuffer translations_;
ZoneList<LDeferredCode*> deferred_;
int osr_pc_offset_;
+ int last_lazy_deopt_pc_;
// Builder that keeps track of safepoints in the code. The table
// itself is emitted at the end of the generated code.
@@ -357,7 +378,7 @@ class LDeferredCode: public ZoneObject {
virtual void Generate() = 0;
virtual LInstruction* instr() = 0;
- void SetExit(Label *exit) { external_exit_ = exit; }
+ void SetExit(Label* exit) { external_exit_ = exit; }
Label* entry() { return &entry_; }
Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
int instruction_index() const { return instruction_index_; }
diff --git a/src/3rdparty/v8/src/x64/lithium-gap-resolver-x64.cc b/src/3rdparty/v8/src/x64/lithium-gap-resolver-x64.cc
index c3c617c..877ea8c 100644
--- a/src/3rdparty/v8/src/x64/lithium-gap-resolver-x64.cc
+++ b/src/3rdparty/v8/src/x64/lithium-gap-resolver-x64.cc
@@ -198,16 +198,18 @@ void LGapResolver::EmitMove(int index) {
if (cgen_->IsInteger32Constant(constant_source)) {
__ movl(dst, Immediate(cgen_->ToInteger32(constant_source)));
} else {
- __ Move(dst, cgen_->ToHandle(constant_source));
+ __ LoadObject(dst, cgen_->ToHandle(constant_source));
}
} else {
ASSERT(destination->IsStackSlot());
Operand dst = cgen_->ToOperand(destination);
if (cgen_->IsInteger32Constant(constant_source)) {
- // Allow top 32 bits of an untagged Integer32 to be arbitrary.
- __ movl(dst, Immediate(cgen_->ToInteger32(constant_source)));
+ // Zero top 32 bits of a 64 bit spill slot that holds a 32 bit untagged
+ // value.
+ __ movq(dst, Immediate(cgen_->ToInteger32(constant_source)));
} else {
- __ Move(dst, cgen_->ToHandle(constant_source));
+ __ LoadObject(kScratchRegister, cgen_->ToHandle(constant_source));
+ __ movq(dst, kScratchRegister);
}
}
diff --git a/src/3rdparty/v8/src/x64/lithium-x64.cc b/src/3rdparty/v8/src/x64/lithium-x64.cc
index fb13cda..d147907 100644
--- a/src/3rdparty/v8/src/x64/lithium-x64.cc
+++ b/src/3rdparty/v8/src/x64/lithium-x64.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -230,6 +230,13 @@ void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
}
+void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
+ stream->Add("if is_string(");
+ InputAt(0)->PrintTo(stream);
+ stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
+}
+
+
void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if is_smi(");
InputAt(0)->PrintTo(stream);
@@ -244,6 +251,14 @@ void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
}
+void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
+ stream->Add("if string_compare(");
+ InputAt(0)->PrintTo(stream);
+ InputAt(1)->PrintTo(stream);
+ stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
+}
+
+
void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
stream->Add("if has_instance_type(");
InputAt(0)->PrintTo(stream);
@@ -367,7 +382,7 @@ LOperand* LChunk::GetNextSpillSlot(bool is_double) {
void LChunk::MarkEmptyBlocks() {
- HPhase phase("Mark empty blocks", this);
+ HPhase phase("L_Mark empty blocks", this);
for (int i = 0; i < graph()->blocks()->length(); ++i) {
HBasicBlock* block = graph()->blocks()->at(i);
int first = block->first_instruction_index();
@@ -454,7 +469,7 @@ void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
- LInstructionGap* gap = new LInstructionGap(block);
+ LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
int index = -1;
if (instr->IsControl()) {
instructions_.Add(gap);
@@ -529,8 +544,8 @@ Representation LChunk::LookupLiteralRepresentation(
LChunk* LChunkBuilder::Build() {
ASSERT(is_unused());
- chunk_ = new LChunk(info(), graph());
- HPhase phase("Building chunk", chunk_);
+ chunk_ = new(zone()) LChunk(info(), graph());
+ HPhase phase("L_Building chunk", chunk_);
status_ = BUILDING;
const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
for (int i = 0; i < blocks->length(); i++) {
@@ -559,20 +574,15 @@ void LChunkBuilder::Abort(const char* format, ...) {
}
-LRegister* LChunkBuilder::ToOperand(Register reg) {
- return LRegister::Create(Register::ToAllocationIndex(reg));
-}
-
-
LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
- return new LUnallocated(LUnallocated::FIXED_REGISTER,
- Register::ToAllocationIndex(reg));
+ return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
+ Register::ToAllocationIndex(reg));
}
LUnallocated* LChunkBuilder::ToUnallocated(XMMRegister reg) {
- return new LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
- XMMRegister::ToAllocationIndex(reg));
+ return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
+ XMMRegister::ToAllocationIndex(reg));
}
@@ -587,29 +597,29 @@ LOperand* LChunkBuilder::UseFixedDouble(HValue* value, XMMRegister reg) {
LOperand* LChunkBuilder::UseRegister(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}
LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
return Use(value,
- new LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
+ new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
LUnallocated::USED_AT_START));
}
LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::WRITABLE_REGISTER));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
}
LOperand* LChunkBuilder::Use(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::NONE));
+ return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
}
LOperand* LChunkBuilder::UseAtStart(HValue* value) {
- return Use(value, new LUnallocated(LUnallocated::NONE,
+ return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
LUnallocated::USED_AT_START));
}
@@ -645,7 +655,7 @@ LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
LOperand* LChunkBuilder::UseAny(HValue* value) {
return value->IsConstant()
? chunk_->DefineConstantOperand(HConstant::cast(value))
- : Use(value, new LUnallocated(LUnallocated::ANY));
+ : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
}
@@ -654,7 +664,7 @@ LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
HInstruction* instr = HInstruction::cast(value);
VisitInstruction(instr);
}
- allocator_->RecordUse(value, operand);
+ operand->set_virtual_register(value->id());
return operand;
}
@@ -662,22 +672,17 @@ LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
template<int I, int T>
LInstruction* LChunkBuilder::Define(LTemplateInstruction<1, I, T>* instr,
LUnallocated* result) {
- allocator_->RecordDefinition(current_instruction_, result);
+ result->set_virtual_register(current_instruction_->id());
instr->set_result(result);
return instr;
}
template<int I, int T>
-LInstruction* LChunkBuilder::Define(LTemplateInstruction<1, I, T>* instr) {
- return Define(instr, new LUnallocated(LUnallocated::NONE));
-}
-
-
-template<int I, int T>
LInstruction* LChunkBuilder::DefineAsRegister(
LTemplateInstruction<1, I, T>* instr) {
- return Define(instr, new LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
+ return Define(instr,
+ new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
}
@@ -685,14 +690,16 @@ template<int I, int T>
LInstruction* LChunkBuilder::DefineAsSpilled(
LTemplateInstruction<1, I, T>* instr,
int index) {
- return Define(instr, new LUnallocated(LUnallocated::FIXED_SLOT, index));
+ return Define(instr,
+ new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
}
template<int I, int T>
LInstruction* LChunkBuilder::DefineSameAsFirst(
LTemplateInstruction<1, I, T>* instr) {
- return Define(instr, new LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
+ return Define(instr,
+ new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
}
@@ -775,44 +782,46 @@ LInstruction* LChunkBuilder::MarkAsSaveDoubles(LInstruction* instr) {
LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
ASSERT(!instr->HasPointerMap());
- instr->set_pointer_map(new LPointerMap(position_));
+ instr->set_pointer_map(new(zone()) LPointerMap(position_));
return instr;
}
LUnallocated* LChunkBuilder::TempRegister() {
- LUnallocated* operand = new LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
- allocator_->RecordTemporary(operand);
+ LUnallocated* operand =
+ new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
+ operand->set_virtual_register(allocator_->GetVirtualRegister());
+ if (!allocator_->AllocationOk()) Abort("Not enough virtual registers.");
return operand;
}
LOperand* LChunkBuilder::FixedTemp(Register reg) {
LUnallocated* operand = ToUnallocated(reg);
- allocator_->RecordTemporary(operand);
+ ASSERT(operand->HasFixedPolicy());
return operand;
}
LOperand* LChunkBuilder::FixedTemp(XMMRegister reg) {
LUnallocated* operand = ToUnallocated(reg);
- allocator_->RecordTemporary(operand);
+ ASSERT(operand->HasFixedPolicy());
return operand;
}
LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
- return new LLabel(instr->block());
+ return new(zone()) LLabel(instr->block());
}
LInstruction* LChunkBuilder::DoSoftDeoptimize(HSoftDeoptimize* instr) {
- return AssignEnvironment(new LDeoptimize);
+ return AssignEnvironment(new(zone()) LDeoptimize);
}
LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
- return AssignEnvironment(new LDeoptimize);
+ return AssignEnvironment(new(zone()) LDeoptimize);
}
@@ -824,7 +833,7 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op,
LOperand* left = UseFixed(instr->left(), rdx);
LOperand* right = UseFixed(instr->right(), rax);
- LArithmeticT* result = new LArithmeticT(op, left, right);
+ LArithmeticT* result = new(zone()) LArithmeticT(op, left, right);
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -858,7 +867,7 @@ LInstruction* LChunkBuilder::DoShift(Token::Value op,
}
LInstruction* result =
- DefineSameAsFirst(new LShiftI(op, left, right, does_deopt));
+ DefineSameAsFirst(new(zone()) LShiftI(op, left, right, does_deopt));
return does_deopt ? AssignEnvironment(result) : result;
}
@@ -871,7 +880,7 @@ LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
ASSERT(op != Token::MOD);
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- LArithmeticD* result = new LArithmeticD(op, left, right);
+ LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
return DefineSameAsFirst(result);
}
@@ -889,7 +898,8 @@ LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
ASSERT(right->representation().IsTagged());
LOperand* left_operand = UseFixed(left, rdx);
LOperand* right_operand = UseFixed(right, rax);
- LArithmeticT* result = new LArithmeticT(op, left_operand, right_operand);
+ LArithmeticT* result =
+ new(zone()) LArithmeticT(op, left_operand, right_operand);
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -985,14 +995,18 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
LEnvironment* outer =
CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator);
int ast_id = hydrogen_env->ast_id();
- ASSERT(ast_id != AstNode::kNoNumber);
+ ASSERT(ast_id != AstNode::kNoNumber ||
+ hydrogen_env->frame_type() != JS_FUNCTION);
int value_count = hydrogen_env->length();
- LEnvironment* result = new LEnvironment(hydrogen_env->closure(),
- ast_id,
- hydrogen_env->parameter_count(),
- argument_count_,
- value_count,
- outer);
+ LEnvironment* result = new(zone()) LEnvironment(
+ hydrogen_env->closure(),
+ hydrogen_env->frame_type(),
+ ast_id,
+ hydrogen_env->parameter_count(),
+ argument_count_,
+ value_count,
+ outer);
+ int argument_index = *argument_index_accumulator;
for (int i = 0; i < value_count; ++i) {
if (hydrogen_env->is_special_index(i)) continue;
@@ -1001,57 +1015,70 @@ LEnvironment* LChunkBuilder::CreateEnvironment(
if (value->IsArgumentsObject()) {
op = NULL;
} else if (value->IsPushArgument()) {
- op = new LArgument((*argument_index_accumulator)++);
+ op = new(zone()) LArgument(argument_index++);
} else {
op = UseAny(value);
}
result->AddValue(op, value->representation());
}
+ if (hydrogen_env->frame_type() == JS_FUNCTION) {
+ *argument_index_accumulator = argument_index;
+ }
+
return result;
}
LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
- return new LGoto(instr->FirstSuccessor()->block_id());
+ return new(zone()) LGoto(instr->FirstSuccessor()->block_id());
}
LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
- HValue* v = instr->value();
- if (v->EmitAtUses()) {
- ASSERT(v->IsConstant());
- ASSERT(!v->representation().IsDouble());
- HBasicBlock* successor = HConstant::cast(v)->ToBoolean()
+ HValue* value = instr->value();
+ if (value->EmitAtUses()) {
+ ASSERT(value->IsConstant());
+ ASSERT(!value->representation().IsDouble());
+ HBasicBlock* successor = HConstant::cast(value)->ToBoolean()
? instr->FirstSuccessor()
: instr->SecondSuccessor();
- return new LGoto(successor->block_id());
+ return new(zone()) LGoto(successor->block_id());
}
- return AssignEnvironment(new LBranch(UseRegister(v)));
+
+ LBranch* result = new(zone()) LBranch(UseRegister(value));
+ // Tagged values that are not known smis or booleans require a
+ // deoptimization environment.
+ Representation rep = value->representation();
+ HType type = value->type();
+ if (rep.IsTagged() && !type.IsSmi() && !type.IsBoolean()) {
+ return AssignEnvironment(result);
+ }
+ return result;
}
LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
ASSERT(instr->value()->representation().IsTagged());
LOperand* value = UseRegisterAtStart(instr->value());
- return new LCmpMapAndBranch(value);
+ return new(zone()) LCmpMapAndBranch(value);
}
LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
- return DefineAsRegister(new LArgumentsLength(Use(length->value())));
+ return DefineAsRegister(new(zone()) LArgumentsLength(Use(length->value())));
}
LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
- return DefineAsRegister(new LArgumentsElements);
+ return DefineAsRegister(new(zone()) LArgumentsElements);
}
LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LOperand* left = UseFixed(instr->left(), rax);
LOperand* right = UseFixed(instr->right(), rdx);
- LInstanceOf* result = new LInstanceOf(left, right);
+ LInstanceOf* result = new(zone()) LInstanceOf(left, right);
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -1059,18 +1086,26 @@ LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
HInstanceOfKnownGlobal* instr) {
LInstanceOfKnownGlobal* result =
- new LInstanceOfKnownGlobal(UseFixed(instr->left(), rax),
- FixedTemp(rdi));
+ new(zone()) LInstanceOfKnownGlobal(UseFixed(instr->left(), rax),
+ FixedTemp(rdi));
return MarkAsCall(DefineFixed(result, rax), instr);
}
+LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
+ LOperand* receiver = UseRegister(instr->receiver());
+ LOperand* function = UseRegisterAtStart(instr->function());
+ LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
+ return AssignEnvironment(DefineSameAsFirst(result));
+}
+
+
LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
LOperand* function = UseFixed(instr->function(), rdi);
LOperand* receiver = UseFixed(instr->receiver(), rax);
LOperand* length = UseFixed(instr->length(), rbx);
LOperand* elements = UseFixed(instr->elements(), rcx);
- LApplyArguments* result = new LApplyArguments(function,
+ LApplyArguments* result = new(zone()) LApplyArguments(function,
receiver,
length,
elements);
@@ -1081,61 +1116,68 @@ LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
LInstruction* LChunkBuilder::DoPushArgument(HPushArgument* instr) {
++argument_count_;
LOperand* argument = UseOrConstant(instr->argument());
- return new LPushArgument(argument);
+ return new(zone()) LPushArgument(argument);
}
LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
- return instr->HasNoUses() ? NULL : DefineAsRegister(new LThisFunction);
+ return instr->HasNoUses()
+ ? NULL
+ : DefineAsRegister(new(zone()) LThisFunction);
}
LInstruction* LChunkBuilder::DoContext(HContext* instr) {
- return instr->HasNoUses() ? NULL : DefineAsRegister(new LContext);
+ return instr->HasNoUses() ? NULL : DefineAsRegister(new(zone()) LContext);
}
LInstruction* LChunkBuilder::DoOuterContext(HOuterContext* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LOuterContext(context));
+ return DefineAsRegister(new(zone()) LOuterContext(context));
+}
+
+
+LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
+ return MarkAsCall(new(zone()) LDeclareGlobals, instr);
}
LInstruction* LChunkBuilder::DoGlobalObject(HGlobalObject* instr) {
- return DefineAsRegister(new LGlobalObject(instr->qml_global()));
+ return DefineAsRegister(new(zone()) LGlobalObject);
}
LInstruction* LChunkBuilder::DoGlobalReceiver(HGlobalReceiver* instr) {
LOperand* global_object = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LGlobalReceiver(global_object));
+ return DefineAsRegister(new(zone()) LGlobalReceiver(global_object));
}
LInstruction* LChunkBuilder::DoCallConstantFunction(
HCallConstantFunction* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallConstantFunction, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallConstantFunction, rax), instr);
}
LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
LOperand* function = UseFixed(instr->function(), rdi);
argument_count_ -= instr->argument_count();
- LInvokeFunction* result = new LInvokeFunction(function);
+ LInvokeFunction* result = new(zone()) LInvokeFunction(function);
return MarkAsCall(DefineFixed(result, rax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
}
LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
BuiltinFunctionId op = instr->op();
- if (op == kMathLog || op == kMathSin || op == kMathCos) {
+ if (op == kMathLog || op == kMathSin || op == kMathCos || op == kMathTan) {
LOperand* input = UseFixedDouble(instr->value(), xmm1);
- LUnaryMathOperation* result = new LUnaryMathOperation(input);
+ LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input);
return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
} else {
LOperand* input = UseRegisterAtStart(instr->value());
- LUnaryMathOperation* result = new LUnaryMathOperation(input);
+ LUnaryMathOperation* result = new(zone()) LUnaryMathOperation(input);
switch (op) {
case kMathAbs:
return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
@@ -1159,47 +1201,48 @@ LInstruction* LChunkBuilder::DoCallKeyed(HCallKeyed* instr) {
ASSERT(instr->key()->representation().IsTagged());
LOperand* key = UseFixed(instr->key(), rcx);
argument_count_ -= instr->argument_count();
- LCallKeyed* result = new LCallKeyed(key);
+ LCallKeyed* result = new(zone()) LCallKeyed(key);
return MarkAsCall(DefineFixed(result, rax), instr);
}
LInstruction* LChunkBuilder::DoCallNamed(HCallNamed* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallNamed, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallNamed, rax), instr);
}
LInstruction* LChunkBuilder::DoCallGlobal(HCallGlobal* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallGlobal(instr->qml_global()), rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallGlobal, rax), instr);
}
LInstruction* LChunkBuilder::DoCallKnownGlobal(HCallKnownGlobal* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallKnownGlobal, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallKnownGlobal, rax), instr);
}
LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
LOperand* constructor = UseFixed(instr->constructor(), rdi);
argument_count_ -= instr->argument_count();
- LCallNew* result = new LCallNew(constructor);
+ LCallNew* result = new(zone()) LCallNew(constructor);
return MarkAsCall(DefineFixed(result, rax), instr);
}
LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
+ LOperand* function = UseFixed(instr->function(), rdi);
argument_count_ -= instr->argument_count();
- LCallFunction* result = new LCallFunction();
+ LCallFunction* result = new(zone()) LCallFunction(function);
return MarkAsCall(DefineFixed(result, rax), instr);
}
LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallRuntime, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallRuntime, rax), instr);
}
@@ -1225,7 +1268,7 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
- return DefineSameAsFirst(new LBitI(left, right));
+ return DefineSameAsFirst(new(zone()) LBitI(left, right));
} else {
ASSERT(instr->representation().IsTagged());
ASSERT(instr->left()->representation().IsTagged());
@@ -1233,7 +1276,7 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
LOperand* left = UseFixed(instr->left(), rdx);
LOperand* right = UseFixed(instr->right(), rax);
- LArithmeticT* result = new LArithmeticT(instr->op(), left, right);
+ LArithmeticT* result = new(zone()) LArithmeticT(instr->op(), left, right);
return MarkAsCall(DefineFixed(result, rax), instr);
}
}
@@ -1243,7 +1286,7 @@ LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
ASSERT(instr->value()->representation().IsInteger32());
ASSERT(instr->representation().IsInteger32());
LOperand* input = UseRegisterAtStart(instr->value());
- LBitNotI* result = new LBitNotI(input);
+ LBitNotI* result = new(zone()) LBitNotI(input);
return DefineSameAsFirst(result);
}
@@ -1257,7 +1300,7 @@ LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
LOperand* temp = FixedTemp(rdx);
LOperand* dividend = UseFixed(instr->left(), rax);
LOperand* divisor = UseRegister(instr->right());
- LDivI* result = new LDivI(dividend, divisor, temp);
+ LDivI* result = new(zone()) LDivI(dividend, divisor, temp);
return AssignEnvironment(DefineFixed(result, rax));
} else {
ASSERT(instr->representation().IsTagged());
@@ -1275,7 +1318,8 @@ LInstruction* LChunkBuilder::DoMod(HMod* instr) {
if (instr->HasPowerOf2Divisor()) {
ASSERT(!instr->CheckFlag(HValue::kCanBeDivByZero));
LOperand* value = UseRegisterAtStart(instr->left());
- LModI* mod = new LModI(value, UseOrConstant(instr->right()), NULL);
+ LModI* mod =
+ new(zone()) LModI(value, UseOrConstant(instr->right()), NULL);
result = DefineSameAsFirst(mod);
} else {
// The temporary operand is necessary to ensure that right is not
@@ -1283,7 +1327,7 @@ LInstruction* LChunkBuilder::DoMod(HMod* instr) {
LOperand* temp = FixedTemp(rdx);
LOperand* value = UseFixed(instr->left(), rax);
LOperand* divisor = UseRegister(instr->right());
- LModI* mod = new LModI(value, divisor, temp);
+ LModI* mod = new(zone()) LModI(value, divisor, temp);
result = DefineFixed(mod, rdx);
}
@@ -1300,7 +1344,7 @@ LInstruction* LChunkBuilder::DoMod(HMod* instr) {
// TODO(fschneider): Allow any register as input registers.
LOperand* left = UseFixedDouble(instr->left(), xmm2);
LOperand* right = UseFixedDouble(instr->right(), xmm1);
- LArithmeticD* result = new LArithmeticD(Token::MOD, left, right);
+ LArithmeticD* result = new(zone()) LArithmeticD(Token::MOD, left, right);
return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
}
}
@@ -1312,8 +1356,12 @@ LInstruction* LChunkBuilder::DoMul(HMul* instr) {
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
LOperand* right = UseOrConstant(instr->MostConstantOperand());
- LMulI* mul = new LMulI(left, right);
- return AssignEnvironment(DefineSameAsFirst(mul));
+ LMulI* mul = new(zone()) LMulI(left, right);
+ if (instr->CheckFlag(HValue::kCanOverflow) ||
+ instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ AssignEnvironment(mul);
+ }
+ return DefineSameAsFirst(mul);
} else if (instr->representation().IsDouble()) {
return DoArithmeticD(Token::MUL, instr);
} else {
@@ -1329,7 +1377,7 @@ LInstruction* LChunkBuilder::DoSub(HSub* instr) {
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseOrConstantAtStart(instr->right());
- LSubI* sub = new LSubI(left, right);
+ LSubI* sub = new(zone()) LSubI(left, right);
LInstruction* result = DefineSameAsFirst(sub);
if (instr->CheckFlag(HValue::kCanOverflow)) {
result = AssignEnvironment(result);
@@ -1350,7 +1398,7 @@ LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterAtStart(instr->LeastConstantOperand());
LOperand* right = UseOrConstantAtStart(instr->MostConstantOperand());
- LAddI* add = new LAddI(left, right);
+ LAddI* add = new(zone()) LAddI(left, right);
LInstruction* result = DefineSameAsFirst(add);
if (instr->CheckFlag(HValue::kCanOverflow)) {
result = AssignEnvironment(result);
@@ -1380,18 +1428,31 @@ LInstruction* LChunkBuilder::DoPower(HPower* instr) {
#else
UseFixed(instr->right(), rdi);
#endif
- LPower* result = new LPower(left, right);
- return MarkAsCall(DefineFixedDouble(result, xmm1), instr,
+ LPower* result = new(zone()) LPower(left, right);
+ return MarkAsCall(DefineFixedDouble(result, xmm3), instr,
CAN_DEOPTIMIZE_EAGERLY);
}
+LInstruction* LChunkBuilder::DoRandom(HRandom* instr) {
+ ASSERT(instr->representation().IsDouble());
+ ASSERT(instr->global_object()->representation().IsTagged());
+#ifdef _WIN64
+ LOperand* global_object = UseFixed(instr->global_object(), rcx);
+#else
+ LOperand* global_object = UseFixed(instr->global_object(), rdi);
+#endif
+ LRandom* result = new(zone()) LRandom(global_object);
+ return MarkAsCall(DefineFixedDouble(result, xmm1), instr);
+}
+
+
LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
ASSERT(instr->left()->representation().IsTagged());
ASSERT(instr->right()->representation().IsTagged());
LOperand* left = UseFixed(instr->left(), rdx);
LOperand* right = UseFixed(instr->right(), rax);
- LCmpT* result = new LCmpT(left, right);
+ LCmpT* result = new(zone()) LCmpT(left, right);
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -1404,7 +1465,7 @@ LInstruction* LChunkBuilder::DoCompareIDAndBranch(
ASSERT(instr->right()->representation().IsInteger32());
LOperand* left = UseRegisterOrConstantAtStart(instr->left());
LOperand* right = UseOrConstantAtStart(instr->right());
- return new LCmpIDAndBranch(left, right);
+ return new(zone()) LCmpIDAndBranch(left, right);
} else {
ASSERT(r.IsDouble());
ASSERT(instr->left()->representation().IsDouble());
@@ -1418,7 +1479,7 @@ LInstruction* LChunkBuilder::DoCompareIDAndBranch(
left = UseRegisterAtStart(instr->left());
right = UseRegisterAtStart(instr->right());
}
- return new LCmpIDAndBranch(left, right);
+ return new(zone()) LCmpIDAndBranch(left, right);
}
}
@@ -1427,47 +1488,72 @@ LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
HCompareObjectEqAndBranch* instr) {
LOperand* left = UseRegisterAtStart(instr->left());
LOperand* right = UseRegisterAtStart(instr->right());
- return new LCmpObjectEqAndBranch(left, right);
+ return new(zone()) LCmpObjectEqAndBranch(left, right);
}
LInstruction* LChunkBuilder::DoCompareConstantEqAndBranch(
HCompareConstantEqAndBranch* instr) {
- return new LCmpConstantEqAndBranch(UseRegisterAtStart(instr->value()));
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return new(zone()) LCmpConstantEqAndBranch(value);
}
LInstruction* LChunkBuilder::DoIsNilAndBranch(HIsNilAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
LOperand* temp = instr->kind() == kStrictEquality ? NULL : TempRegister();
- return new LIsNilAndBranch(UseRegisterAtStart(instr->value()), temp);
+ return new(zone()) LIsNilAndBranch(UseRegisterAtStart(instr->value()), temp);
}
LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LIsObjectAndBranch(UseRegisterAtStart(instr->value()));
+ return new(zone()) LIsObjectAndBranch(UseRegisterAtStart(instr->value()));
+}
+
+
+LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
+ ASSERT(instr->value()->representation().IsTagged());
+ LOperand* value = UseRegisterAtStart(instr->value());
+ LOperand* temp = TempRegister();
+ return new(zone()) LIsStringAndBranch(value, temp);
}
LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LIsSmiAndBranch(Use(instr->value()));
+ return new(zone()) LIsSmiAndBranch(Use(instr->value()));
}
LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
HIsUndetectableAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LIsUndetectableAndBranch(UseRegisterAtStart(instr->value()),
- TempRegister());
+ LOperand* value = UseRegisterAtStart(instr->value());
+ LOperand* temp = TempRegister();
+ return new(zone()) LIsUndetectableAndBranch(value, temp);
+}
+
+
+LInstruction* LChunkBuilder::DoStringCompareAndBranch(
+ HStringCompareAndBranch* instr) {
+
+ ASSERT(instr->left()->representation().IsTagged());
+ ASSERT(instr->right()->representation().IsTagged());
+ LOperand* left = UseFixed(instr->left(), rdx);
+ LOperand* right = UseFixed(instr->right(), rax);
+ LStringCompareAndBranch* result =
+ new(zone()) LStringCompareAndBranch(left, right);
+
+ return MarkAsCall(result, instr);
}
LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
HHasInstanceTypeAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LHasInstanceTypeAndBranch(UseRegisterAtStart(instr->value()));
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return new(zone()) LHasInstanceTypeAndBranch(value);
}
@@ -1476,55 +1562,64 @@ LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
ASSERT(instr->value()->representation().IsTagged());
LOperand* value = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LGetCachedArrayIndex(value));
+ return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
}
LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
HHasCachedArrayIndexAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
- return new LHasCachedArrayIndexAndBranch(UseRegisterAtStart(instr->value()));
+ LOperand* value = UseRegisterAtStart(instr->value());
+ return new(zone()) LHasCachedArrayIndexAndBranch(value);
}
LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
HClassOfTestAndBranch* instr) {
- return new LClassOfTestAndBranch(UseTempRegister(instr->value()),
- TempRegister(),
- TempRegister());
+ LOperand* value = UseRegister(instr->value());
+ return new(zone()) LClassOfTestAndBranch(value,
+ TempRegister(),
+ TempRegister());
}
LInstruction* LChunkBuilder::DoJSArrayLength(HJSArrayLength* instr) {
LOperand* array = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LJSArrayLength(array));
+ return DefineAsRegister(new(zone()) LJSArrayLength(array));
}
LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
HFixedArrayBaseLength* instr) {
LOperand* array = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LFixedArrayBaseLength(array));
+ return DefineAsRegister(new(zone()) LFixedArrayBaseLength(array));
}
LInstruction* LChunkBuilder::DoElementsKind(HElementsKind* instr) {
LOperand* object = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LElementsKind(object));
+ return DefineAsRegister(new(zone()) LElementsKind(object));
}
LInstruction* LChunkBuilder::DoValueOf(HValueOf* instr) {
LOperand* object = UseRegister(instr->value());
- LValueOf* result = new LValueOf(object);
- return AssignEnvironment(DefineSameAsFirst(result));
+ LValueOf* result = new(zone()) LValueOf(object);
+ return DefineSameAsFirst(result);
+}
+
+
+LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
+ LOperand* object = UseFixed(instr->value(), rax);
+ LDateField* result = new LDateField(object, instr->index());
+ return MarkAsCall(DefineFixed(result, rax), instr);
}
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
- return AssignEnvironment(new LBoundsCheck(
- UseRegisterOrConstantAtStart(instr->index()),
- Use(instr->length())));
+ LOperand* value = UseRegisterOrConstantAtStart(instr->index());
+ LOperand* length = Use(instr->length());
+ return AssignEnvironment(new(zone()) LBoundsCheck(value, length));
}
@@ -1537,7 +1632,7 @@ LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
LInstruction* LChunkBuilder::DoThrow(HThrow* instr) {
LOperand* value = UseFixed(instr->value(), rax);
- return MarkAsCall(new LThrow(value), instr);
+ return MarkAsCall(new(zone()) LThrow(value), instr);
}
@@ -1560,7 +1655,7 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
if (from.IsTagged()) {
if (to.IsDouble()) {
LOperand* value = UseRegister(instr->value());
- LNumberUntagD* res = new LNumberUntagD(value);
+ LNumberUntagD* res = new(zone()) LNumberUntagD(value);
return AssignEnvironment(DefineAsRegister(res));
} else {
ASSERT(to.IsInteger32());
@@ -1569,10 +1664,10 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
if (needs_check) {
bool truncating = instr->CanTruncateToInt32();
LOperand* xmm_temp = truncating ? NULL : FixedTemp(xmm1);
- LTaggedToI* res = new LTaggedToI(value, xmm_temp);
+ LTaggedToI* res = new(zone()) LTaggedToI(value, xmm_temp);
return AssignEnvironment(DefineSameAsFirst(res));
} else {
- return DefineSameAsFirst(new LSmiUntag(value, needs_check));
+ return DefineSameAsFirst(new(zone()) LSmiUntag(value, needs_check));
}
}
} else if (from.IsDouble()) {
@@ -1582,26 +1677,27 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
// Make sure that temp and result_temp are different registers.
LUnallocated* result_temp = TempRegister();
- LNumberTagD* result = new LNumberTagD(value, temp);
+ LNumberTagD* result = new(zone()) LNumberTagD(value, temp);
return AssignPointerMap(Define(result, result_temp));
} else {
ASSERT(to.IsInteger32());
LOperand* value = UseRegister(instr->value());
- return AssignEnvironment(DefineAsRegister(new LDoubleToI(value)));
+ return AssignEnvironment(DefineAsRegister(new(zone()) LDoubleToI(value)));
}
} else if (from.IsInteger32()) {
if (to.IsTagged()) {
HValue* val = instr->value();
LOperand* value = UseRegister(val);
if (val->HasRange() && val->range()->IsInSmiRange()) {
- return DefineSameAsFirst(new LSmiTag(value));
+ return DefineSameAsFirst(new(zone()) LSmiTag(value));
} else {
- LNumberTagI* result = new LNumberTagI(value);
+ LNumberTagI* result = new(zone()) LNumberTagI(value);
return AssignEnvironment(AssignPointerMap(DefineSameAsFirst(result)));
}
} else {
ASSERT(to.IsDouble());
- return DefineAsRegister(new LInteger32ToDouble(Use(instr->value())));
+ LOperand* value = Use(instr->value());
+ return DefineAsRegister(new(zone()) LInteger32ToDouble(value));
}
}
UNREACHABLE();
@@ -1611,39 +1707,39 @@ LInstruction* LChunkBuilder::DoChange(HChange* instr) {
LInstruction* LChunkBuilder::DoCheckNonSmi(HCheckNonSmi* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- return AssignEnvironment(new LCheckNonSmi(value));
+ return AssignEnvironment(new(zone()) LCheckNonSmi(value));
}
LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- LCheckInstanceType* result = new LCheckInstanceType(value);
+ LCheckInstanceType* result = new(zone()) LCheckInstanceType(value);
return AssignEnvironment(result);
}
LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
LOperand* temp = TempRegister();
- LCheckPrototypeMaps* result = new LCheckPrototypeMaps(temp);
+ LCheckPrototypeMaps* result = new(zone()) LCheckPrototypeMaps(temp);
return AssignEnvironment(result);
}
LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- return AssignEnvironment(new LCheckSmi(value));
+ return AssignEnvironment(new(zone()) LCheckSmi(value));
}
LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- return AssignEnvironment(new LCheckFunction(value));
+ return AssignEnvironment(new(zone()) LCheckFunction(value));
}
-LInstruction* LChunkBuilder::DoCheckMap(HCheckMap* instr) {
+LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
- LCheckMap* result = new LCheckMap(value);
+ LCheckMaps* result = new(zone()) LCheckMaps(value);
return AssignEnvironment(result);
}
@@ -1653,62 +1749,36 @@ LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
Representation input_rep = value->representation();
LOperand* reg = UseRegister(value);
if (input_rep.IsDouble()) {
- return DefineAsRegister(new LClampDToUint8(reg,
+ return DefineAsRegister(new(zone()) LClampDToUint8(reg,
TempRegister()));
} else if (input_rep.IsInteger32()) {
- return DefineSameAsFirst(new LClampIToUint8(reg));
+ return DefineSameAsFirst(new(zone()) LClampIToUint8(reg));
} else {
ASSERT(input_rep.IsTagged());
// Register allocator doesn't (yet) support allocation of double
// temps. Reserve xmm1 explicitly.
- LClampTToUint8* result = new LClampTToUint8(reg,
- TempRegister(),
- FixedTemp(xmm1));
+ LClampTToUint8* result = new(zone()) LClampTToUint8(reg,
+ TempRegister(),
+ FixedTemp(xmm1));
return AssignEnvironment(DefineSameAsFirst(result));
}
}
-LInstruction* LChunkBuilder::DoToInt32(HToInt32* instr) {
- HValue* value = instr->value();
- Representation input_rep = value->representation();
- LOperand* reg = UseRegister(value);
- if (input_rep.IsDouble()) {
- return AssignEnvironment(DefineAsRegister(new LDoubleToI(reg)));
- } else if (input_rep.IsInteger32()) {
- // Canonicalization should already have removed the hydrogen instruction in
- // this case, since it is a noop.
- UNREACHABLE();
- return NULL;
- } else {
- ASSERT(input_rep.IsTagged());
- LOperand* reg = UseRegister(value);
- // Register allocator doesn't (yet) support allocation of double
- // temps. Reserve xmm1 explicitly.
- LOperand* xmm_temp =
- CpuFeatures::IsSupported(SSE3)
- ? NULL
- : FixedTemp(xmm1);
- return AssignEnvironment(
- DefineSameAsFirst(new LTaggedToI(reg, xmm_temp)));
- }
-}
-
-
LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
- return new LReturn(UseFixed(instr->value(), rax));
+ return new(zone()) LReturn(UseFixed(instr->value(), rax));
}
LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
Representation r = instr->representation();
if (r.IsInteger32()) {
- return DefineAsRegister(new LConstantI);
+ return DefineAsRegister(new(zone()) LConstantI);
} else if (r.IsDouble()) {
LOperand* temp = TempRegister();
- return DefineAsRegister(new LConstantD(temp));
+ return DefineAsRegister(new(zone()) LConstantD(temp));
} else if (r.IsTagged()) {
- return DefineAsRegister(new LConstantT);
+ return DefineAsRegister(new(zone()) LConstantT);
} else {
UNREACHABLE();
return NULL;
@@ -1717,7 +1787,7 @@ LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
- LLoadGlobalCell* result = new LLoadGlobalCell;
+ LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
return instr->RequiresHoleCheck()
? AssignEnvironment(DefineAsRegister(result))
: DefineAsRegister(result);
@@ -1726,31 +1796,35 @@ LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
LOperand* global_object = UseFixed(instr->global_object(), rax);
- LLoadGlobalGeneric* result = new LLoadGlobalGeneric(global_object);
+ LLoadGlobalGeneric* result = new(zone()) LLoadGlobalGeneric(global_object);
return MarkAsCall(DefineFixed(result, rax), instr);
}
LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
- LStoreGlobalCell* result =
- new LStoreGlobalCell(UseTempRegister(instr->value()),
- TempRegister(),
- TempRegister());
- return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
+ LOperand* value = UseRegister(instr->value());
+ // Use a temp to avoid reloading the cell value address in the case where
+ // we perform a hole check.
+ return instr->RequiresHoleCheck()
+ ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
+ : new(zone()) LStoreGlobalCell(value, NULL);
}
LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
LOperand* global_object = UseFixed(instr->global_object(), rdx);
LOperand* value = UseFixed(instr->value(), rax);
- LStoreGlobalGeneric* result = new LStoreGlobalGeneric(global_object, value);
+ LStoreGlobalGeneric* result = new(zone()) LStoreGlobalGeneric(global_object,
+ value);
return MarkAsCall(result, instr);
}
LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LLoadContextSlot(context));
+ LInstruction* result =
+ DefineAsRegister(new(zone()) LLoadContextSlot(context));
+ return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
@@ -1767,14 +1841,15 @@ LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
value = UseRegister(instr->value());
temp = NULL;
}
- return new LStoreContextSlot(context, value, temp);
+ LInstruction* result = new(zone()) LStoreContextSlot(context, value, temp);
+ return instr->RequiresHoleCheck() ? AssignEnvironment(result) : result;
}
LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
ASSERT(instr->representation().IsTagged());
LOperand* obj = UseRegisterAtStart(instr->object());
- return DefineAsRegister(new LLoadNamedField(obj));
+ return DefineAsRegister(new(zone()) LLoadNamedField(obj));
}
@@ -1783,11 +1858,13 @@ LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
ASSERT(instr->representation().IsTagged());
if (instr->need_generic()) {
LOperand* obj = UseFixed(instr->object(), rax);
- LLoadNamedFieldPolymorphic* result = new LLoadNamedFieldPolymorphic(obj);
+ LLoadNamedFieldPolymorphic* result =
+ new(zone()) LLoadNamedFieldPolymorphic(obj);
return MarkAsCall(DefineFixed(result, rax), instr);
} else {
LOperand* obj = UseRegisterAtStart(instr->object());
- LLoadNamedFieldPolymorphic* result = new LLoadNamedFieldPolymorphic(obj);
+ LLoadNamedFieldPolymorphic* result =
+ new(zone()) LLoadNamedFieldPolymorphic(obj);
return AssignEnvironment(DefineAsRegister(result));
}
}
@@ -1795,7 +1872,7 @@ LInstruction* LChunkBuilder::DoLoadNamedFieldPolymorphic(
LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
LOperand* object = UseFixed(instr->object(), rax);
- LLoadNamedGeneric* result = new LLoadNamedGeneric(object);
+ LLoadNamedGeneric* result = new(zone()) LLoadNamedGeneric(object);
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -1803,20 +1880,20 @@ LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
HLoadFunctionPrototype* instr) {
return AssignEnvironment(DefineAsRegister(
- new LLoadFunctionPrototype(UseRegister(instr->function()))));
+ new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
}
LInstruction* LChunkBuilder::DoLoadElements(HLoadElements* instr) {
LOperand* input = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LLoadElements(input));
+ return DefineAsRegister(new(zone()) LLoadElements(input));
}
LInstruction* LChunkBuilder::DoLoadExternalArrayPointer(
HLoadExternalArrayPointer* instr) {
LOperand* input = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LLoadExternalArrayPointer(input));
+ return DefineAsRegister(new(zone()) LLoadExternalArrayPointer(input));
}
@@ -1826,8 +1903,9 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastElement(
ASSERT(instr->key()->representation().IsInteger32());
LOperand* obj = UseRegisterAtStart(instr->object());
LOperand* key = UseRegisterOrConstantAtStart(instr->key());
- LLoadKeyedFastElement* result = new LLoadKeyedFastElement(obj, key);
- return AssignEnvironment(DefineAsRegister(result));
+ LLoadKeyedFastElement* result = new(zone()) LLoadKeyedFastElement(obj, key);
+ if (instr->RequiresHoleCheck()) AssignEnvironment(result);
+ return DefineAsRegister(result);
}
@@ -1838,7 +1916,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
LOperand* elements = UseRegisterAtStart(instr->elements());
LOperand* key = UseRegisterOrConstantAtStart(instr->key());
LLoadKeyedFastDoubleElement* result =
- new LLoadKeyedFastDoubleElement(elements, key);
+ new(zone()) LLoadKeyedFastDoubleElement(elements, key);
return AssignEnvironment(DefineAsRegister(result));
}
@@ -1846,19 +1924,18 @@ LInstruction* LChunkBuilder::DoLoadKeyedFastDoubleElement(
LInstruction* LChunkBuilder::DoLoadKeyedSpecializedArrayElement(
HLoadKeyedSpecializedArrayElement* instr) {
ElementsKind elements_kind = instr->elements_kind();
- Representation representation(instr->representation());
ASSERT(
- (representation.IsInteger32() &&
+ (instr->representation().IsInteger32() &&
(elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
(elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
- (representation.IsDouble() &&
+ (instr->representation().IsDouble() &&
((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
(elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
ASSERT(instr->key()->representation().IsInteger32());
LOperand* external_pointer = UseRegister(instr->external_pointer());
LOperand* key = UseRegisterOrConstant(instr->key());
LLoadKeyedSpecializedArrayElement* result =
- new LLoadKeyedSpecializedArrayElement(external_pointer, key);
+ new(zone()) LLoadKeyedSpecializedArrayElement(external_pointer, key);
LInstruction* load_instr = DefineAsRegister(result);
// An unsigned int array load might overflow and cause a deopt, make sure it
// has an environment.
@@ -1871,7 +1948,7 @@ LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
LOperand* object = UseFixed(instr->object(), rdx);
LOperand* key = UseFixed(instr->key(), rax);
- LLoadKeyedGeneric* result = new LLoadKeyedGeneric(object, key);
+ LLoadKeyedGeneric* result = new(zone()) LLoadKeyedGeneric(object, key);
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -1890,8 +1967,7 @@ LInstruction* LChunkBuilder::DoStoreKeyedFastElement(
LOperand* key = needs_write_barrier
? UseTempRegister(instr->key())
: UseRegisterOrConstantAtStart(instr->key());
-
- return AssignEnvironment(new LStoreKeyedFastElement(obj, key, val));
+ return new(zone()) LStoreKeyedFastElement(obj, key, val);
}
@@ -1905,19 +1981,18 @@ LInstruction* LChunkBuilder::DoStoreKeyedFastDoubleElement(
LOperand* val = UseTempRegister(instr->value());
LOperand* key = UseRegisterOrConstantAtStart(instr->key());
- return new LStoreKeyedFastDoubleElement(elements, key, val);
+ return new(zone()) LStoreKeyedFastDoubleElement(elements, key, val);
}
LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
HStoreKeyedSpecializedArrayElement* instr) {
- Representation representation(instr->value()->representation());
ElementsKind elements_kind = instr->elements_kind();
ASSERT(
- (representation.IsInteger32() &&
+ (instr->value()->representation().IsInteger32() &&
(elements_kind != EXTERNAL_FLOAT_ELEMENTS) &&
(elements_kind != EXTERNAL_DOUBLE_ELEMENTS)) ||
- (representation.IsDouble() &&
+ (instr->value()->representation().IsDouble() &&
((elements_kind == EXTERNAL_FLOAT_ELEMENTS) ||
(elements_kind == EXTERNAL_DOUBLE_ELEMENTS))));
ASSERT(instr->external_pointer()->representation().IsExternal());
@@ -1932,9 +2007,9 @@ LInstruction* LChunkBuilder::DoStoreKeyedSpecializedArrayElement(
: UseRegister(instr->value());
LOperand* key = UseRegisterOrConstant(instr->key());
- return new LStoreKeyedSpecializedArrayElement(external_pointer,
- key,
- val);
+ return new(zone()) LStoreKeyedSpecializedArrayElement(external_pointer,
+ key,
+ val);
}
@@ -1947,7 +2022,8 @@ LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
ASSERT(instr->key()->representation().IsTagged());
ASSERT(instr->value()->representation().IsTagged());
- LStoreKeyedGeneric* result = new LStoreKeyedGeneric(object, key, value);
+ LStoreKeyedGeneric* result =
+ new(zone()) LStoreKeyedGeneric(object, key, value);
return MarkAsCall(result, instr);
}
@@ -1960,14 +2036,16 @@ LInstruction* LChunkBuilder::DoTransitionElementsKind(
LOperand* new_map_reg = TempRegister();
LOperand* temp_reg = TempRegister();
LTransitionElementsKind* result =
- new LTransitionElementsKind(object, new_map_reg, temp_reg);
+ new(zone()) LTransitionElementsKind(object, new_map_reg, temp_reg);
return DefineSameAsFirst(result);
} else {
LOperand* object = UseFixed(instr->object(), rax);
LOperand* fixed_object_reg = FixedTemp(rdx);
LOperand* new_map_reg = FixedTemp(rbx);
LTransitionElementsKind* result =
- new LTransitionElementsKind(object, new_map_reg, fixed_object_reg);
+ new(zone()) LTransitionElementsKind(object,
+ new_map_reg,
+ fixed_object_reg);
return MarkAsCall(DefineFixed(result, rax), instr);
}
}
@@ -1989,7 +2067,7 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
LOperand* temp = (!instr->is_in_object() || needs_write_barrier)
? TempRegister() : NULL;
- return new LStoreNamedField(obj, val, temp);
+ return new(zone()) LStoreNamedField(obj, val, temp);
}
@@ -1997,7 +2075,7 @@ LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
LOperand* object = UseFixed(instr->object(), rdx);
LOperand* value = UseFixed(instr->value(), rax);
- LStoreNamedGeneric* result = new LStoreNamedGeneric(object, value);
+ LStoreNamedGeneric* result = new(zone()) LStoreNamedGeneric(object, value);
return MarkAsCall(result, instr);
}
@@ -2005,55 +2083,67 @@ LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
LOperand* left = UseOrConstantAtStart(instr->left());
LOperand* right = UseOrConstantAtStart(instr->right());
- return MarkAsCall(DefineFixed(new LStringAdd(left, right), rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LStringAdd(left, right), rax),
+ instr);
}
LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
LOperand* string = UseTempRegister(instr->string());
LOperand* index = UseTempRegister(instr->index());
- LStringCharCodeAt* result = new LStringCharCodeAt(string, index);
+ LStringCharCodeAt* result = new(zone()) LStringCharCodeAt(string, index);
return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}
LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
LOperand* char_code = UseRegister(instr->value());
- LStringCharFromCode* result = new LStringCharFromCode(char_code);
+ LStringCharFromCode* result = new(zone()) LStringCharFromCode(char_code);
return AssignPointerMap(DefineAsRegister(result));
}
LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
LOperand* string = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LStringLength(string));
+ return DefineAsRegister(new(zone()) LStringLength(string));
+}
+
+
+LInstruction* LChunkBuilder::DoAllocateObject(HAllocateObject* instr) {
+ LAllocateObject* result = new LAllocateObject(TempRegister());
+ return AssignPointerMap(DefineAsRegister(result));
+}
+
+
+LInstruction* LChunkBuilder::DoFastLiteral(HFastLiteral* instr) {
+ return MarkAsCall(DefineFixed(new(zone()) LFastLiteral, rax), instr);
}
LInstruction* LChunkBuilder::DoArrayLiteral(HArrayLiteral* instr) {
- return MarkAsCall(DefineFixed(new LArrayLiteral, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LArrayLiteral, rax), instr);
}
LInstruction* LChunkBuilder::DoObjectLiteral(HObjectLiteral* instr) {
- return MarkAsCall(DefineFixed(new LObjectLiteral, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LObjectLiteral, rax), instr);
}
LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
- return MarkAsCall(DefineFixed(new LRegExpLiteral, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LRegExpLiteral, rax), instr);
}
LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
- return MarkAsCall(DefineFixed(new LFunctionLiteral, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LFunctionLiteral, rax), instr);
}
LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
- LDeleteProperty* result =
- new LDeleteProperty(UseAtStart(instr->object()),
- UseOrConstantAtStart(instr->key()));
+ LOperand* object = UseAtStart(instr->object());
+ LOperand* key = UseOrConstantAtStart(instr->key());
+ LDeleteProperty* result = new(zone()) LDeleteProperty(object, key);
return MarkAsCall(DefineFixed(result, rax), instr);
}
@@ -2061,13 +2151,13 @@ LInstruction* LChunkBuilder::DoDeleteProperty(HDeleteProperty* instr) {
LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
allocator_->MarkAsOsrEntry();
current_block_->last_environment()->set_ast_id(instr->ast_id());
- return AssignEnvironment(new LOsrEntry);
+ return AssignEnvironment(new(zone()) LOsrEntry);
}
LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
int spill_index = chunk()->GetParameterStackSlot(instr->index());
- return DefineAsSpilled(new LParameter, spill_index);
+ return DefineAsSpilled(new(zone()) LParameter, spill_index);
}
@@ -2077,13 +2167,13 @@ LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
Abort("Too many spill slots needed for OSR");
spill_index = 0;
}
- return DefineAsSpilled(new LUnknownOSRValue, spill_index);
+ return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
}
LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
argument_count_ -= instr->argument_count();
- return MarkAsCall(DefineFixed(new LCallStub, rax), instr);
+ return MarkAsCall(DefineFixed(new(zone()) LCallStub, rax), instr);
}
@@ -2100,32 +2190,33 @@ LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
LOperand* arguments = UseRegister(instr->arguments());
LOperand* length = UseTempRegister(instr->length());
LOperand* index = Use(instr->index());
- LAccessArgumentsAt* result = new LAccessArgumentsAt(arguments, length, index);
+ LAccessArgumentsAt* result =
+ new(zone()) LAccessArgumentsAt(arguments, length, index);
return AssignEnvironment(DefineAsRegister(result));
}
LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
LOperand* object = UseFixed(instr->value(), rax);
- LToFastProperties* result = new LToFastProperties(object);
+ LToFastProperties* result = new(zone()) LToFastProperties(object);
return MarkAsCall(DefineFixed(result, rax), instr);
}
LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
- LTypeof* result = new LTypeof(UseAtStart(instr->value()));
+ LTypeof* result = new(zone()) LTypeof(UseAtStart(instr->value()));
return MarkAsCall(DefineFixed(result, rax), instr);
}
LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
- return new LTypeofIsAndBranch(UseTempRegister(instr->value()));
+ return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
}
LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
HIsConstructCallAndBranch* instr) {
- return new LIsConstructCallAndBranch(TempRegister());
+ return new(zone()) LIsConstructCallAndBranch(TempRegister());
}
@@ -2148,7 +2239,7 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
// If there is an instruction pending deoptimization environment create a
// lazy bailout instruction to capture the environment.
if (pending_deoptimization_ast_id_ == instr->ast_id()) {
- LLazyBailout* lazy_bailout = new LLazyBailout;
+ LLazyBailout* lazy_bailout = new(zone()) LLazyBailout;
LInstruction* result = AssignEnvironment(lazy_bailout);
instruction_pending_deoptimization_environment_->
set_deoptimization_environment(result->environment());
@@ -2162,10 +2253,10 @@ LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
if (instr->is_function_entry()) {
- return MarkAsCall(new LStackCheck, instr);
+ return MarkAsCall(new(zone()) LStackCheck, instr);
} else {
ASSERT(instr->is_backwards_branch());
- return AssignEnvironment(AssignPointerMap(new LStackCheck));
+ return AssignEnvironment(AssignPointerMap(new(zone()) LStackCheck));
}
}
@@ -2174,9 +2265,14 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
HEnvironment* outer = current_block_->last_environment();
HConstant* undefined = graph()->GetConstantUndefined();
HEnvironment* inner = outer->CopyForInlining(instr->closure(),
+ instr->arguments_count(),
instr->function(),
undefined,
- instr->call_kind());
+ instr->call_kind(),
+ instr->is_construct());
+ if (instr->arguments() != NULL) {
+ inner->Bind(instr->arguments(), graph()->GetArgumentsObject());
+ }
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
return NULL;
@@ -2184,7 +2280,8 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
- HEnvironment* outer = current_block_->last_environment()->outer();
+ HEnvironment* outer = current_block_->last_environment()->
+ DiscardInlined(false);
current_block_->UpdateEnvironment(outer);
return NULL;
}
@@ -2193,11 +2290,39 @@ LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
LInstruction* LChunkBuilder::DoIn(HIn* instr) {
LOperand* key = UseOrConstantAtStart(instr->key());
LOperand* object = UseOrConstantAtStart(instr->object());
- LIn* result = new LIn(key, object);
+ LIn* result = new(zone()) LIn(key, object);
return MarkAsCall(DefineFixed(result, rax), instr);
}
+LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
+ LOperand* object = UseFixed(instr->enumerable(), rax);
+ LForInPrepareMap* result = new(zone()) LForInPrepareMap(object);
+ return MarkAsCall(DefineFixed(result, rax), instr, CAN_DEOPTIMIZE_EAGERLY);
+}
+
+
+LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
+ LOperand* map = UseRegister(instr->map());
+ return AssignEnvironment(DefineAsRegister(
+ new(zone()) LForInCacheArray(map)));
+}
+
+
+LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
+ LOperand* value = UseRegisterAtStart(instr->value());
+ LOperand* map = UseRegisterAtStart(instr->map());
+ return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
+}
+
+
+LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
+ LOperand* object = UseRegister(instr->object());
+ LOperand* index = UseTempRegister(instr->index());
+ return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index));
+}
+
+
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_X64
diff --git a/src/3rdparty/v8/src/x64/lithium-x64.h b/src/3rdparty/v8/src/x64/lithium-x64.h
index 9156162..99f28f0 100644
--- a/src/3rdparty/v8/src/x64/lithium-x64.h
+++ b/src/3rdparty/v8/src/x64/lithium-x64.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -49,6 +49,7 @@ class LCodeGen;
#define LITHIUM_CONCRETE_INSTRUCTION_LIST(V) \
V(AccessArgumentsAt) \
V(AddI) \
+ V(AllocateObject) \
V(ApplyArguments) \
V(ArgumentsElements) \
V(ArgumentsLength) \
@@ -70,7 +71,7 @@ class LCodeGen;
V(CallStub) \
V(CheckFunction) \
V(CheckInstanceType) \
- V(CheckMap) \
+ V(CheckMaps) \
V(CheckNonSmi) \
V(CheckPrototypeMaps) \
V(CheckSmi) \
@@ -87,11 +88,13 @@ class LCodeGen;
V(ConstantI) \
V(ConstantT) \
V(Context) \
+ V(DeclareGlobals) \
V(DeleteProperty) \
V(Deoptimize) \
V(DivI) \
V(DoubleToI) \
V(ElementsKind) \
+ V(FastLiteral) \
V(FixedArrayBaseLength) \
V(FunctionLiteral) \
V(GetCachedArrayIndex) \
@@ -109,8 +112,10 @@ class LCodeGen;
V(IsConstructCallAndBranch) \
V(IsNilAndBranch) \
V(IsObjectAndBranch) \
+ V(IsStringAndBranch) \
V(IsSmiAndBranch) \
V(IsUndetectableAndBranch) \
+ V(StringCompareAndBranch) \
V(JSArrayLength) \
V(Label) \
V(LazyBailout) \
@@ -138,6 +143,7 @@ class LCodeGen;
V(Parameter) \
V(Power) \
V(PushArgument) \
+ V(Random) \
V(RegExpLiteral) \
V(Return) \
V(ShiftI) \
@@ -167,7 +173,13 @@ class LCodeGen;
V(TypeofIsAndBranch) \
V(UnaryMathOperation) \
V(UnknownOSRValue) \
- V(ValueOf)
+ V(ValueOf) \
+ V(ForInPrepareMap) \
+ V(ForInCacheArray) \
+ V(CheckMapValue) \
+ V(LoadFieldByIndex) \
+ V(DateField) \
+ V(WrapReceiver)
#define DECLARE_CONCRETE_INSTRUCTION(type, mnemonic) \
@@ -458,6 +470,20 @@ class LControlInstruction: public LTemplateInstruction<0, I, T> {
};
+class LWrapReceiver: public LTemplateInstruction<1, 2, 0> {
+ public:
+ LWrapReceiver(LOperand* receiver, LOperand* function) {
+ inputs_[0] = receiver;
+ inputs_[1] = function;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(WrapReceiver, "wrap-receiver")
+
+ LOperand* receiver() { return inputs_[0]; }
+ LOperand* function() { return inputs_[1]; }
+};
+
+
class LApplyArguments: public LTemplateInstruction<1, 4, 0> {
public:
LApplyArguments(LOperand* function,
@@ -640,6 +666,20 @@ class LIsObjectAndBranch: public LControlInstruction<1, 0> {
};
+class LIsStringAndBranch: public LControlInstruction<1, 1> {
+ public:
+ explicit LIsStringAndBranch(LOperand* value, LOperand* temp) {
+ inputs_[0] = value;
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(IsStringAndBranch, "is-string-and-branch")
+ DECLARE_HYDROGEN_ACCESSOR(IsStringAndBranch)
+
+ virtual void PrintDataTo(StringStream* stream);
+};
+
+
class LIsSmiAndBranch: public LControlInstruction<1, 0> {
public:
explicit LIsSmiAndBranch(LOperand* value) {
@@ -668,6 +708,23 @@ class LIsUndetectableAndBranch: public LControlInstruction<1, 1> {
};
+class LStringCompareAndBranch: public LControlInstruction<2, 0> {
+ public:
+ explicit LStringCompareAndBranch(LOperand* left, LOperand* right) {
+ inputs_[0] = left;
+ inputs_[1] = right;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(StringCompareAndBranch,
+ "string-compare-and-branch")
+ DECLARE_HYDROGEN_ACCESSOR(StringCompareAndBranch)
+
+ virtual void PrintDataTo(StringStream* stream);
+
+ Token::Value op() const { return hydrogen()->token(); }
+};
+
+
class LHasInstanceTypeAndBranch: public LControlInstruction<1, 0> {
public:
explicit LHasInstanceTypeAndBranch(LOperand* value) {
@@ -946,6 +1003,22 @@ class LValueOf: public LTemplateInstruction<1, 1, 0> {
};
+class LDateField: public LTemplateInstruction<1, 1, 0> {
+ public:
+ LDateField(LOperand* date, Smi* index) : index_(index) {
+ inputs_[0] = date;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(ValueOf, "date-field")
+ DECLARE_HYDROGEN_ACCESSOR(ValueOf)
+
+ Smi* index() const { return index_; }
+
+ private:
+ Smi* index_;
+};
+
+
class LThrow: public LTemplateInstruction<0, 1, 0> {
public:
explicit LThrow(LOperand* value) {
@@ -990,6 +1063,17 @@ class LPower: public LTemplateInstruction<1, 2, 0> {
};
+class LRandom: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LRandom(LOperand* global_object) {
+ inputs_[0] = global_object;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(Random, "random")
+ DECLARE_HYDROGEN_ACCESSOR(Random)
+};
+
+
class LArithmeticD: public LTemplateInstruction<1, 2, 0> {
public:
LArithmeticD(Token::Value op, LOperand* left, LOperand* right)
@@ -1197,16 +1281,17 @@ class LLoadGlobalGeneric: public LTemplateInstruction<1, 1, 0> {
};
-class LStoreGlobalCell: public LTemplateInstruction<0, 1, 2> {
+class LStoreGlobalCell: public LTemplateInstruction<0, 1, 1> {
public:
- explicit LStoreGlobalCell(LOperand* value, LOperand* temp1, LOperand* temp2) {
+ explicit LStoreGlobalCell(LOperand* value, LOperand* temp) {
inputs_[0] = value;
- temps_[0] = temp1;
- temps_[1] = temp2;
+ temps_[0] = temp;
}
DECLARE_CONCRETE_INSTRUCTION(StoreGlobalCell, "store-global-cell")
DECLARE_HYDROGEN_ACCESSOR(StoreGlobalCell)
+
+ LOperand* value() { return inputs_[0]; }
};
@@ -1224,7 +1309,7 @@ class LStoreGlobalGeneric: public LTemplateInstruction<0, 2, 0> {
LOperand* global_object() { return InputAt(0); }
Handle<Object> name() const { return hydrogen()->name(); }
LOperand* value() { return InputAt(1); }
- bool strict_mode() { return hydrogen()->strict_mode(); }
+ StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
@@ -1298,15 +1383,16 @@ class LOuterContext: public LTemplateInstruction<1, 1, 0> {
};
-class LGlobalObject: public LTemplateInstruction<1, 0, 0> {
+class LDeclareGlobals: public LTemplateInstruction<0, 0, 0> {
public:
- explicit LGlobalObject(bool qml_global) : qml_global_(qml_global) {}
+ DECLARE_CONCRETE_INSTRUCTION(DeclareGlobals, "declare-globals")
+ DECLARE_HYDROGEN_ACCESSOR(DeclareGlobals)
+};
- DECLARE_CONCRETE_INSTRUCTION(GlobalObject, "global-object")
- bool qml_global() { return qml_global_; }
- private:
- bool qml_global_;
+class LGlobalObject: public LTemplateInstruction<1, 0, 0> {
+ public:
+ DECLARE_CONCRETE_INSTRUCTION(GlobalObject, "global-object")
};
@@ -1380,14 +1466,17 @@ class LCallNamed: public LTemplateInstruction<1, 0, 0> {
};
-class LCallFunction: public LTemplateInstruction<1, 0, 0> {
+class LCallFunction: public LTemplateInstruction<1, 1, 0> {
public:
- LCallFunction() {}
+ explicit LCallFunction(LOperand* function) {
+ inputs_[0] = function;
+ }
DECLARE_CONCRETE_INSTRUCTION(CallFunction, "call-function")
DECLARE_HYDROGEN_ACCESSOR(CallFunction)
- int arity() const { return hydrogen()->argument_count() - 2; }
+ LOperand* function() { return inputs_[0]; }
+ int arity() const { return hydrogen()->argument_count() - 1; }
};
@@ -1396,16 +1485,10 @@ class LCallGlobal: public LTemplateInstruction<1, 0, 0> {
DECLARE_CONCRETE_INSTRUCTION(CallGlobal, "call-global")
DECLARE_HYDROGEN_ACCESSOR(CallGlobal)
- explicit LCallGlobal(bool qml_global) : qml_global_(qml_global) {}
-
virtual void PrintDataTo(StringStream* stream);
Handle<String> name() const {return hydrogen()->name(); }
int arity() const { return hydrogen()->argument_count() - 1; }
-
- bool qml_global() { return qml_global_; }
- private:
- bool qml_global_;
};
@@ -1582,7 +1665,6 @@ class LStoreNamedGeneric: public LTemplateInstruction<0, 2, 0> {
LOperand* value() { return inputs_[1]; }
Handle<Object> name() const { return hydrogen()->name(); }
StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
- bool strict_mode() { return strict_mode_flag() == kStrictMode; }
};
@@ -1667,7 +1749,7 @@ class LStoreKeyedGeneric: public LTemplateInstruction<0, 3, 0> {
LOperand* object() { return inputs_[0]; }
LOperand* key() { return inputs_[1]; }
LOperand* value() { return inputs_[2]; }
- bool strict_mode() { return hydrogen()->strict_mode(); }
+ StrictModeFlag strict_mode_flag() { return hydrogen()->strict_mode_flag(); }
};
@@ -1757,6 +1839,8 @@ class LCheckFunction: public LTemplateInstruction<0, 1, 0> {
inputs_[0] = value;
}
+ LOperand* value() { return InputAt(0); }
+
DECLARE_CONCRETE_INSTRUCTION(CheckFunction, "check-function")
DECLARE_HYDROGEN_ACCESSOR(CheckFunction)
};
@@ -1773,14 +1857,14 @@ class LCheckInstanceType: public LTemplateInstruction<0, 1, 0> {
};
-class LCheckMap: public LTemplateInstruction<0, 1, 0> {
+class LCheckMaps: public LTemplateInstruction<0, 1, 0> {
public:
- explicit LCheckMap(LOperand* value) {
+ explicit LCheckMaps(LOperand* value) {
inputs_[0] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(CheckMap, "check-map")
- DECLARE_HYDROGEN_ACCESSOR(CheckMap)
+ DECLARE_CONCRETE_INSTRUCTION(CheckMaps, "check-maps")
+ DECLARE_HYDROGEN_ACCESSOR(CheckMaps)
};
@@ -1859,6 +1943,24 @@ class LCheckNonSmi: public LTemplateInstruction<0, 1, 0> {
};
+class LAllocateObject: public LTemplateInstruction<1, 0, 1> {
+ public:
+ explicit LAllocateObject(LOperand* temp) {
+ temps_[0] = temp;
+ }
+
+ DECLARE_CONCRETE_INSTRUCTION(AllocateObject, "allocate-object")
+ DECLARE_HYDROGEN_ACCESSOR(AllocateObject)
+};
+
+
+class LFastLiteral: public LTemplateInstruction<1, 0, 0> {
+ public:
+ DECLARE_CONCRETE_INSTRUCTION(FastLiteral, "fast-literal")
+ DECLARE_HYDROGEN_ACCESSOR(FastLiteral)
+};
+
+
class LArrayLiteral: public LTemplateInstruction<1, 0, 0> {
public:
DECLARE_CONCRETE_INSTRUCTION(ArrayLiteral, "array-literal")
@@ -1986,6 +2088,62 @@ class LStackCheck: public LTemplateInstruction<0, 0, 0> {
};
+class LForInPrepareMap: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LForInPrepareMap(LOperand* object) {
+ inputs_[0] = object;
+ }
+
+ LOperand* object() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(ForInPrepareMap, "for-in-prepare-map")
+};
+
+
+class LForInCacheArray: public LTemplateInstruction<1, 1, 0> {
+ public:
+ explicit LForInCacheArray(LOperand* map) {
+ inputs_[0] = map;
+ }
+
+ LOperand* map() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(ForInCacheArray, "for-in-cache-array")
+
+ int idx() {
+ return HForInCacheArray::cast(this->hydrogen_value())->idx();
+ }
+};
+
+
+class LCheckMapValue: public LTemplateInstruction<0, 2, 0> {
+ public:
+ LCheckMapValue(LOperand* value, LOperand* map) {
+ inputs_[0] = value;
+ inputs_[1] = map;
+ }
+
+ LOperand* value() { return inputs_[0]; }
+ LOperand* map() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(CheckMapValue, "check-map-value")
+};
+
+
+class LLoadFieldByIndex: public LTemplateInstruction<1, 2, 0> {
+ public:
+ LLoadFieldByIndex(LOperand* object, LOperand* index) {
+ inputs_[0] = object;
+ inputs_[1] = index;
+ }
+
+ LOperand* object() { return inputs_[0]; }
+ LOperand* index() { return inputs_[1]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(LoadFieldByIndex, "load-field-by-index")
+};
+
+
class LChunkBuilder;
class LChunk: public ZoneObject {
public:
@@ -2059,6 +2217,7 @@ class LChunkBuilder BASE_EMBEDDED {
: chunk_(NULL),
info_(info),
graph_(graph),
+ zone_(graph->isolate()->zone()),
status_(UNUSED),
current_instruction_(NULL),
current_block_(NULL),
@@ -2088,6 +2247,7 @@ class LChunkBuilder BASE_EMBEDDED {
LChunk* chunk() const { return chunk_; }
CompilationInfo* info() const { return info_; }
HGraph* graph() const { return graph_; }
+ Zone* zone() const { return zone_; }
bool is_unused() const { return status_ == UNUSED; }
bool is_building() const { return status_ == BUILDING; }
@@ -2097,7 +2257,6 @@ class LChunkBuilder BASE_EMBEDDED {
void Abort(const char* format, ...);
// Methods for getting operands for Use / Define / Temp.
- LRegister* ToOperand(Register reg);
LUnallocated* ToUnallocated(Register reg);
LUnallocated* ToUnallocated(XMMRegister reg);
@@ -2148,8 +2307,6 @@ class LChunkBuilder BASE_EMBEDDED {
LInstruction* Define(LTemplateInstruction<1, I, T>* instr,
LUnallocated* result);
template<int I, int T>
- LInstruction* Define(LTemplateInstruction<1, I, T>* instr);
- template<int I, int T>
LInstruction* DefineAsRegister(LTemplateInstruction<1, I, T>* instr);
template<int I, int T>
LInstruction* DefineAsSpilled(LTemplateInstruction<1, I, T>* instr,
@@ -2199,6 +2356,7 @@ class LChunkBuilder BASE_EMBEDDED {
LChunk* chunk_;
CompilationInfo* info_;
HGraph* const graph_;
+ Zone* zone_;
Status status_;
HInstruction* current_instruction_;
HBasicBlock* current_block_;
diff --git a/src/3rdparty/v8/src/x64/macro-assembler-x64.cc b/src/3rdparty/v8/src/x64/macro-assembler-x64.cc
index 5b81fa6..f7db250 100644
--- a/src/3rdparty/v8/src/x64/macro-assembler-x64.cc
+++ b/src/3rdparty/v8/src/x64/macro-assembler-x64.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -495,7 +495,7 @@ void MacroAssembler::Abort(const char* msg) {
// from the real pointer as a smi.
intptr_t p1 = reinterpret_cast<intptr_t>(msg);
intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
- // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
+ // Note: p0 might not be a valid Smi _value_, but it has a valid Smi tag.
ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
if (msg != NULL) {
@@ -799,8 +799,15 @@ void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
}
-static const Register saved_regs[] =
- { rax, rcx, rdx, rbx, rbp, rsi, rdi, r8, r9, r10, r11 };
+#define REG(Name) { kRegister_ ## Name ## _Code }
+
+static const Register saved_regs[] = {
+ REG(rax), REG(rcx), REG(rdx), REG(rbx), REG(rbp), REG(rsi), REG(rdi), REG(r8),
+ REG(r9), REG(r10), REG(r11)
+};
+
+#undef REG
+
static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);
@@ -2114,7 +2121,7 @@ void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(
movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
- // Check that both are flat ascii strings.
+ // Check that both are flat ASCII strings.
ASSERT(kNotStringTag != 0);
const int kFlatAsciiStringMask =
kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
@@ -2160,7 +2167,7 @@ void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialAscii(
movq(scratch1, first_object_instance_type);
movq(scratch2, second_object_instance_type);
- // Check that both are flat ascii strings.
+ // Check that both are flat ASCII strings.
ASSERT(kNotStringTag != 0);
const int kFlatAsciiStringMask =
kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
@@ -2238,6 +2245,43 @@ void MacroAssembler::Push(Handle<Object> source) {
}
+void MacroAssembler::LoadHeapObject(Register result,
+ Handle<HeapObject> object) {
+ if (isolate()->heap()->InNewSpace(*object)) {
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(object);
+ movq(result, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
+ movq(result, Operand(result, 0));
+ } else {
+ Move(result, object);
+ }
+}
+
+
+void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
+ if (isolate()->heap()->InNewSpace(*object)) {
+ Handle<JSGlobalPropertyCell> cell =
+ isolate()->factory()->NewJSGlobalPropertyCell(object);
+ movq(kScratchRegister, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
+ movq(kScratchRegister, Operand(kScratchRegister, 0));
+ push(kScratchRegister);
+ } else {
+ Push(object);
+ }
+}
+
+
+void MacroAssembler::LoadGlobalCell(Register dst,
+ Handle<JSGlobalPropertyCell> cell) {
+ if (dst.is(rax)) {
+ load_rax(cell.location(), RelocInfo::GLOBAL_PROPERTY_CELL);
+ } else {
+ movq(dst, cell, RelocInfo::GLOBAL_PROPERTY_CELL);
+ movq(dst, Operand(dst, 0));
+ }
+}
+
+
void MacroAssembler::Push(Smi* source) {
intptr_t smi = reinterpret_cast<intptr_t>(source);
if (is_int32(smi)) {
@@ -2381,7 +2425,8 @@ void MacroAssembler::Dropad() {
// Order general registers are pushed by Pushad:
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
-int MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
+const int
+MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
0,
1,
2,
@@ -2416,146 +2461,146 @@ Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
}
-void MacroAssembler::PushTryHandler(CodeLocation try_location,
- HandlerType type) {
+void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
+ int handler_index) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
-
- // The pc (return address) is already on TOS. This code pushes state,
- // frame pointer, context, and current handler.
- if (try_location == IN_JAVASCRIPT) {
- if (type == TRY_CATCH_HANDLER) {
- push(Immediate(StackHandler::TRY_CATCH));
- } else {
- push(Immediate(StackHandler::TRY_FINALLY));
- }
- push(rbp);
- push(rsi);
- } else {
- ASSERT(try_location == IN_JS_ENTRY);
- // The frame pointer does not point to a JS frame so we save NULL
- // for rbp. We expect the code throwing an exception to check rbp
- // before dereferencing it to restore the context.
- push(Immediate(StackHandler::ENTRY));
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+ STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
+
+ // We will build up the handler from the bottom by pushing on the stack.
+ // First push the frame pointer and context.
+ if (kind == StackHandler::JS_ENTRY) {
+ // The frame pointer does not point to a JS frame so we save NULL for
+ // rbp. We expect the code throwing an exception to check rbp before
+ // dereferencing it to restore the context.
push(Immediate(0)); // NULL frame pointer.
Push(Smi::FromInt(0)); // No context.
+ } else {
+ push(rbp);
+ push(rsi);
}
- // Save the current handler.
- Operand handler_operand =
- ExternalOperand(ExternalReference(Isolate::kHandlerAddress, isolate()));
- push(handler_operand);
- // Link this handler.
- movq(handler_operand, rsp);
+
+ // Push the state and the code object.
+ unsigned state =
+ StackHandler::IndexField::encode(handler_index) |
+ StackHandler::KindField::encode(kind);
+ push(Immediate(state));
+ Push(CodeObject());
+
+ // Link the current handler as the next handler.
+ ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
+ push(ExternalOperand(handler_address));
+ // Set this new handler as the current one.
+ movq(ExternalOperand(handler_address), rsp);
}
void MacroAssembler::PopTryHandler() {
- ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
- // Unlink this handler.
- Operand handler_operand =
- ExternalOperand(ExternalReference(Isolate::kHandlerAddress, isolate()));
- pop(handler_operand);
- // Remove the remaining fields.
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+ ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
+ pop(ExternalOperand(handler_address));
addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
+void MacroAssembler::JumpToHandlerEntry() {
+ // Compute the handler entry address and jump to it. The handler table is
+ // a fixed array of (smi-tagged) code offsets.
+ // rax = exception, rdi = code object, rdx = state.
+ movq(rbx, FieldOperand(rdi, Code::kHandlerTableOffset));
+ shr(rdx, Immediate(StackHandler::kKindWidth));
+ movq(rdx, FieldOperand(rbx, rdx, times_8, FixedArray::kHeaderSize));
+ SmiToInteger64(rdx, rdx);
+ lea(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
+ jmp(rdi);
+}
+
+
void MacroAssembler::Throw(Register value) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
- // Keep thrown value in rax.
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+ STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
+
+ // The exception is expected in rax.
if (!value.is(rax)) {
movq(rax, value);
}
-
+ // Drop the stack pointer to the top of the top handler.
ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
- Operand handler_operand = ExternalOperand(handler_address);
- movq(rsp, handler_operand);
- // get next in chain
- pop(handler_operand);
+ movq(rsp, ExternalOperand(handler_address));
+ // Restore the next handler.
+ pop(ExternalOperand(handler_address));
+
+ // Remove the code object and state, compute the handler address in rdi.
+ pop(rdi); // Code object.
+ pop(rdx); // Offset and state.
+
+ // Restore the context and frame pointer.
pop(rsi); // Context.
pop(rbp); // Frame pointer.
- pop(rdx); // State.
// If the handler is a JS frame, restore the context to the frame.
- // (rdx == ENTRY) == (rbp == 0) == (rsi == 0), so we could test any
- // of them.
+ // (kind == ENTRY) == (rbp == 0) == (rsi == 0), so we could test either
+ // rbp or rsi.
Label skip;
- cmpq(rdx, Immediate(StackHandler::ENTRY));
- j(equal, &skip, Label::kNear);
+ testq(rsi, rsi);
+ j(zero, &skip, Label::kNear);
movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
bind(&skip);
- ret(0);
+ JumpToHandlerEntry();
}
-void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
- Register value) {
+void MacroAssembler::ThrowUncatchable(Register value) {
// Adjust this code if not the case.
STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
- // Keep thrown value in rax.
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+ STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
+
+ // The exception is expected in rax.
if (!value.is(rax)) {
movq(rax, value);
}
- // Fetch top stack handler.
+ // Drop the stack pointer to the top of the top stack handler.
ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
Load(rsp, handler_address);
- // Unwind the handlers until the ENTRY handler is found.
- Label loop, done;
- bind(&loop);
- // Load the type of the current stack handler.
- const int kStateOffset = StackHandlerConstants::kStateOffset;
- cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
- j(equal, &done, Label::kNear);
- // Fetch the next handler in the list.
- const int kNextOffset = StackHandlerConstants::kNextOffset;
- movq(rsp, Operand(rsp, kNextOffset));
- jmp(&loop);
- bind(&done);
+ // Unwind the handlers until the top ENTRY handler is found.
+ Label fetch_next, check_kind;
+ jmp(&check_kind, Label::kNear);
+ bind(&fetch_next);
+ movq(rsp, Operand(rsp, StackHandlerConstants::kNextOffset));
- // Set the top handler address to next handler past the current ENTRY handler.
- Operand handler_operand = ExternalOperand(handler_address);
- pop(handler_operand);
+ bind(&check_kind);
+ STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
+ testl(Operand(rsp, StackHandlerConstants::kStateOffset),
+ Immediate(StackHandler::KindField::kMask));
+ j(not_zero, &fetch_next);
- if (type == OUT_OF_MEMORY) {
- // Set external caught exception to false.
- ExternalReference external_caught(
- Isolate::kExternalCaughtExceptionAddress, isolate());
- Set(rax, static_cast<int64_t>(false));
- Store(external_caught, rax);
-
- // Set pending exception and rax to out of memory exception.
- ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
- isolate());
- movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
- Store(pending_exception, rax);
- }
+ // Set the top handler address to next handler past the top ENTRY handler.
+ pop(ExternalOperand(handler_address));
- // Discard the context saved in the handler and clear the context pointer.
- pop(rdx);
- Set(rsi, 0);
+ // Remove the code object and state, compute the handler address in rdi.
+ pop(rdi); // Code object.
+ pop(rdx); // Offset and state.
- pop(rbp); // Restore frame pointer.
- pop(rdx); // Discard state.
+ // Clear the context pointer and frame pointer (0 was saved in the handler).
+ pop(rsi);
+ pop(rbp);
- ret(0);
+ JumpToHandlerEntry();
}
@@ -2684,15 +2729,48 @@ void MacroAssembler::StoreNumberToDoubleElements(
}
+void MacroAssembler::CompareMap(Register obj,
+ Handle<Map> map,
+ Label* early_success,
+ CompareMapMode mode) {
+ Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
+ if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
+ Map* transitioned_fast_element_map(
+ map->LookupElementsTransitionMap(FAST_ELEMENTS, NULL));
+ ASSERT(transitioned_fast_element_map == NULL ||
+ map->elements_kind() != FAST_ELEMENTS);
+ if (transitioned_fast_element_map != NULL) {
+ j(equal, early_success, Label::kNear);
+ Cmp(FieldOperand(obj, HeapObject::kMapOffset),
+ Handle<Map>(transitioned_fast_element_map));
+ }
+
+ Map* transitioned_double_map(
+ map->LookupElementsTransitionMap(FAST_DOUBLE_ELEMENTS, NULL));
+ ASSERT(transitioned_double_map == NULL ||
+ map->elements_kind() == FAST_SMI_ONLY_ELEMENTS);
+ if (transitioned_double_map != NULL) {
+ j(equal, early_success, Label::kNear);
+ Cmp(FieldOperand(obj, HeapObject::kMapOffset),
+ Handle<Map>(transitioned_double_map));
+ }
+ }
+}
+
+
void MacroAssembler::CheckMap(Register obj,
Handle<Map> map,
Label* fail,
- SmiCheckType smi_check_type) {
+ SmiCheckType smi_check_type,
+ CompareMapMode mode) {
if (smi_check_type == DO_SMI_CHECK) {
JumpIfSmi(obj, fail);
}
- Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
+
+ Label success;
+ CompareMap(obj, map, &success, mode);
j(not_equal, fail);
+ bind(&success);
}
@@ -2782,6 +2860,14 @@ void MacroAssembler::AbortIfNotSmi(const Operand& object) {
}
+void MacroAssembler::AbortIfNotZeroExtended(Register int32_register) {
+ ASSERT(!int32_register.is(kScratchRegister));
+ movq(kScratchRegister, 0x100000000l, RelocInfo::NONE);
+ cmpq(kScratchRegister, int32_register);
+ Assert(above_equal, "32 bit value in register is not zero-extended");
+}
+
+
void MacroAssembler::AbortIfNotString(Register object) {
testb(object, Immediate(kSmiTagMask));
Assert(not_equal, "Operand is not a string");
@@ -2942,26 +3028,30 @@ void MacroAssembler::InvokeCode(Register code,
ASSERT(flag == JUMP_FUNCTION || has_frame());
Label done;
+ bool definitely_mismatches = false;
InvokePrologue(expected,
actual,
Handle<Code>::null(),
code,
&done,
+ &definitely_mismatches,
flag,
Label::kNear,
call_wrapper,
call_kind);
- if (flag == CALL_FUNCTION) {
- call_wrapper.BeforeCall(CallSize(code));
- SetCallKind(rcx, call_kind);
- call(code);
- call_wrapper.AfterCall();
- } else {
- ASSERT(flag == JUMP_FUNCTION);
- SetCallKind(rcx, call_kind);
- jmp(code);
+ if (!definitely_mismatches) {
+ if (flag == CALL_FUNCTION) {
+ call_wrapper.BeforeCall(CallSize(code));
+ SetCallKind(rcx, call_kind);
+ call(code);
+ call_wrapper.AfterCall();
+ } else {
+ ASSERT(flag == JUMP_FUNCTION);
+ SetCallKind(rcx, call_kind);
+ jmp(code);
+ }
+ bind(&done);
}
- bind(&done);
}
@@ -2976,27 +3066,31 @@ void MacroAssembler::InvokeCode(Handle<Code> code,
ASSERT(flag == JUMP_FUNCTION || has_frame());
Label done;
+ bool definitely_mismatches = false;
Register dummy = rax;
InvokePrologue(expected,
actual,
code,
dummy,
&done,
+ &definitely_mismatches,
flag,
Label::kNear,
call_wrapper,
call_kind);
- if (flag == CALL_FUNCTION) {
- call_wrapper.BeforeCall(CallSize(code));
- SetCallKind(rcx, call_kind);
- Call(code, rmode);
- call_wrapper.AfterCall();
- } else {
- ASSERT(flag == JUMP_FUNCTION);
- SetCallKind(rcx, call_kind);
- Jump(code, rmode);
+ if (!definitely_mismatches) {
+ if (flag == CALL_FUNCTION) {
+ call_wrapper.BeforeCall(CallSize(code));
+ SetCallKind(rcx, call_kind);
+ Call(code, rmode);
+ call_wrapper.AfterCall();
+ } else {
+ ASSERT(flag == JUMP_FUNCTION);
+ SetCallKind(rcx, call_kind);
+ Jump(code, rmode);
+ }
+ bind(&done);
}
- bind(&done);
}
@@ -3031,7 +3125,7 @@ void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
ASSERT(flag == JUMP_FUNCTION || has_frame());
// Get the function and setup the context.
- Move(rdi, function);
+ LoadHeapObject(rdi, function);
movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
// We call indirectly through the code field in the function to
@@ -3048,11 +3142,13 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
Handle<Code> code_constant,
Register code_register,
Label* done,
+ bool* definitely_mismatches,
InvokeFlag flag,
Label::Distance near_jump,
const CallWrapper& call_wrapper,
CallKind call_kind) {
bool definitely_matches = false;
+ *definitely_mismatches = false;
Label invoke;
if (expected.is_immediate()) {
ASSERT(actual.is_immediate());
@@ -3068,6 +3164,7 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
// arguments.
definitely_matches = true;
} else {
+ *definitely_mismatches = true;
Set(rbx, expected.immediate());
}
}
@@ -3104,7 +3201,9 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
SetCallKind(rcx, call_kind);
Call(adaptor, RelocInfo::CODE_TARGET);
call_wrapper.AfterCall();
- jmp(done, near_jump);
+ if (!*definitely_mismatches) {
+ jmp(done, near_jump);
+ }
} else {
SetCallKind(rcx, call_kind);
Jump(adaptor, RelocInfo::CODE_TARGET);
@@ -3143,7 +3242,7 @@ void MacroAssembler::LeaveFrame(StackFrame::Type type) {
void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
- // Setup the frame structure on the stack.
+ // Set up the frame structure on the stack.
// All constants are relative to the frame pointer of the exit frame.
ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
@@ -3203,7 +3302,7 @@ void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
EnterExitFramePrologue(true);
- // Setup argv in callee-saved register r15. It is reused in LeaveExitFrame,
+ // Set up argv in callee-saved register r15. It is reused in LeaveExitFrame,
// so it must be retained across the C-call.
int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
lea(r15, Operand(rbp, r14, times_pointer_size, offset));
@@ -3331,6 +3430,42 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
}
+void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
+ // First of all we assign the hash seed to scratch.
+ LoadRoot(scratch, Heap::kHashSeedRootIndex);
+ SmiToInteger32(scratch, scratch);
+
+ // Xor original key with a seed.
+ xorl(r0, scratch);
+
+ // Compute the hash code from the untagged key. This must be kept in sync
+ // with ComputeIntegerHash in utils.h.
+ //
+ // hash = ~hash + (hash << 15);
+ movl(scratch, r0);
+ notl(r0);
+ shll(scratch, Immediate(15));
+ addl(r0, scratch);
+ // hash = hash ^ (hash >> 12);
+ movl(scratch, r0);
+ shrl(scratch, Immediate(12));
+ xorl(r0, scratch);
+ // hash = hash + (hash << 2);
+ leal(r0, Operand(r0, r0, times_4, 0));
+ // hash = hash ^ (hash >> 4);
+ movl(scratch, r0);
+ shrl(scratch, Immediate(4));
+ xorl(r0, scratch);
+ // hash = hash * 2057;
+ imull(r0, r0, Immediate(2057));
+ // hash = hash ^ (hash >> 16);
+ movl(scratch, r0);
+ shrl(scratch, Immediate(16));
+ xorl(r0, scratch);
+}
+
+
+
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
Register elements,
Register key,
@@ -3361,34 +3496,11 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
Label done;
- // Compute the hash code from the untagged key. This must be kept in sync
- // with ComputeIntegerHash in utils.h.
- //
- // hash = ~hash + (hash << 15);
- movl(r1, r0);
- notl(r0);
- shll(r1, Immediate(15));
- addl(r0, r1);
- // hash = hash ^ (hash >> 12);
- movl(r1, r0);
- shrl(r1, Immediate(12));
- xorl(r0, r1);
- // hash = hash + (hash << 2);
- leal(r0, Operand(r0, r0, times_4, 0));
- // hash = hash ^ (hash >> 4);
- movl(r1, r0);
- shrl(r1, Immediate(4));
- xorl(r0, r1);
- // hash = hash * 2057;
- imull(r0, r0, Immediate(2057));
- // hash = hash ^ (hash >> 16);
- movl(r1, r0);
- shrl(r1, Immediate(16));
- xorl(r0, r1);
+ GetNumberHash(r0, r1);
// Compute capacity mask.
- SmiToInteger32(r1,
- FieldOperand(elements, NumberDictionary::kCapacityOffset));
+ SmiToInteger32(r1, FieldOperand(elements,
+ SeededNumberDictionary::kCapacityOffset));
decl(r1);
// Generate an unrolled loop that performs a few probes before giving up.
@@ -3398,19 +3510,19 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
movq(r2, r0);
// Compute the masked index: (hash + i + i * i) & mask.
if (i > 0) {
- addl(r2, Immediate(NumberDictionary::GetProbeOffset(i)));
+ addl(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
}
and_(r2, r1);
// Scale the index by multiplying by the entry size.
- ASSERT(NumberDictionary::kEntrySize == 3);
+ ASSERT(SeededNumberDictionary::kEntrySize == 3);
lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
// Check if the key matches.
cmpq(key, FieldOperand(elements,
r2,
times_pointer_size,
- NumberDictionary::kElementsStartOffset));
+ SeededNumberDictionary::kElementsStartOffset));
if (i != (kProbes - 1)) {
j(equal, &done);
} else {
@@ -3421,7 +3533,7 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
bind(&done);
// Check that the value is a normal propety.
const int kDetailsOffset =
- NumberDictionary::kElementsStartOffset + 2 * kPointerSize;
+ SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
ASSERT_EQ(NORMAL, 0);
Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
Smi::FromInt(PropertyDetails::TypeField::kMask));
@@ -3429,7 +3541,7 @@ void MacroAssembler::LoadFromNumberDictionary(Label* miss,
// Get the value at the masked, scaled index.
const int kValueOffset =
- NumberDictionary::kElementsStartOffset + kPointerSize;
+ SeededNumberDictionary::kElementsStartOffset + kPointerSize;
movq(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
@@ -3724,7 +3836,7 @@ void MacroAssembler::AllocateAsciiString(Register result,
subq(scratch1, Immediate(kHeaderAlignment));
}
- // Allocate ascii string in new space.
+ // Allocate ASCII string in new space.
AllocateInNewSpace(SeqAsciiString::kHeaderSize,
times_1,
scratch1,
@@ -3919,6 +4031,46 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
}
}
+
+void MacroAssembler::LoadTransitionedArrayMapConditional(
+ ElementsKind expected_kind,
+ ElementsKind transitioned_kind,
+ Register map_in_out,
+ Register scratch,
+ Label* no_map_match) {
+ // Load the global or builtins object from the current context.
+ movq(scratch, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
+
+ // Check that the function's map is the same as the expected cached map.
+ int expected_index =
+ Context::GetContextMapIndexFromElementsKind(expected_kind);
+ cmpq(map_in_out, Operand(scratch, Context::SlotOffset(expected_index)));
+ j(not_equal, no_map_match);
+
+ // Use the transitioned cached map.
+ int trans_index =
+ Context::GetContextMapIndexFromElementsKind(transitioned_kind);
+ movq(map_in_out, Operand(scratch, Context::SlotOffset(trans_index)));
+}
+
+
+void MacroAssembler::LoadInitialArrayMap(
+ Register function_in, Register scratch, Register map_out) {
+ ASSERT(!function_in.is(map_out));
+ Label done;
+ movq(map_out, FieldOperand(function_in,
+ JSFunction::kPrototypeOrInitialMapOffset));
+ if (!FLAG_smi_only_arrays) {
+ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ map_out,
+ scratch,
+ &done);
+ }
+ bind(&done);
+}
+
#ifdef _WIN64
static const int kRegisterPassedArguments = 4;
#else
@@ -4228,6 +4380,52 @@ void MacroAssembler::EnsureNotWhite(
bind(&done);
}
+
+void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
+ Label next;
+ Register empty_fixed_array_value = r8;
+ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
+ Register empty_descriptor_array_value = r9;
+ LoadRoot(empty_descriptor_array_value,
+ Heap::kEmptyDescriptorArrayRootIndex);
+ movq(rcx, rax);
+ bind(&next);
+
+ // Check that there are no elements. Register rcx contains the
+ // current JS object we've reached through the prototype chain.
+ cmpq(empty_fixed_array_value,
+ FieldOperand(rcx, JSObject::kElementsOffset));
+ j(not_equal, call_runtime);
+
+ // Check that instance descriptors are not empty so that we can
+ // check for an enum cache. Leave the map in rbx for the subsequent
+ // prototype load.
+ movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
+ movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOrBitField3Offset));
+ JumpIfSmi(rdx, call_runtime);
+
+ // Check that there is an enum cache in the non-empty instance
+ // descriptors (rdx). This is the case if the next enumeration
+ // index field does not contain a smi.
+ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset));
+ JumpIfSmi(rdx, call_runtime);
+
+ // For all objects but the receiver, check that the cache is empty.
+ Label check_prototype;
+ cmpq(rcx, rax);
+ j(equal, &check_prototype, Label::kNear);
+ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset));
+ cmpq(rdx, empty_fixed_array_value);
+ j(not_equal, call_runtime);
+
+ // Load the prototype from the map and loop if non-null.
+ bind(&check_prototype);
+ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
+ cmpq(rcx, null_value);
+ j(not_equal, &next);
+}
+
+
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_X64
diff --git a/src/3rdparty/v8/src/x64/macro-assembler-x64.h b/src/3rdparty/v8/src/x64/macro-assembler-x64.h
index b0563cd..6bb5cfe 100644
--- a/src/3rdparty/v8/src/x64/macro-assembler-x64.h
+++ b/src/3rdparty/v8/src/x64/macro-assembler-x64.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -50,14 +50,14 @@ enum AllocationFlags {
// Default scratch register used by MacroAssembler (and other code that needs
// a spare register). The register isn't callee save, and not used by the
// function calling convention.
-static const Register kScratchRegister = { 10 }; // r10.
-static const Register kSmiConstantRegister = { 12 }; // r12 (callee save).
-static const Register kRootRegister = { 13 }; // r13 (callee save).
+const Register kScratchRegister = { 10 }; // r10.
+const Register kSmiConstantRegister = { 12 }; // r12 (callee save).
+const Register kRootRegister = { 13 }; // r13 (callee save).
// Value of smi in kSmiConstantRegister.
-static const int kSmiConstantRegisterValue = 1;
+const int kSmiConstantRegisterValue = 1;
// Actual value of root register is offset from the root array's start
// to take advantage of negitive 8-bit displacement values.
-static const int kRootRegisterBias = 128;
+const int kRootRegisterBias = 128;
// Convenience for platform-independent signatures.
typedef Operand MemOperand;
@@ -203,7 +203,7 @@ class MacroAssembler: public Assembler {
Label* on_black,
Label::Distance on_black_distance = Label::kFar);
- // Detects conservatively whether an object is data-only, ie it does need to
+ // Detects conservatively whether an object is data-only, i.e. it does need to
// be scanned by the garbage collector.
void JumpIfDataObject(Register value,
Register scratch,
@@ -328,7 +328,7 @@ class MacroAssembler: public Assembler {
// ---------------------------------------------------------------------------
// JavaScript invokes
- // Setup call kind marking in rcx. The method takes rcx as an
+ // Set up call kind marking in rcx. The method takes rcx as an
// explicit first parameter to make the code more readable at the
// call sites.
void SetCallKind(Register dst, CallKind kind);
@@ -745,7 +745,7 @@ class MacroAssembler: public Assembler {
Label* on_not_both_flat_ascii,
Label::Distance near_jump = Label::kFar);
- // Check whether the instance type represents a flat ascii string. Jump to the
+ // Check whether the instance type represents a flat ASCII string. Jump to the
// label if not. If the instance type can be scratched specify same register
// for both instance type and scratch.
void JumpIfInstanceTypeIsNotSequentialAscii(
@@ -784,6 +784,22 @@ class MacroAssembler: public Assembler {
void Cmp(const Operand& dst, Smi* src);
void Push(Handle<Object> source);
+ // Load a heap object and handle the case of new-space objects by
+ // indirecting via a global cell.
+ void LoadHeapObject(Register result, Handle<HeapObject> object);
+ void PushHeapObject(Handle<HeapObject> object);
+
+ void LoadObject(Register result, Handle<Object> object) {
+ if (object->IsHeapObject()) {
+ LoadHeapObject(result, Handle<HeapObject>::cast(object));
+ } else {
+ Move(result, object);
+ }
+ }
+
+ // Load a global cell into a register.
+ void LoadGlobalCell(Register dst, Handle<JSGlobalPropertyCell> cell);
+
// Emit code to discard a non-negative number of pointer-sized elements
// from the stack, clobbering only the rsp register.
void Drop(int stack_elements);
@@ -873,13 +889,24 @@ class MacroAssembler: public Assembler {
XMMRegister xmm_scratch,
Label* fail);
- // Check if the map of an object is equal to a specified map and
- // branch to label if not. Skip the smi check if not required
- // (object is known to be a heap object)
+ // Compare an object's map with the specified map and its transitioned
+ // elements maps if mode is ALLOW_ELEMENT_TRANSITION_MAPS. FLAGS are set with
+ // result of map compare. If multiple map compares are required, the compare
+ // sequences branches to early_success.
+ void CompareMap(Register obj,
+ Handle<Map> map,
+ Label* early_success,
+ CompareMapMode mode = REQUIRE_EXACT_MAP);
+
+ // Check if the map of an object is equal to a specified map and branch to
+ // label if not. Skip the smi check if not required (object is known to be a
+ // heap object). If mode is ALLOW_ELEMENT_TRANSITION_MAPS, then also match
+ // against maps that are ElementsKind transition maps of the specified map.
void CheckMap(Register obj,
Handle<Map> map,
Label* fail,
- SmiCheckType smi_check_type);
+ SmiCheckType smi_check_type,
+ CompareMapMode mode = REQUIRE_EXACT_MAP);
// Check if the map of an object is equal to a specified map and branch to a
// specified target if equal. Skip the smi check if not required (object is
@@ -922,6 +949,10 @@ class MacroAssembler: public Assembler {
void AbortIfNotSmi(Register object);
void AbortIfNotSmi(const Operand& object);
+ // Abort execution if a 64 bit register containing a 32 bit payload does not
+ // have zeros in the top 32 bits.
+ void AbortIfNotZeroExtended(Register reg);
+
// Abort execution if argument is a string. Used in debug code.
void AbortIfNotString(Register object);
@@ -933,9 +964,8 @@ class MacroAssembler: public Assembler {
// ---------------------------------------------------------------------------
// Exception handling
- // Push a new try handler and link into try handler chain. The return
- // address must be pushed before calling this helper.
- void PushTryHandler(CodeLocation try_location, HandlerType type);
+ // Push a new try handler and link it into try handler chain.
+ void PushTryHandler(StackHandler::Kind kind, int handler_index);
// Unlink the stack handler on top of the stack from the try handler chain.
void PopTryHandler();
@@ -945,7 +975,7 @@ class MacroAssembler: public Assembler {
void Throw(Register value);
// Propagate an uncatchable exception out of the current JS stack.
- void ThrowUncatchable(UncatchableExceptionType type, Register value);
+ void ThrowUncatchable(Register value);
// ---------------------------------------------------------------------------
// Inline caching support
@@ -958,6 +988,7 @@ class MacroAssembler: public Assembler {
Register scratch,
Label* miss);
+ void GetNumberHash(Register r0, Register scratch);
void LoadFromNumberDictionary(Label* miss,
Register elements,
@@ -1094,6 +1125,22 @@ class MacroAssembler: public Assembler {
// Find the function context up the context chain.
void LoadContext(Register dst, int context_chain_length);
+ // Conditionally load the cached Array transitioned map of type
+ // transitioned_kind from the global context if the map in register
+ // map_in_out is the cached Array map in the global context of
+ // expected_kind.
+ void LoadTransitionedArrayMapConditional(
+ ElementsKind expected_kind,
+ ElementsKind transitioned_kind,
+ Register map_in_out,
+ Register scratch,
+ Label* no_map_match);
+
+ // Load the initial map for new Arrays from a JSFunction.
+ void LoadInitialArrayMap(Register function_in,
+ Register scratch,
+ Register map_out);
+
// Load the global function with the given index.
void LoadGlobalFunction(int index, Register function);
@@ -1252,10 +1299,15 @@ class MacroAssembler: public Assembler {
void EnterFrame(StackFrame::Type type);
void LeaveFrame(StackFrame::Type type);
+ // Expects object in rax and returns map with validated enum cache
+ // in rax. Assumes that any other register can be used as a scratch.
+ void CheckEnumCache(Register null_value,
+ Label* call_runtime);
+
private:
// Order general registers are pushed by Pushad.
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
- static int kSafepointPushRegisterIndices[Register::kNumRegisters];
+ static const int kSafepointPushRegisterIndices[Register::kNumRegisters];
static const int kNumSafepointSavedRegisters = 11;
static const int kSmiShift = kSmiTagSize + kSmiShiftSize;
@@ -1280,6 +1332,7 @@ class MacroAssembler: public Assembler {
Handle<Code> code_constant,
Register code_register,
Label* done,
+ bool* definitely_mismatches,
InvokeFlag flag,
Label::Distance near_jump = Label::kFar,
const CallWrapper& call_wrapper = NullCallWrapper(),
@@ -1326,6 +1379,10 @@ class MacroAssembler: public Assembler {
Register bitmap_reg,
Register mask_reg);
+ // Helper for throwing exceptions. Compute a handler address and jump to
+ // it. See the implementation for register usage.
+ void JumpToHandlerEntry();
+
// Compute memory operands for safepoint stack slots.
Operand SafepointRegisterSlot(Register reg);
static int SafepointRegisterStackIndex(int reg_code) {
@@ -1362,37 +1419,32 @@ class CodePatcher {
// Static helper functions.
// Generate an Operand for loading a field from an object.
-static inline Operand FieldOperand(Register object, int offset) {
+inline Operand FieldOperand(Register object, int offset) {
return Operand(object, offset - kHeapObjectTag);
}
// Generate an Operand for loading an indexed field from an object.
-static inline Operand FieldOperand(Register object,
- Register index,
- ScaleFactor scale,
- int offset) {
+inline Operand FieldOperand(Register object,
+ Register index,
+ ScaleFactor scale,
+ int offset) {
return Operand(object, index, scale, offset - kHeapObjectTag);
}
-static inline Operand ContextOperand(Register context, int index) {
+inline Operand ContextOperand(Register context, int index) {
return Operand(context, Context::SlotOffset(index));
}
-static inline Operand GlobalObjectOperand() {
+inline Operand GlobalObjectOperand() {
return ContextOperand(rsi, Context::GLOBAL_INDEX);
}
-static inline Operand QmlGlobalObjectOperand() {
- return ContextOperand(rsi, Context::QML_GLOBAL_INDEX);
-}
-
-
// Provides access to exit frame stack space (not GCed).
-static inline Operand StackSpaceOperand(int index) {
+inline Operand StackSpaceOperand(int index) {
#ifdef _WIN64
const int kShaddowSpace = 4;
return Operand(rsp, (index + kShaddowSpace) * kPointerSize);
diff --git a/src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.cc b/src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.cc
index 1e0cd6a..bf232bf 100644
--- a/src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.cc
+++ b/src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.cc
@@ -226,7 +226,7 @@ void RegExpMacroAssemblerX64::CheckCharacters(Vector<const uc16> str,
bool check_end_of_string) {
#ifdef DEBUG
// If input is ASCII, don't even bother calling here if the string to
- // match contains a non-ascii character.
+ // match contains a non-ASCII character.
if (mode_ == ASCII) {
ASSERT(String::IsAscii(str.start(), str.length()));
}
@@ -542,9 +542,13 @@ void RegExpMacroAssemblerX64::CheckNotCharacter(uint32_t c,
void RegExpMacroAssemblerX64::CheckCharacterAfterAnd(uint32_t c,
uint32_t mask,
Label* on_equal) {
- __ movl(rax, current_character());
- __ and_(rax, Immediate(mask));
- __ cmpl(rax, Immediate(c));
+ if (c == 0) {
+ __ testl(current_character(), Immediate(mask));
+ } else {
+ __ movl(rax, Immediate(mask));
+ __ and_(rax, current_character());
+ __ cmpl(rax, Immediate(c));
+ }
BranchOrBacktrack(equal, on_equal);
}
@@ -552,9 +556,13 @@ void RegExpMacroAssemblerX64::CheckCharacterAfterAnd(uint32_t c,
void RegExpMacroAssemblerX64::CheckNotCharacterAfterAnd(uint32_t c,
uint32_t mask,
Label* on_not_equal) {
- __ movl(rax, current_character());
- __ and_(rax, Immediate(mask));
- __ cmpl(rax, Immediate(c));
+ if (c == 0) {
+ __ testl(current_character(), Immediate(mask));
+ } else {
+ __ movl(rax, Immediate(mask));
+ __ and_(rax, current_character());
+ __ cmpl(rax, Immediate(c));
+ }
BranchOrBacktrack(not_equal, on_not_equal);
}
@@ -564,7 +572,7 @@ void RegExpMacroAssemblerX64::CheckNotCharacterAfterMinusAnd(
uc16 minus,
uc16 mask,
Label* on_not_equal) {
- ASSERT(minus < String::kMaxUC16CharCode);
+ ASSERT(minus < String::kMaxUtf16CodeUnit);
__ lea(rax, Operand(current_character(), -minus));
__ and_(rax, Immediate(mask));
__ cmpl(rax, Immediate(c));
@@ -572,6 +580,42 @@ void RegExpMacroAssemblerX64::CheckNotCharacterAfterMinusAnd(
}
+void RegExpMacroAssemblerX64::CheckCharacterInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_in_range) {
+ __ leal(rax, Operand(current_character(), -from));
+ __ cmpl(rax, Immediate(to - from));
+ BranchOrBacktrack(below_equal, on_in_range);
+}
+
+
+void RegExpMacroAssemblerX64::CheckCharacterNotInRange(
+ uc16 from,
+ uc16 to,
+ Label* on_not_in_range) {
+ __ leal(rax, Operand(current_character(), -from));
+ __ cmpl(rax, Immediate(to - from));
+ BranchOrBacktrack(above, on_not_in_range);
+}
+
+
+void RegExpMacroAssemblerX64::CheckBitInTable(
+ Handle<ByteArray> table,
+ Label* on_bit_set) {
+ __ Move(rax, table);
+ Register index = current_character();
+ if (mode_ != ASCII || kTableMask != String::kMaxAsciiCharCode) {
+ __ movq(rbx, current_character());
+ __ and_(rbx, Immediate(kTableMask));
+ index = rbx;
+ }
+ __ cmpb(FieldOperand(rax, index, times_1, ByteArray::kHeaderSize),
+ Immediate(0));
+ BranchOrBacktrack(not_equal, on_bit_set);
+}
+
+
bool RegExpMacroAssemblerX64::CheckSpecialCharacterClass(uc16 type,
Label* on_no_match) {
// Range checks (c in min..max) are generally implemented by an unsigned
@@ -1192,7 +1236,7 @@ int RegExpMacroAssemblerX64::CheckStackGuardState(Address* return_address,
ASSERT(*return_address <=
re_code->instruction_start() + re_code->instruction_size());
- MaybeObject* result = Execution::HandleStackGuardInterrupt();
+ MaybeObject* result = Execution::HandleStackGuardInterrupt(isolate);
if (*code_handle != re_code) { // Return address no longer valid
intptr_t delta = code_handle->address() - re_code->address();
diff --git a/src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.h b/src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.h
index 7102225..cd24b60 100644
--- a/src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.h
+++ b/src/3rdparty/v8/src/x64/regexp-macro-assembler-x64.h
@@ -75,6 +75,14 @@ class RegExpMacroAssemblerX64: public NativeRegExpMacroAssembler {
uc16 minus,
uc16 mask,
Label* on_not_equal);
+ virtual void CheckCharacterInRange(uc16 from,
+ uc16 to,
+ Label* on_in_range);
+ virtual void CheckCharacterNotInRange(uc16 from,
+ uc16 to,
+ Label* on_not_in_range);
+ virtual void CheckBitInTable(Handle<ByteArray> table, Label* on_bit_set);
+
// Checks whether the given offset from the current position is before
// the end of the string.
virtual void CheckPosition(int cp_offset, Label* on_outside_input);
diff --git a/src/3rdparty/v8/src/x64/stub-cache-x64.cc b/src/3rdparty/v8/src/x64/stub-cache-x64.cc
index 867c71a..f07f6b6 100644
--- a/src/3rdparty/v8/src/x64/stub-cache-x64.cc
+++ b/src/3rdparty/v8/src/x64/stub-cache-x64.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -43,32 +43,61 @@ static void ProbeTable(Isolate* isolate,
MacroAssembler* masm,
Code::Flags flags,
StubCache::Table table,
+ Register receiver,
Register name,
+ // The offset is scaled by 4, based on
+ // kHeapObjectTagSize, which is two bits
Register offset) {
- ASSERT_EQ(8, kPointerSize);
- ASSERT_EQ(16, sizeof(StubCache::Entry));
+ // We need to scale up the pointer by 2 because the offset is scaled by less
+ // than the pointer size.
+ ASSERT(kPointerSizeLog2 == kHeapObjectTagSize + 1);
+ ScaleFactor scale_factor = times_2;
+
+ ASSERT_EQ(24, sizeof(StubCache::Entry));
// The offset register holds the entry offset times four (due to masking
// and shifting optimizations).
ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
+ ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
Label miss;
+ // Multiply by 3 because there are 3 fields per entry (name, code, map).
+ __ lea(offset, Operand(offset, offset, times_2, 0));
+
__ LoadAddress(kScratchRegister, key_offset);
+
// Check that the key in the entry matches the name.
// Multiply entry offset by 16 to get the entry address. Since the
// offset register already holds the entry offset times four, multiply
// by a further four.
- __ cmpl(name, Operand(kScratchRegister, offset, times_4, 0));
+ __ cmpl(name, Operand(kScratchRegister, offset, scale_factor, 0));
__ j(not_equal, &miss);
+
+ // Get the map entry from the cache.
+ // Use key_offset + kPointerSize * 2, rather than loading map_offset.
+ __ movq(kScratchRegister,
+ Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2));
+ __ cmpq(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset));
+ __ j(not_equal, &miss);
+
// Get the code entry from the cache.
- // Use key_offset + kPointerSize, rather than loading value_offset.
+ __ LoadAddress(kScratchRegister, value_offset);
__ movq(kScratchRegister,
- Operand(kScratchRegister, offset, times_4, kPointerSize));
+ Operand(kScratchRegister, offset, scale_factor, 0));
+
// Check that the flags match what we're looking for.
__ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
__ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup));
__ cmpl(offset, Immediate(flags));
__ j(not_equal, &miss);
+#ifdef DEBUG
+ if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
+ __ jmp(&miss);
+ } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
+ __ jmp(&miss);
+ }
+#endif
+
// Jump to the first instruction in the code stub.
__ addq(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
__ jmp(kScratchRegister);
@@ -134,14 +163,16 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
Register name,
Register scratch,
Register extra,
- Register extra2) {
+ Register extra2,
+ Register extra3) {
Isolate* isolate = masm->isolate();
Label miss;
USE(extra); // The register extra is not used on the X64 platform.
USE(extra2); // The register extra2 is not used on the X64 platform.
- // Make sure that code is valid. The shifting code relies on the
- // entry size being 16.
- ASSERT(sizeof(Entry) == 16);
+ USE(extra3); // The register extra2 is not used on the X64 platform.
+ // Make sure that code is valid. The multiplying code relies on the
+ // entry size being 24.
+ ASSERT(sizeof(Entry) == 24);
// Make sure the flags do not name a specific type.
ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
@@ -153,6 +184,10 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
// Check scratch register is valid, extra and extra2 are unused.
ASSERT(!scratch.is(no_reg));
ASSERT(extra2.is(no_reg));
+ ASSERT(extra3.is(no_reg));
+
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);
// Check that the receiver isn't a smi.
__ JumpIfSmi(receiver, &miss);
@@ -162,10 +197,12 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
// Use only the low 32 bits of the map pointer.
__ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
__ xor_(scratch, Immediate(flags));
+ // We mask out the last two bits because they are not part of the hash and
+ // they are always 01 for maps. Also in the two 'and' instructions below.
__ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
// Probe the primary table.
- ProbeTable(isolate, masm, flags, kPrimary, name, scratch);
+ ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch);
// Primary miss: Compute hash for secondary probe.
__ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
@@ -177,11 +214,12 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
__ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));
// Probe the secondary table.
- ProbeTable(isolate, masm, flags, kSecondary, name, scratch);
+ ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch);
// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
__ bind(&miss);
+ __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
}
@@ -421,7 +459,7 @@ static void GenerateFastApiCall(MacroAssembler* masm,
// -----------------------------------
// Get the function and setup the context.
Handle<JSFunction> function = optimization.constant_function();
- __ Move(rdi, function);
+ __ LoadHeapObject(rdi, function);
__ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
// Pass the additional arguments.
@@ -691,13 +729,9 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm,
Register name_reg,
Register scratch,
Label* miss_label) {
- // Check that the object isn't a smi.
- __ JumpIfSmi(receiver_reg, miss_label);
-
// Check that the map of the object hasn't changed.
- __ Cmp(FieldOperand(receiver_reg, HeapObject::kMapOffset),
- Handle<Map>(object->map()));
- __ j(not_equal, miss_label);
+ __ CheckMap(receiver_reg, Handle<Map>(object->map()),
+ miss_label, DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
@@ -864,12 +898,10 @@ Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
if (in_new_space) {
// Save the map in scratch1 for later.
__ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
- __ Cmp(scratch1, current_map);
- } else {
- __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), current_map);
}
- // Branch on the result of the map check.
- __ j(not_equal, miss);
+ __ CheckMap(reg, Handle<Map>(current_map),
+ miss, DONT_DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
+
// Check access rights to the global object. This has to happen after
// the map check so that we know that the object is actually a global
// object.
@@ -901,8 +933,8 @@ Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
// Check the holder map.
- __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
- __ j(not_equal, miss);
+ __ CheckMap(reg, Handle<Map>(holder->map()),
+ miss, DONT_DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
// Perform security check for access to the global object.
ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
@@ -988,7 +1020,7 @@ void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
__ movq(name_arg, rsp);
__ push(scratch2); // Restore return address.
- // 3 elements array for v8::Agruments::values_ and handler for name.
+ // 3 elements array for v8::Arguments::values_ and handler for name.
const int kStackSpace = 4;
// Allocate v8::AccessorInfo in non-GCed stack space.
@@ -1015,7 +1047,7 @@ void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
Register scratch1,
Register scratch2,
Register scratch3,
- Handle<Object> value,
+ Handle<JSFunction> value,
Handle<String> name,
Label* miss) {
// Check that the receiver isn't a smi.
@@ -1026,7 +1058,7 @@ void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
object, receiver, holder, scratch1, scratch2, scratch3, name, miss);
// Return the constant value.
- __ Move(rax, value);
+ __ LoadHeapObject(rax, value);
__ ret(0);
}
@@ -1051,7 +1083,7 @@ void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
// and CALLBACKS, so inline only them, other cases may be added
// later.
bool compile_followup_inline = false;
- if (lookup->IsProperty() && lookup->IsCacheable()) {
+ if (lookup->IsFound() && lookup->IsCacheable()) {
if (lookup->type() == FIELD) {
compile_followup_inline = true;
} else if (lookup->type() == CALLBACKS &&
@@ -1192,14 +1224,9 @@ void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
// Get the receiver from the stack.
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
- // If the object is the holder then we know that it's a global
- // object which can only happen for contextual calls. In this case,
- // the receiver cannot be a smi.
- if (!object.is_identical_to(holder)) {
- __ JumpIfSmi(rdx, miss);
- }
// Check that the maps haven't changed.
+ __ JumpIfSmi(rdx, miss);
CheckPrototypes(object, rdx, holder, rbx, rax, rdi, name, miss);
}
@@ -1337,24 +1364,24 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
} else {
Label call_builtin;
- // Get the elements array of the object.
- __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
-
- // Check that the elements are in fast mode and writable.
- __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
- factory()->fixed_array_map());
- __ j(not_equal, &call_builtin);
-
if (argc == 1) { // Otherwise fall through to call builtin.
Label attempt_to_grow_elements, with_write_barrier;
+ // Get the elements array of the object.
+ __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));
+
+ // Check that the elements are in fast mode and writable.
+ __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
+ factory()->fixed_array_map());
+ __ j(not_equal, &call_builtin);
+
// Get the array's length into rax and calculate new length.
__ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset));
STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
__ addl(rax, Immediate(argc));
- // Get the element's length into rcx.
- __ SmiToInteger32(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));
+ // Get the elements' length into rcx.
+ __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
// Check if we could survive without allocation.
__ cmpl(rax, rcx);
@@ -1367,30 +1394,52 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
// Save new length.
__ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
- // Push the element.
- __ lea(rdx, FieldOperand(rbx,
- rax, times_pointer_size,
- FixedArray::kHeaderSize - argc * kPointerSize));
- __ movq(Operand(rdx, 0), rcx);
+ // Store the value.
+ __ movq(FieldOperand(rdi,
+ rax,
+ times_pointer_size,
+ FixedArray::kHeaderSize - argc * kPointerSize),
+ rcx);
__ Integer32ToSmi(rax, rax); // Return new length as smi.
__ ret((argc + 1) * kPointerSize);
__ bind(&with_write_barrier);
- __ movq(rdi, FieldOperand(rdx, HeapObject::kMapOffset));
- __ CheckFastObjectElements(rdi, &call_builtin);
+ __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
+
+ if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
+ Label fast_object, not_fast_object;
+ __ CheckFastObjectElements(rbx, &not_fast_object, Label::kNear);
+ __ jmp(&fast_object);
+ // In case of fast smi-only, convert to fast object, otherwise bail out.
+ __ bind(&not_fast_object);
+ __ CheckFastSmiOnlyElements(rbx, &call_builtin);
+ // rdx: receiver
+ // rbx: map
+ __ movq(r9, rdi); // Backup rdi as it is going to be trashed.
+ __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
+ FAST_ELEMENTS,
+ rbx,
+ rdi,
+ &call_builtin);
+ ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
+ __ movq(rdi, r9);
+ __ bind(&fast_object);
+ } else {
+ __ CheckFastObjectElements(rbx, &call_builtin);
+ }
// Save new length.
__ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
- // Push the element.
- __ lea(rdx, FieldOperand(rbx,
+ // Store the value.
+ __ lea(rdx, FieldOperand(rdi,
rax, times_pointer_size,
FixedArray::kHeaderSize - argc * kPointerSize));
__ movq(Operand(rdx, 0), rcx);
- __ RecordWrite(rbx, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
+ __ RecordWrite(rdi, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
__ Integer32ToSmi(rax, rax); // Return new length as smi.
@@ -1401,11 +1450,11 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ jmp(&call_builtin);
}
- __ movq(rdi, Operand(rsp, argc * kPointerSize));
+ __ movq(rbx, Operand(rsp, argc * kPointerSize));
// Growing elements that are SMI-only requires special handling in case
// the new element is non-Smi. For now, delegate to the builtin.
Label no_fast_elements_check;
- __ JumpIfSmi(rdi, &no_fast_elements_check);
+ __ JumpIfSmi(rbx, &no_fast_elements_check);
__ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
__ CheckFastObjectElements(rcx, &call_builtin, Label::kFar);
__ bind(&no_fast_elements_check);
@@ -1420,7 +1469,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ Load(rcx, new_space_allocation_top);
// Check if it's the end of elements.
- __ lea(rdx, FieldOperand(rbx,
+ __ lea(rdx, FieldOperand(rdi,
rax, times_pointer_size,
FixedArray::kHeaderSize - argc * kPointerSize));
__ cmpq(rdx, rcx);
@@ -1435,7 +1484,7 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
__ Store(new_space_allocation_top, rcx);
// Push the argument...
- __ movq(Operand(rdx, 0), rdi);
+ __ movq(Operand(rdx, 0), rbx);
// ... and fill the rest with holes.
__ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
for (int i = 1; i < kAllocationDelta; i++) {
@@ -1447,13 +1496,13 @@ Handle<Code> CallStubCompiler::CompileArrayPushCall(
// tell the incremental marker to rescan the object that we just grew. We
// don't need to worry about the holes because they are in old space and
// already marked black.
- __ RecordWrite(rbx, rdx, rdi, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
+ __ RecordWrite(rdi, rdx, rbx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
// Restore receiver to rdx as finish sequence assumes it's here.
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
// Increment element's and array's sizes.
- __ SmiAddConstant(FieldOperand(rbx, FixedArray::kLengthOffset),
+ __ SmiAddConstant(FieldOperand(rdi, FixedArray::kLengthOffset),
Smi::FromInt(kAllocationDelta));
// Make new length a smi before returning it.
@@ -1600,7 +1649,6 @@ Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
Register receiver = rbx;
Register index = rdi;
- Register scratch = rdx;
Register result = rax;
__ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
if (argc > 0) {
@@ -1611,7 +1659,6 @@ Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
StringCharCodeAtGenerator generator(receiver,
index,
- scratch,
result,
&miss, // When not a string.
&miss, // When not a number.
@@ -1680,8 +1727,7 @@ Handle<Code> CallStubCompiler::CompileStringCharAtCall(
Register receiver = rax;
Register index = rdi;
- Register scratch1 = rbx;
- Register scratch2 = rdx;
+ Register scratch = rdx;
Register result = rax;
__ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
if (argc > 0) {
@@ -1692,8 +1738,7 @@ Handle<Code> CallStubCompiler::CompileStringCharAtCall(
StringCharAtGenerator generator(receiver,
index,
- scratch1,
- scratch2,
+ scratch,
result,
&miss, // When not a string.
&miss, // When not a number.
@@ -2019,7 +2064,7 @@ Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
break;
case STRING_CHECK:
- if (function->IsBuiltin() || function->shared()->strict_mode()) {
+ if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
// Check that the object is a two-byte string or a symbol.
__ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rax);
__ j(above_equal, &miss);
@@ -2037,7 +2082,7 @@ Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
break;
case NUMBER_CHECK:
- if (function->IsBuiltin() || function->shared()->strict_mode()) {
+ if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
Label fast;
// Check that the object is a smi or a heap number.
__ JumpIfSmi(rdx, &fast);
@@ -2058,7 +2103,7 @@ Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object,
break;
case BOOLEAN_CHECK:
- if (function->IsBuiltin() || function->shared()->strict_mode()) {
+ if (function->IsBuiltin() || !function->shared()->is_classic_mode()) {
Label fast;
// Check that the object is a boolean.
__ CompareRoot(rdx, Heap::kTrueValueRootIndex);
@@ -2191,7 +2236,7 @@ Handle<Code> CallStubCompiler::CompileCallGlobal(
__ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
}
- // Setup the context (function already in rdi).
+ // Set up the context (function already in rdi).
__ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
// Jump to the cached code (tail call).
@@ -2255,13 +2300,9 @@ Handle<Code> StoreStubCompiler::CompileStoreCallback(
// -----------------------------------
Label miss;
- // Check that the object isn't a smi.
- __ JumpIfSmi(rdx, &miss);
-
// Check that the map of the object hasn't changed.
- __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
- Handle<Map>(object->map()));
- __ j(not_equal, &miss);
+ __ CheckMap(rdx, Handle<Map>(object->map()), &miss,
+ DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
// Perform global security token check if needed.
if (object->IsJSGlobalProxy()) {
@@ -2305,13 +2346,9 @@ Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
// -----------------------------------
Label miss;
- // Check that the object isn't a smi.
- __ JumpIfSmi(rdx, &miss);
-
// Check that the map of the object hasn't changed.
- __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
- Handle<Map>(receiver->map()));
- __ j(not_equal, &miss);
+ __ CheckMap(rdx, Handle<Map>(receiver->map()), &miss,
+ DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);
// Perform global security token check if needed.
if (receiver->IsJSGlobalProxy()) {
@@ -2374,23 +2411,9 @@ Handle<Code> StoreStubCompiler::CompileStoreGlobal(
// Store the value in the cell.
__ movq(cell_operand, rax);
- Label done;
- __ JumpIfSmi(rax, &done);
-
- __ movq(rcx, rax);
- __ lea(rdx, cell_operand);
- // Cells are always in the remembered set.
- __ RecordWrite(rbx, // Object.
- rdx, // Address.
- rcx, // Value.
- kDontSaveFPRegs,
- OMIT_REMEMBERED_SET,
- OMIT_SMI_CHECK);
-
+ // Cells are always rescanned, so no write barrier here.
// Return the value (register rax).
- __ bind(&done);
-
Counters* counters = isolate()->counters();
__ IncrementCounter(counters->named_store_global_inline(), 1);
__ ret(0);
@@ -2451,7 +2474,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
ElementsKind elements_kind = receiver_map->elements_kind();
bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
Handle<Code> stub =
- KeyedStoreElementStub(is_js_array, elements_kind).GetCode();
+ KeyedStoreElementStub(is_js_array, elements_kind, grow_mode_).GetCode();
__ DispatchMap(rdx, receiver_map, stub, DO_SMI_CHECK);
@@ -2582,7 +2605,7 @@ Handle<Code> LoadStubCompiler::CompileLoadCallback(
Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object,
Handle<JSObject> holder,
- Handle<Object> value,
+ Handle<JSFunction> value,
Handle<String> name) {
// ----------- S t a t e -------------
// -- rax : receiver
@@ -2637,14 +2660,8 @@ Handle<Code> LoadStubCompiler::CompileLoadGlobal(
// -----------------------------------
Label miss;
- // If the object is the holder then we know that it's a global
- // object which can only happen for contextual loads. In this case,
- // the receiver cannot be a smi.
- if (!object.is_identical_to(holder)) {
- __ JumpIfSmi(rax, &miss);
- }
-
// Check that the maps haven't changed.
+ __ JumpIfSmi(rax, &miss);
CheckPrototypes(object, rax, holder, rbx, rdx, rdi, name, &miss);
// Get the value from the cell.
@@ -2736,7 +2753,7 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant(
Handle<String> name,
Handle<JSObject> receiver,
Handle<JSObject> holder,
- Handle<Object> value) {
+ Handle<JSFunction> value) {
// ----------- S t a t e -------------
// -- rax : key
// -- rdx : receiver
@@ -3509,14 +3526,16 @@ void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
void KeyedStoreStubCompiler::GenerateStoreFastElement(
MacroAssembler* masm,
bool is_js_array,
- ElementsKind elements_kind) {
+ ElementsKind elements_kind,
+ KeyedAccessGrowMode grow_mode) {
// ----------- S t a t e -------------
// -- rax : value
// -- rcx : key
// -- rdx : receiver
// -- rsp[0] : return address
// -----------------------------------
- Label miss_force_generic, transition_elements_kind;
+ Label miss_force_generic, transition_elements_kind, finish_store, grow;
+ Label check_capacity, slow;
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
@@ -3524,23 +3543,31 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
// Check that the key is a smi.
__ JumpIfNotSmi(rcx, &miss_force_generic);
+ if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
+ __ JumpIfNotSmi(rax, &transition_elements_kind);
+ }
+
// Get the elements array and make sure it is a fast element array, not 'cow'.
__ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
- __ CompareRoot(FieldOperand(rdi, HeapObject::kMapOffset),
- Heap::kFixedArrayMapRootIndex);
- __ j(not_equal, &miss_force_generic);
-
// Check that the key is within bounds.
if (is_js_array) {
__ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
- __ j(above_equal, &miss_force_generic);
+ if (grow_mode == ALLOW_JSARRAY_GROWTH) {
+ __ j(above_equal, &grow);
+ } else {
+ __ j(above_equal, &miss_force_generic);
+ }
} else {
__ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
__ j(above_equal, &miss_force_generic);
}
+ __ CompareRoot(FieldOperand(rdi, HeapObject::kMapOffset),
+ Heap::kFixedArrayMapRootIndex);
+ __ j(not_equal, &miss_force_generic);
+
+ __ bind(&finish_store);
if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
- __ JumpIfNotSmi(rax, &transition_elements_kind);
__ SmiToInteger32(rcx, rcx);
__ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize),
rax);
@@ -3552,8 +3579,8 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize));
__ movq(Operand(rcx, 0), rax);
// Make sure to preserve the value in register rax.
- __ movq(rdx, rax);
- __ RecordWrite(rdi, rcx, rdx, kDontSaveFPRegs);
+ __ movq(rbx, rax);
+ __ RecordWrite(rdi, rcx, rbx, kDontSaveFPRegs);
}
// Done.
@@ -3568,19 +3595,89 @@ void KeyedStoreStubCompiler::GenerateStoreFastElement(
__ bind(&transition_elements_kind);
Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
__ jmp(ic_miss, RelocInfo::CODE_TARGET);
+
+ if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
+ // Grow the array by a single element if possible.
+ __ bind(&grow);
+
+ // Make sure the array is only growing by a single element, anything else
+ // must be handled by the runtime. Flags are already set by previous
+ // compare.
+ __ j(not_equal, &miss_force_generic);
+
+ // Check for the empty array, and preallocate a small backing store if
+ // possible.
+ __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
+ __ CompareRoot(rdi, Heap::kEmptyFixedArrayRootIndex);
+ __ j(not_equal, &check_capacity);
+
+ int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
+ __ AllocateInNewSpace(size, rdi, rbx, r8, &slow, TAG_OBJECT);
+
+ // rax: value
+ // rcx: key
+ // rdx: receiver
+ // rdi: elements
+ // Make sure that the backing store can hold additional elements.
+ __ Move(FieldOperand(rdi, JSObject::kMapOffset),
+ masm->isolate()->factory()->fixed_array_map());
+ __ Move(FieldOperand(rdi, FixedArray::kLengthOffset),
+ Smi::FromInt(JSArray::kPreallocatedArrayElements));
+ __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex);
+ for (int i = 1; i < JSArray::kPreallocatedArrayElements; ++i) {
+ __ movq(FieldOperand(rdi, FixedArray::SizeFor(i)), rbx);
+ }
+
+ // Store the element at index zero.
+ __ movq(FieldOperand(rdi, FixedArray::SizeFor(0)), rax);
+
+ // Install the new backing store in the JSArray.
+ __ movq(FieldOperand(rdx, JSObject::kElementsOffset), rdi);
+ __ RecordWriteField(rdx, JSObject::kElementsOffset, rdi, rbx,
+ kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+
+ // Increment the length of the array.
+ __ Move(FieldOperand(rdx, JSArray::kLengthOffset), Smi::FromInt(1));
+ __ ret(0);
+
+ __ bind(&check_capacity);
+ // Check for cow elements, in general they are not handled by this stub.
+ __ CompareRoot(FieldOperand(rdi, HeapObject::kMapOffset),
+ Heap::kFixedCOWArrayMapRootIndex);
+ __ j(equal, &miss_force_generic);
+
+ // rax: value
+ // rcx: key
+ // rdx: receiver
+ // rdi: elements
+ // Make sure that the backing store can hold additional elements.
+ __ cmpq(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
+ __ j(above_equal, &slow);
+
+ // Grow the array and finish the store.
+ __ SmiAddConstant(FieldOperand(rdx, JSArray::kLengthOffset),
+ Smi::FromInt(1));
+ __ jmp(&finish_store);
+
+ __ bind(&slow);
+ Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
+ __ jmp(ic_slow, RelocInfo::CODE_TARGET);
+ }
}
void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
MacroAssembler* masm,
- bool is_js_array) {
+ bool is_js_array,
+ KeyedAccessGrowMode grow_mode) {
// ----------- S t a t e -------------
// -- rax : value
// -- rcx : key
// -- rdx : receiver
// -- rsp[0] : return address
// -----------------------------------
- Label miss_force_generic, transition_elements_kind;
+ Label miss_force_generic, transition_elements_kind, finish_store;
+ Label grow, slow, check_capacity;
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
@@ -3594,13 +3691,19 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
// Check that the key is within bounds.
if (is_js_array) {
- __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
+ __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
+ if (grow_mode == ALLOW_JSARRAY_GROWTH) {
+ __ j(above_equal, &grow);
+ } else {
+ __ j(above_equal, &miss_force_generic);
+ }
} else {
__ SmiCompare(rcx, FieldOperand(rdi, FixedDoubleArray::kLengthOffset));
+ __ j(above_equal, &miss_force_generic);
}
- __ j(above_equal, &miss_force_generic);
// Handle smi values specially
+ __ bind(&finish_store);
__ SmiToInteger32(rcx, rcx);
__ StoreNumberToDoubleElements(rax, rdi, rcx, xmm0,
&transition_elements_kind);
@@ -3617,6 +3720,71 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
__ Integer32ToSmi(rcx, rcx);
Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
__ jmp(ic_miss, RelocInfo::CODE_TARGET);
+
+ if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
+ // Grow the array by a single element if possible.
+ __ bind(&grow);
+
+ // Make sure the array is only growing by a single element, anything else
+ // must be handled by the runtime. Flags are already set by previous
+ // compare.
+ __ j(not_equal, &miss_force_generic);
+
+ // Transition on values that can't be stored in a FixedDoubleArray.
+ Label value_is_smi;
+ __ JumpIfSmi(rax, &value_is_smi);
+ __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
+ Heap::kHeapNumberMapRootIndex);
+ __ j(not_equal, &transition_elements_kind);
+ __ bind(&value_is_smi);
+
+ // Check for the empty array, and preallocate a small backing store if
+ // possible.
+ __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
+ __ CompareRoot(rdi, Heap::kEmptyFixedArrayRootIndex);
+ __ j(not_equal, &check_capacity);
+
+ int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
+ __ AllocateInNewSpace(size, rdi, rbx, r8, &slow, TAG_OBJECT);
+
+ // rax: value
+ // rcx: key
+ // rdx: receiver
+ // rdi: elements
+ // Initialize the new FixedDoubleArray. Leave elements unitialized for
+ // efficiency, they are guaranteed to be initialized before use.
+ __ Move(FieldOperand(rdi, JSObject::kMapOffset),
+ masm->isolate()->factory()->fixed_double_array_map());
+ __ Move(FieldOperand(rdi, FixedDoubleArray::kLengthOffset),
+ Smi::FromInt(JSArray::kPreallocatedArrayElements));
+
+ // Install the new backing store in the JSArray.
+ __ movq(FieldOperand(rdx, JSObject::kElementsOffset), rdi);
+ __ RecordWriteField(rdx, JSObject::kElementsOffset, rdi, rbx,
+ kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+
+ // Increment the length of the array.
+ __ Move(FieldOperand(rdx, JSArray::kLengthOffset), Smi::FromInt(1));
+ __ jmp(&finish_store);
+
+ __ bind(&check_capacity);
+ // rax: value
+ // rcx: key
+ // rdx: receiver
+ // rdi: elements
+ // Make sure that the backing store can hold additional elements.
+ __ cmpq(rcx, FieldOperand(rdi, FixedDoubleArray::kLengthOffset));
+ __ j(above_equal, &slow);
+
+ // Grow the array and finish the store.
+ __ SmiAddConstant(FieldOperand(rdx, JSArray::kLengthOffset),
+ Smi::FromInt(1));
+ __ jmp(&finish_store);
+
+ __ bind(&slow);
+ Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
+ __ jmp(ic_slow, RelocInfo::CODE_TARGET);
+ }
}
diff --git a/src/3rdparty/v8/src/zone-inl.h b/src/3rdparty/v8/src/zone-inl.h
index 4870105..ee96ec0 100644
--- a/src/3rdparty/v8/src/zone-inl.h
+++ b/src/3rdparty/v8/src/zone-inl.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -28,31 +28,30 @@
#ifndef V8_ZONE_INL_H_
#define V8_ZONE_INL_H_
-#include "isolate.h"
#include "zone.h"
+
+#include "counters.h"
+#include "isolate.h"
+#include "utils.h"
#include "v8-counters.h"
namespace v8 {
namespace internal {
-AssertNoZoneAllocation::AssertNoZoneAllocation()
- : prev_(Isolate::Current()->zone_allow_allocation()) {
- Isolate::Current()->set_zone_allow_allocation(false);
-}
-
-
-AssertNoZoneAllocation::~AssertNoZoneAllocation() {
- Isolate::Current()->set_zone_allow_allocation(prev_);
-}
-
-
inline void* Zone::New(int size) {
- ASSERT(Isolate::Current()->zone_allow_allocation());
ASSERT(ZoneScope::nesting() > 0);
// Round up the requested size to fit the alignment.
size = RoundUp(size, kAlignment);
+ // If the allocation size is divisible by 8 then we return an 8-byte aligned
+ // address.
+ if (kPointerSize == 4 && kAlignment == 4) {
+ position_ += ((~size) & 4) & (reinterpret_cast<intptr_t>(position_) & 4);
+ } else {
+ ASSERT(kAlignment >= kPointerSize);
+ }
+
// Check if the requested size is available without expanding.
Address result = position_;
diff --git a/src/3rdparty/v8/src/zone.cc b/src/3rdparty/v8/src/zone.cc
index 2d14d13..d5d05ab 100644
--- a/src/3rdparty/v8/src/zone.cc
+++ b/src/3rdparty/v8/src/zone.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -25,10 +25,10 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#include "v8.h"
+#include <string.h>
+#include "v8.h"
#include "zone-inl.h"
-#include "splay-tree-inl.h"
namespace v8 {
namespace internal {
diff --git a/src/3rdparty/v8/src/zone.h b/src/3rdparty/v8/src/zone.h
index f60ac0d..8648465 100644
--- a/src/3rdparty/v8/src/zone.h
+++ b/src/3rdparty/v8/src/zone.h
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -29,6 +29,11 @@
#define V8_ZONE_H_
#include "allocation.h"
+#include "checks.h"
+#include "hashmap.h"
+#include "globals.h"
+#include "list.h"
+#include "splay-tree.h"
namespace v8 {
namespace internal {
@@ -42,6 +47,7 @@ enum ZoneScopeMode {
};
class Segment;
+class Isolate;
// The Zone supports very fast allocation of small chunks of
// memory. The chunks cannot be deallocated individually, but instead
@@ -86,7 +92,9 @@ class Zone {
friend class Isolate;
friend class ZoneScope;
- // All pointers returned from New() have this alignment.
+ // All pointers returned from New() have this alignment. In addition, if the
+ // object being allocated has a size that is divisible by 8 then its alignment
+ // will be 8.
static const int kAlignment = kPointerSize;
// Never allocate segments smaller than this size in bytes.
@@ -156,15 +164,6 @@ class ZoneObject {
};
-class AssertNoZoneAllocation {
- public:
- inline AssertNoZoneAllocation();
- inline ~AssertNoZoneAllocation();
- private:
- bool prev_;
-};
-
-
// The ZoneListAllocationPolicy is used to specialize the GenericList
// implementation to allocate ZoneLists and their elements in the
// Zone.
@@ -241,6 +240,8 @@ class ZoneSplayTree: public SplayTree<Config, ZoneListAllocationPolicy> {
};
+typedef TemplateHashMapImpl<ZoneListAllocationPolicy> ZoneHashMap;
+
} } // namespace v8::internal
#endif // V8_ZONE_H_
diff --git a/src/3rdparty/v8/test/cctest/SConscript b/src/3rdparty/v8/test/cctest/SConscript
index 621d8ec..bcd1e98 100644
--- a/src/3rdparty/v8/test/cctest/SConscript
+++ b/src/3rdparty/v8/test/cctest/SConscript
@@ -1,4 +1,4 @@
-# Copyright 2008 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -62,6 +62,7 @@ SOURCES = {
'test-conversions.cc',
'test-cpu-profiler.cc',
'test-dataflow.cc',
+ 'test-date.cc',
'test-debug.cc',
'test-decls.cc',
'test-deoptimization.cc',
@@ -73,6 +74,7 @@ SOURCES = {
'test-fixed-dtoa.cc',
'test-flags.cc',
'test-func-name-inference.cc',
+ 'test-hashing.cc',
'test-hashmap.cc',
'test-heap-profiler.cc',
'test-heap.cc',
@@ -85,6 +87,7 @@ SOURCES = {
'test-parsing.cc',
'test-platform-tls.cc',
'test-profile-generator.cc',
+ 'test-random.cc',
'test-regexp.cc',
'test-reloc-info.cc',
'test-serialize.cc',
@@ -110,7 +113,8 @@ SOURCES = {
],
'arch:x64': ['test-assembler-x64.cc',
'test-macro-assembler-x64.cc',
- 'test-log-stack-tracer.cc'],
+ 'test-log-stack-tracer.cc',
+ 'test-disasm-x64.cc'],
'arch:mips': ['test-assembler-mips.cc',
'test-disasm-mips.cc'],
'os:linux': ['test-platform-linux.cc'],
diff --git a/src/3rdparty/v8/test/cctest/cctest.gyp b/src/3rdparty/v8/test/cctest/cctest.gyp
index efcbad7..a242fe3 100644
--- a/src/3rdparty/v8/test/cctest/cctest.gyp
+++ b/src/3rdparty/v8/test/cctest/cctest.gyp
@@ -1,4 +1,4 @@
-# Copyright 2011 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -57,6 +57,7 @@
'test-conversions.cc',
'test-cpu-profiler.cc',
'test-dataflow.cc',
+ 'test-date.cc',
'test-debug.cc',
'test-decls.cc',
'test-deoptimization.cc',
@@ -68,6 +69,7 @@
'test-fixed-dtoa.cc',
'test-flags.cc',
'test-func-name-inference.cc',
+ 'test-hashing.cc',
'test-hashmap.cc',
'test-heap.cc',
'test-heap-profiler.cc',
@@ -80,6 +82,7 @@
'test-parsing.cc',
'test-platform-tls.cc',
'test-profile-generator.cc',
+ 'test-random.cc',
'test-regexp.cc',
'test-reloc-info.cc',
'test-serialize.cc',
diff --git a/src/3rdparty/v8/test/cctest/cctest.h b/src/3rdparty/v8/test/cctest/cctest.h
index c04d893..0b93562 100644
--- a/src/3rdparty/v8/test/cctest/cctest.h
+++ b/src/3rdparty/v8/test/cctest/cctest.h
@@ -104,7 +104,7 @@ class ApiTestFuzzer: public v8::internal::Thread {
FOURTH_PART,
LAST_PART = FOURTH_PART };
- static void Setup(PartOfTest part);
+ static void SetUp(PartOfTest part);
static void RunAllTests();
static void TearDown();
// This method switches threads if we are running the Threading test.
diff --git a/src/3rdparty/v8/test/cctest/cctest.status b/src/3rdparty/v8/test/cctest/cctest.status
index 7161345..af28be1 100644
--- a/src/3rdparty/v8/test/cctest/cctest.status
+++ b/src/3rdparty/v8/test/cctest/cctest.status
@@ -84,10 +84,8 @@ test-debug/DebugBreakLoop: SKIP
##############################################################################
-[ $arch == mips ]
-test-deoptimization: SKIP
-test-serialize: SKIP
+[ $arch == mips && $crankshaft ]
-# Tests that may time out.
-test-api/ExternalArrays: PASS || TIMEOUT
-test-api/Threading: PASS || TIMEOUT
+# Tests that time out with crankshaft.
+test-debug/ThreadedDebugging: SKIP
+test-debug/DebugBreakLoop: SKIP
diff --git a/src/3rdparty/v8/test/cctest/test-alloc.cc b/src/3rdparty/v8/test/cctest/test-alloc.cc
index 899c902..769fe7b 100644
--- a/src/3rdparty/v8/test/cctest/test-alloc.cc
+++ b/src/3rdparty/v8/test/cctest/test-alloc.cc
@@ -88,7 +88,7 @@ static MaybeObject* AllocateAfterFailures() {
static const int kLargeObjectSpaceFillerLength = 300000;
static const int kLargeObjectSpaceFillerSize = FixedArray::SizeFor(
kLargeObjectSpaceFillerLength);
- ASSERT(kLargeObjectSpaceFillerSize > heap->MaxObjectSizeInPagedSpace());
+ ASSERT(kLargeObjectSpaceFillerSize > heap->old_pointer_space()->AreaSize());
while (heap->OldGenerationSpaceAvailable() > kLargeObjectSpaceFillerSize) {
CHECK(!heap->AllocateFixedArray(kLargeObjectSpaceFillerLength, TENURED)->
IsFailure());
@@ -203,10 +203,10 @@ class Block {
TEST(CodeRange) {
const int code_range_size = 32*MB;
- OS::Setup();
+ OS::SetUp();
Isolate::Current()->InitializeLoggingAndCounters();
CodeRange* code_range = new CodeRange(Isolate::Current());
- code_range->Setup(code_range_size);
+ code_range->SetUp(code_range_size);
int current_allocated = 0;
int total_allocated = 0;
List<Block> blocks(1000);
@@ -214,11 +214,13 @@ TEST(CodeRange) {
while (total_allocated < 5 * code_range_size) {
if (current_allocated < code_range_size / 10) {
// Allocate a block.
- // Geometrically distributed sizes, greater than Page::kMaxHeapObjectSize.
+ // Geometrically distributed sizes, greater than
+ // Page::kMaxNonCodeHeapObjectSize (which is greater than code page area).
// TODO(gc): instead of using 3 use some contant based on code_range_size
// kMaxHeapObjectSize.
- size_t requested = (Page::kMaxHeapObjectSize << (Pseudorandom() % 3)) +
- Pseudorandom() % 5000 + 1;
+ size_t requested =
+ (Page::kMaxNonCodeHeapObjectSize << (Pseudorandom() % 3)) +
+ Pseudorandom() % 5000 + 1;
size_t allocated = 0;
Address base = code_range->AllocateRawMemory(requested, &allocated);
CHECK(base != NULL);
diff --git a/src/3rdparty/v8/test/cctest/test-api.cc b/src/3rdparty/v8/test/cctest/test-api.cc
index b394e95..b1a23c1 100644
--- a/src/3rdparty/v8/test/cctest/test-api.cc
+++ b/src/3rdparty/v8/test/cctest/test-api.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -450,8 +450,7 @@ THREADED_TEST(ScriptMakingExternalString) {
CHECK_EQ(0, dispose_count);
}
i::Isolate::Current()->compilation_cache()->Clear();
- // TODO(1608): This should use kAbortIncrementalMarking.
- HEAP->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(1, dispose_count);
}
@@ -477,8 +476,7 @@ THREADED_TEST(ScriptMakingExternalAsciiString) {
CHECK_EQ(0, dispose_count);
}
i::Isolate::Current()->compilation_cache()->Clear();
- // TODO(1608): This should use kAbortIncrementalMarking.
- HEAP->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(1, dispose_count);
}
@@ -491,7 +489,7 @@ TEST(MakingExternalStringConditions) {
HEAP->CollectGarbage(i::NEW_SPACE);
HEAP->CollectGarbage(i::NEW_SPACE);
- uint16_t* two_byte_string = AsciiToTwoByteString("small");
+ uint16_t* two_byte_string = AsciiToTwoByteString("s1");
Local<String> small_string = String::New(two_byte_string);
i::DeleteArray(two_byte_string);
@@ -503,7 +501,7 @@ TEST(MakingExternalStringConditions) {
// Old space strings should be accepted.
CHECK(small_string->CanMakeExternal());
- two_byte_string = AsciiToTwoByteString("small 2");
+ two_byte_string = AsciiToTwoByteString("small string 2");
small_string = String::New(two_byte_string);
i::DeleteArray(two_byte_string);
@@ -537,7 +535,7 @@ TEST(MakingExternalAsciiStringConditions) {
HEAP->CollectGarbage(i::NEW_SPACE);
HEAP->CollectGarbage(i::NEW_SPACE);
- Local<String> small_string = String::New("small");
+ Local<String> small_string = String::New("s1");
// We should refuse to externalize newly created small string.
CHECK(!small_string->CanMakeExternal());
// Trigger GCs so that the newly allocated string moves to old gen.
@@ -546,7 +544,7 @@ TEST(MakingExternalAsciiStringConditions) {
// Old space strings should be accepted.
CHECK(small_string->CanMakeExternal());
- small_string = String::New("small 2");
+ small_string = String::New("small string 2");
// We should refuse externalizing newly created small string.
CHECK(!small_string->CanMakeExternal());
for (int i = 0; i < 100; i++) {
@@ -1189,7 +1187,6 @@ THREADED_TEST(GlobalPrototype) {
templ->Set("x", v8_num(200));
templ->SetAccessor(v8_str("m"), GetM);
LocalContext env(0, templ);
- v8::Handle<v8::Object> obj(env->Global());
v8::Handle<Script> script(v8_compile("dummy()"));
v8::Handle<Value> result(script->Run());
CHECK_EQ(13.4, result->NumberValue());
@@ -1423,6 +1420,40 @@ THREADED_TEST(EmptyInterceptorDoesNotAffectJSProperties) {
THREADED_TEST(SwitchFromInterceptorToAccessor) {
v8::HandleScope scope;
+ Handle<FunctionTemplate> templ = FunctionTemplate::New();
+ AddAccessor(templ, v8_str("age"),
+ SimpleAccessorGetter, SimpleAccessorSetter);
+ AddInterceptor(templ, InterceptorGetter, InterceptorSetter);
+ LocalContext env;
+ env->Global()->Set(v8_str("Obj"), templ->GetFunction());
+ CompileRun("var obj = new Obj;"
+ "function setAge(i){ obj.age = i; };"
+ "for(var i = 0; i <= 10000; i++) setAge(i);");
+ // All i < 10000 go to the interceptor.
+ ExpectInt32("obj.interceptor_age", 9999);
+ // The last i goes to the accessor.
+ ExpectInt32("obj.accessor_age", 10000);
+}
+
+THREADED_TEST(SwitchFromAccessorToInterceptor) {
+ v8::HandleScope scope;
+ Handle<FunctionTemplate> templ = FunctionTemplate::New();
+ AddAccessor(templ, v8_str("age"),
+ SimpleAccessorGetter, SimpleAccessorSetter);
+ AddInterceptor(templ, InterceptorGetter, InterceptorSetter);
+ LocalContext env;
+ env->Global()->Set(v8_str("Obj"), templ->GetFunction());
+ CompileRun("var obj = new Obj;"
+ "function setAge(i){ obj.age = i; };"
+ "for(var i = 20000; i >= 9999; i--) setAge(i);");
+ // All i >= 10000 go to the accessor.
+ ExpectInt32("obj.accessor_age", 10000);
+ // The last i goes to the interceptor.
+ ExpectInt32("obj.interceptor_age", 9999);
+}
+
+THREADED_TEST(SwitchFromInterceptorToAccessorWithInheritance) {
+ v8::HandleScope scope;
Handle<FunctionTemplate> parent = FunctionTemplate::New();
Handle<FunctionTemplate> child = FunctionTemplate::New();
child->Inherit(parent);
@@ -1440,7 +1471,7 @@ THREADED_TEST(SwitchFromInterceptorToAccessor) {
ExpectInt32("child.accessor_age", 10000);
}
-THREADED_TEST(SwitchFromAccessorToInterceptor) {
+THREADED_TEST(SwitchFromAccessorToInterceptorWithInheritance) {
v8::HandleScope scope;
Handle<FunctionTemplate> parent = FunctionTemplate::New();
Handle<FunctionTemplate> child = FunctionTemplate::New();
@@ -1459,6 +1490,54 @@ THREADED_TEST(SwitchFromAccessorToInterceptor) {
ExpectInt32("child.interceptor_age", 9999);
}
+THREADED_TEST(SwitchFromInterceptorToJSAccessor) {
+ v8::HandleScope scope;
+ Handle<FunctionTemplate> templ = FunctionTemplate::New();
+ AddInterceptor(templ, InterceptorGetter, InterceptorSetter);
+ LocalContext env;
+ env->Global()->Set(v8_str("Obj"), templ->GetFunction());
+ CompileRun("var obj = new Obj;"
+ "function setter(i) { this.accessor_age = i; };"
+ "function getter() { return this.accessor_age; };"
+ "function setAge(i) { obj.age = i; };"
+ "Object.defineProperty(obj, 'age', { get:getter, set:setter });"
+ "for(var i = 0; i <= 10000; i++) setAge(i);");
+ // All i < 10000 go to the interceptor.
+ ExpectInt32("obj.interceptor_age", 9999);
+ // The last i goes to the JavaScript accessor.
+ ExpectInt32("obj.accessor_age", 10000);
+ // The installed JavaScript getter is still intact.
+ // This last part is a regression test for issue 1651 and relies on the fact
+ // that both interceptor and accessor are being installed on the same object.
+ ExpectInt32("obj.age", 10000);
+ ExpectBoolean("obj.hasOwnProperty('age')", true);
+ ExpectUndefined("Object.getOwnPropertyDescriptor(obj, 'age').value");
+}
+
+THREADED_TEST(SwitchFromJSAccessorToInterceptor) {
+ v8::HandleScope scope;
+ Handle<FunctionTemplate> templ = FunctionTemplate::New();
+ AddInterceptor(templ, InterceptorGetter, InterceptorSetter);
+ LocalContext env;
+ env->Global()->Set(v8_str("Obj"), templ->GetFunction());
+ CompileRun("var obj = new Obj;"
+ "function setter(i) { this.accessor_age = i; };"
+ "function getter() { return this.accessor_age; };"
+ "function setAge(i) { obj.age = i; };"
+ "Object.defineProperty(obj, 'age', { get:getter, set:setter });"
+ "for(var i = 20000; i >= 9999; i--) setAge(i);");
+ // All i >= 10000 go to the accessor.
+ ExpectInt32("obj.accessor_age", 10000);
+ // The last i goes to the interceptor.
+ ExpectInt32("obj.interceptor_age", 9999);
+ // The installed JavaScript getter is still intact.
+ // This last part is a regression test for issue 1651 and relies on the fact
+ // that both interceptor and accessor are being installed on the same object.
+ ExpectInt32("obj.age", 10000);
+ ExpectBoolean("obj.hasOwnProperty('age')", true);
+ ExpectUndefined("Object.getOwnPropertyDescriptor(obj, 'age').value");
+}
+
THREADED_TEST(SwitchFromInterceptorToProperty) {
v8::HandleScope scope;
Handle<FunctionTemplate> parent = FunctionTemplate::New();
@@ -1765,7 +1844,7 @@ THREADED_TEST(DeepCrossLanguageRecursion) {
env->Global()->Set(v8_str("depth"), v8::Integer::New(0));
call_recursively_script = v8_compile("callScriptRecursively()");
- v8::Handle<Value> result(call_recursively_script->Run());
+ call_recursively_script->Run();
call_recursively_script = v8::Handle<Script>();
env->Global()->Set(v8_str("depth"), v8::Integer::New(0));
@@ -2172,9 +2251,8 @@ THREADED_TEST(ApiObjectGroups) {
V8::AddObjectGroup(g2_objects, 2);
V8::AddImplicitReferences(g2s2, g2_children, 1);
}
- // Do a single full GC. Use kMakeHeapIterableMask to ensure that
- // incremental garbage collection is stopped.
- HEAP->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
+ // Do a single full GC, ensure incremental marking is stopped.
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
// All object should be alive.
CHECK_EQ(0, counter.NumberOfWeakCalls());
@@ -2198,7 +2276,7 @@ THREADED_TEST(ApiObjectGroups) {
V8::AddImplicitReferences(g2s2, g2_children, 1);
}
- HEAP->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
// All objects should be gone. 5 global handles in total.
CHECK_EQ(5, counter.NumberOfWeakCalls());
@@ -2207,7 +2285,7 @@ THREADED_TEST(ApiObjectGroups) {
g1c1.MakeWeak(reinterpret_cast<void*>(&counter), &WeakPointerCallback);
g2c1.MakeWeak(reinterpret_cast<void*>(&counter), &WeakPointerCallback);
- HEAP->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
CHECK_EQ(7, counter.NumberOfWeakCalls());
}
@@ -2263,7 +2341,7 @@ THREADED_TEST(ApiObjectGroupsCycle) {
V8::AddImplicitReferences(g3s1, g3_children, 1);
}
// Do a single full GC
- HEAP->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
// All object should be alive.
CHECK_EQ(0, counter.NumberOfWeakCalls());
@@ -2287,7 +2365,7 @@ THREADED_TEST(ApiObjectGroupsCycle) {
V8::AddImplicitReferences(g3s1, g3_children, 1);
}
- HEAP->CollectAllGarbage(i::Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
// All objects should be gone. 7 global handles in total.
CHECK_EQ(7, counter.NumberOfWeakCalls());
@@ -2785,6 +2863,16 @@ THREADED_TEST(isNumberType) {
obj = env->Global()->Get(v8_str("obj"));
CHECK(!obj->IsInt32());
CHECK(!obj->IsUint32());
+ // Positive zero
+ CompileRun("var obj = 0.0;");
+ obj = env->Global()->Get(v8_str("obj"));
+ CHECK(obj->IsInt32());
+ CHECK(obj->IsUint32());
+ // Positive zero
+ CompileRun("var obj = -0.0;");
+ obj = env->Global()->Get(v8_str("obj"));
+ CHECK(!obj->IsInt32());
+ CHECK(!obj->IsUint32());
}
@@ -4384,7 +4472,7 @@ THREADED_TEST(ExtensibleOnUndetectable) {
source = v8_str("undetectable.y = 2000;");
script = Script::Compile(source);
- Local<Value> result(script->Run());
+ script->Run();
ExpectBoolean("undetectable.y == undefined", true);
}
@@ -4737,9 +4825,10 @@ THREADED_TEST(NativeFunctionDeclarationError) {
const char* extension_names[] = { name };
v8::ExtensionConfiguration extensions(1, extension_names);
v8::Handle<Context> context(Context::New(&extensions));
- ASSERT(context.IsEmpty());
+ CHECK(context.IsEmpty());
}
+
THREADED_TEST(NativeFunctionDeclarationErrorEscape) {
v8::HandleScope handle_scope;
const char* name = "nativedeclerresc";
@@ -4751,7 +4840,7 @@ THREADED_TEST(NativeFunctionDeclarationErrorEscape) {
const char* extension_names[] = { name };
v8::ExtensionConfiguration extensions(1, extension_names);
v8::Handle<Context> context(Context::New(&extensions));
- ASSERT(context.IsEmpty());
+ CHECK(context.IsEmpty());
}
@@ -4917,7 +5006,7 @@ TEST(RegexpOutOfMemory) {
Local<Script> script =
Script::Compile(String::New(js_code_causing_huge_string_flattening));
last_location = NULL;
- Local<Value> result(script->Run());
+ script->Run();
CHECK(false); // Should not return.
}
@@ -5437,6 +5526,17 @@ static int StrNCmp16(uint16_t* a, uint16_t* b, int n) {
}
+int GetUtf8Length(Handle<String> str) {
+ int len = str->Utf8Length();
+ if (len < 0) {
+ i::Handle<i::String> istr(v8::Utils::OpenHandle(*str));
+ i::FlattenString(istr);
+ len = str->Utf8Length();
+ }
+ return len;
+}
+
+
THREADED_TEST(StringWrite) {
LocalContext context;
v8::HandleScope scope;
@@ -5517,7 +5617,7 @@ THREADED_TEST(StringWrite) {
CHECK_EQ(0, strncmp(utf8buf, "ab\1", 3));
memset(utf8buf, 0x1, sizeof(utf8buf));
- len = left_tree->Utf8Length();
+ len = GetUtf8Length(left_tree);
int utf8_expected =
(0x80 + (0x800 - 0x80) * 2 + (0xd800 - 0x800) * 3) / kStride;
CHECK_EQ(utf8_expected, len);
@@ -5531,7 +5631,7 @@ THREADED_TEST(StringWrite) {
CHECK_EQ(1, utf8buf[utf8_expected]);
memset(utf8buf, 0x1, sizeof(utf8buf));
- len = right_tree->Utf8Length();
+ len = GetUtf8Length(right_tree);
CHECK_EQ(utf8_expected, len);
len = right_tree->WriteUtf8(utf8buf, utf8_expected, &charlen);
CHECK_EQ(utf8_expected, len);
@@ -5656,6 +5756,225 @@ THREADED_TEST(StringWrite) {
}
+static void Utf16Helper(
+ LocalContext& context,
+ const char* name,
+ const char* lengths_name,
+ int len) {
+ Local<v8::Array> a =
+ Local<v8::Array>::Cast(context->Global()->Get(v8_str(name)));
+ Local<v8::Array> alens =
+ Local<v8::Array>::Cast(context->Global()->Get(v8_str(lengths_name)));
+ for (int i = 0; i < len; i++) {
+ Local<v8::String> string =
+ Local<v8::String>::Cast(a->Get(i));
+ Local<v8::Number> expected_len =
+ Local<v8::Number>::Cast(alens->Get(i));
+ CHECK_EQ(expected_len->Value() != string->Length(),
+ string->MayContainNonAscii());
+ int length = GetUtf8Length(string);
+ CHECK_EQ(static_cast<int>(expected_len->Value()), length);
+ }
+}
+
+
+static uint16_t StringGet(Handle<String> str, int index) {
+ i::Handle<i::String> istring =
+ v8::Utils::OpenHandle(String::Cast(*str));
+ return istring->Get(index);
+}
+
+
+static void WriteUtf8Helper(
+ LocalContext& context,
+ const char* name,
+ const char* lengths_name,
+ int len) {
+ Local<v8::Array> b =
+ Local<v8::Array>::Cast(context->Global()->Get(v8_str(name)));
+ Local<v8::Array> alens =
+ Local<v8::Array>::Cast(context->Global()->Get(v8_str(lengths_name)));
+ char buffer[1000];
+ char buffer2[1000];
+ for (int i = 0; i < len; i++) {
+ Local<v8::String> string =
+ Local<v8::String>::Cast(b->Get(i));
+ Local<v8::Number> expected_len =
+ Local<v8::Number>::Cast(alens->Get(i));
+ int utf8_length = static_cast<int>(expected_len->Value());
+ for (int j = utf8_length + 1; j >= 0; j--) {
+ memset(reinterpret_cast<void*>(&buffer), 42, sizeof(buffer));
+ memset(reinterpret_cast<void*>(&buffer2), 42, sizeof(buffer2));
+ int nchars;
+ int utf8_written =
+ string->WriteUtf8(buffer, j, &nchars, String::NO_OPTIONS);
+ int utf8_written2 =
+ string->WriteUtf8(buffer2, j, &nchars, String::NO_NULL_TERMINATION);
+ CHECK_GE(utf8_length + 1, utf8_written);
+ CHECK_GE(utf8_length, utf8_written2);
+ for (int k = 0; k < utf8_written2; k++) {
+ CHECK_EQ(buffer[k], buffer2[k]);
+ }
+ CHECK(nchars * 3 >= utf8_written - 1);
+ CHECK(nchars <= utf8_written);
+ if (j == utf8_length + 1) {
+ CHECK_EQ(utf8_written2, utf8_length);
+ CHECK_EQ(utf8_written2 + 1, utf8_written);
+ }
+ CHECK_EQ(buffer[utf8_written], 42);
+ if (j > utf8_length) {
+ if (utf8_written != 0) CHECK_EQ(buffer[utf8_written - 1], 0);
+ if (utf8_written > 1) CHECK_NE(buffer[utf8_written - 2], 42);
+ Handle<String> roundtrip = v8_str(buffer);
+ CHECK(roundtrip->Equals(string));
+ } else {
+ if (utf8_written != 0) CHECK_NE(buffer[utf8_written - 1], 42);
+ }
+ if (utf8_written2 != 0) CHECK_NE(buffer[utf8_written - 1], 42);
+ if (nchars >= 2) {
+ uint16_t trail = StringGet(string, nchars - 1);
+ uint16_t lead = StringGet(string, nchars - 2);
+ if (((lead & 0xfc00) == 0xd800) &&
+ ((trail & 0xfc00) == 0xdc00)) {
+ unsigned char u1 = buffer2[utf8_written2 - 4];
+ unsigned char u2 = buffer2[utf8_written2 - 3];
+ unsigned char u3 = buffer2[utf8_written2 - 2];
+ unsigned char u4 = buffer2[utf8_written2 - 1];
+ CHECK_EQ((u1 & 0xf8), 0xf0);
+ CHECK_EQ((u2 & 0xc0), 0x80);
+ CHECK_EQ((u3 & 0xc0), 0x80);
+ CHECK_EQ((u4 & 0xc0), 0x80);
+ uint32_t c = 0x10000 + ((lead & 0x3ff) << 10) + (trail & 0x3ff);
+ CHECK_EQ((u4 & 0x3f), (c & 0x3f));
+ CHECK_EQ((u3 & 0x3f), ((c >> 6) & 0x3f));
+ CHECK_EQ((u2 & 0x3f), ((c >> 12) & 0x3f));
+ CHECK_EQ((u1 & 0x3), c >> 18);
+ }
+ }
+ }
+ }
+}
+
+
+THREADED_TEST(Utf16) {
+ LocalContext context;
+ v8::HandleScope scope;
+ CompileRun(
+ "var pad = '01234567890123456789';"
+ "var p = [];"
+ "var plens = [20, 3, 3];"
+ "p.push('01234567890123456789');"
+ "var lead = 0xd800;"
+ "var trail = 0xdc00;"
+ "p.push(String.fromCharCode(0xd800));"
+ "p.push(String.fromCharCode(0xdc00));"
+ "var a = [];"
+ "var b = [];"
+ "var c = [];"
+ "var alens = [];"
+ "for (var i = 0; i < 3; i++) {"
+ " p[1] = String.fromCharCode(lead++);"
+ " for (var j = 0; j < 3; j++) {"
+ " p[2] = String.fromCharCode(trail++);"
+ " a.push(p[i] + p[j]);"
+ " b.push(p[i] + p[j]);"
+ " c.push(p[i] + p[j]);"
+ " alens.push(plens[i] + plens[j]);"
+ " }"
+ "}"
+ "alens[5] -= 2;" // Here the surrogate pairs match up.
+ "var a2 = [];"
+ "var b2 = [];"
+ "var c2 = [];"
+ "var a2lens = [];"
+ "for (var m = 0; m < 9; m++) {"
+ " for (var n = 0; n < 9; n++) {"
+ " a2.push(a[m] + a[n]);"
+ " b2.push(b[m] + b[n]);"
+ " var newc = 'x' + c[m] + c[n] + 'y';"
+ " c2.push(newc.substring(1, newc.length - 1));"
+ " var utf = alens[m] + alens[n];" // And here.
+ // The 'n's that start with 0xdc.. are 6-8
+ // The 'm's that end with 0xd8.. are 1, 4 and 7
+ " if ((m % 3) == 1 && n >= 6) utf -= 2;"
+ " a2lens.push(utf);"
+ " }"
+ "}");
+ Utf16Helper(context, "a", "alens", 9);
+ Utf16Helper(context, "a2", "a2lens", 81);
+ WriteUtf8Helper(context, "b", "alens", 9);
+ WriteUtf8Helper(context, "b2", "a2lens", 81);
+ WriteUtf8Helper(context, "c2", "a2lens", 81);
+}
+
+
+static bool SameSymbol(Handle<String> s1, Handle<String> s2) {
+ i::Handle<i::String> is1(v8::Utils::OpenHandle(*s1));
+ i::Handle<i::String> is2(v8::Utils::OpenHandle(*s2));
+ return *is1 == *is2;
+}
+
+
+static void SameSymbolHelper(const char* a, const char* b) {
+ Handle<String> symbol1 = v8::String::NewSymbol(a);
+ Handle<String> symbol2 = v8::String::NewSymbol(b);
+ CHECK(SameSymbol(symbol1, symbol2));
+}
+
+
+THREADED_TEST(Utf16Symbol) {
+ LocalContext context;
+ v8::HandleScope scope;
+
+ Handle<String> symbol1 = v8::String::NewSymbol("abc");
+ Handle<String> symbol2 = v8::String::NewSymbol("abc");
+ CHECK(SameSymbol(symbol1, symbol2));
+
+ SameSymbolHelper("\360\220\220\205", // 4 byte encoding.
+ "\355\240\201\355\260\205"); // 2 3-byte surrogates.
+ SameSymbolHelper("\355\240\201\355\260\206", // 2 3-byte surrogates.
+ "\360\220\220\206"); // 4 byte encoding.
+ SameSymbolHelper("x\360\220\220\205", // 4 byte encoding.
+ "x\355\240\201\355\260\205"); // 2 3-byte surrogates.
+ SameSymbolHelper("x\355\240\201\355\260\206", // 2 3-byte surrogates.
+ "x\360\220\220\206"); // 4 byte encoding.
+ CompileRun(
+ "var sym0 = 'benedictus';"
+ "var sym0b = 'S\303\270ren';"
+ "var sym1 = '\355\240\201\355\260\207';"
+ "var sym2 = '\360\220\220\210';"
+ "var sym3 = 'x\355\240\201\355\260\207';"
+ "var sym4 = 'x\360\220\220\210';"
+ "if (sym1.length != 2) throw sym1;"
+ "if (sym1.charCodeAt(1) != 0xdc07) throw sym1.charCodeAt(1);"
+ "if (sym2.length != 2) throw sym2;"
+ "if (sym2.charCodeAt(1) != 0xdc08) throw sym2.charCodeAt(2);"
+ "if (sym3.length != 3) throw sym3;"
+ "if (sym3.charCodeAt(2) != 0xdc07) throw sym1.charCodeAt(2);"
+ "if (sym4.length != 3) throw sym4;"
+ "if (sym4.charCodeAt(2) != 0xdc08) throw sym2.charCodeAt(2);");
+ Handle<String> sym0 = v8::String::NewSymbol("benedictus");
+ Handle<String> sym0b = v8::String::NewSymbol("S\303\270ren");
+ Handle<String> sym1 = v8::String::NewSymbol("\355\240\201\355\260\207");
+ Handle<String> sym2 = v8::String::NewSymbol("\360\220\220\210");
+ Handle<String> sym3 = v8::String::NewSymbol("x\355\240\201\355\260\207");
+ Handle<String> sym4 = v8::String::NewSymbol("x\360\220\220\210");
+ v8::Local<v8::Object> global = context->Global();
+ Local<Value> s0 = global->Get(v8_str("sym0"));
+ Local<Value> s0b = global->Get(v8_str("sym0b"));
+ Local<Value> s1 = global->Get(v8_str("sym1"));
+ Local<Value> s2 = global->Get(v8_str("sym2"));
+ Local<Value> s3 = global->Get(v8_str("sym3"));
+ Local<Value> s4 = global->Get(v8_str("sym4"));
+ CHECK(SameSymbol(sym0, Handle<String>(String::Cast(*s0))));
+ CHECK(SameSymbol(sym0b, Handle<String>(String::Cast(*s0b))));
+ CHECK(SameSymbol(sym1, Handle<String>(String::Cast(*s1))));
+ CHECK(SameSymbol(sym2, Handle<String>(String::Cast(*s2))));
+ CHECK(SameSymbol(sym3, Handle<String>(String::Cast(*s3))));
+ CHECK(SameSymbol(sym4, Handle<String>(String::Cast(*s4))));
+}
+
+
THREADED_TEST(ToArrayIndex) {
v8::HandleScope scope;
LocalContext context;
@@ -5695,7 +6014,6 @@ THREADED_TEST(ErrorConstruction) {
v8::Handle<String> message = v8_str("message");
v8::Handle<Value> range_error = v8::Exception::RangeError(foo);
CHECK(range_error->IsObject());
- v8::Handle<v8::Object> range_obj(range_error.As<v8::Object>());
CHECK(range_error.As<v8::Object>()->Get(message)->Equals(foo));
v8::Handle<Value> reference_error = v8::Exception::ReferenceError(foo);
CHECK(reference_error->IsObject());
@@ -7265,7 +7583,7 @@ THREADED_TEST(CallKnownGlobalReceiver) {
// Create new environment reusing the global object.
LocalContext env(NULL, instance_template, global_object);
env->Global()->Set(v8_str("foo"), foo);
- Local<Value> value(Script::Compile(v8_str("foo()"))->Run());
+ Script::Compile(v8_str("foo()"))->Run();
}
}
@@ -7453,6 +7771,60 @@ THREADED_TEST(SetPrototype) {
}
+// Getting property names of an object with a prototype chain that
+// triggers dictionary elements in GetLocalPropertyNames() shouldn't
+// crash the runtime.
+THREADED_TEST(Regress91517) {
+ i::FLAG_allow_natives_syntax = true;
+ v8::HandleScope handle_scope;
+ LocalContext context;
+
+ Local<v8::FunctionTemplate> t1 = v8::FunctionTemplate::New();
+ t1->SetHiddenPrototype(true);
+ t1->InstanceTemplate()->Set(v8_str("foo"), v8_num(1));
+ Local<v8::FunctionTemplate> t2 = v8::FunctionTemplate::New();
+ t2->SetHiddenPrototype(true);
+ t2->InstanceTemplate()->Set(v8_str("fuz1"), v8_num(2));
+ t2->InstanceTemplate()->Set(v8_str("objects"), v8::Object::New());
+ t2->InstanceTemplate()->Set(v8_str("fuz2"), v8_num(2));
+ Local<v8::FunctionTemplate> t3 = v8::FunctionTemplate::New();
+ t3->SetHiddenPrototype(true);
+ t3->InstanceTemplate()->Set(v8_str("boo"), v8_num(3));
+ Local<v8::FunctionTemplate> t4 = v8::FunctionTemplate::New();
+ t4->InstanceTemplate()->Set(v8_str("baz"), v8_num(4));
+
+ // Force dictionary-based properties.
+ i::ScopedVector<char> name_buf(1024);
+ for (int i = 1; i <= 1000; i++) {
+ i::OS::SNPrintF(name_buf, "sdf%d", i);
+ t2->InstanceTemplate()->Set(v8_str(name_buf.start()), v8_num(2));
+ }
+
+ Local<v8::Object> o1 = t1->GetFunction()->NewInstance();
+ Local<v8::Object> o2 = t2->GetFunction()->NewInstance();
+ Local<v8::Object> o3 = t3->GetFunction()->NewInstance();
+ Local<v8::Object> o4 = t4->GetFunction()->NewInstance();
+
+ // Create prototype chain of hidden prototypes.
+ CHECK(o4->SetPrototype(o3));
+ CHECK(o3->SetPrototype(o2));
+ CHECK(o2->SetPrototype(o1));
+
+ // Call the runtime version of GetLocalPropertyNames() on the natively
+ // created object through JavaScript.
+ context->Global()->Set(v8_str("obj"), o4);
+ CompileRun("var names = %GetLocalPropertyNames(obj);");
+
+ ExpectInt32("names.length", 1006);
+ ExpectTrue("names.indexOf(\"baz\") >= 0");
+ ExpectTrue("names.indexOf(\"boo\") >= 0");
+ ExpectTrue("names.indexOf(\"foo\") >= 0");
+ ExpectTrue("names.indexOf(\"fuz1\") >= 0");
+ ExpectTrue("names.indexOf(\"fuz2\") >= 0");
+ ExpectFalse("names[1005] == undefined");
+}
+
+
THREADED_TEST(FunctionReadOnlyPrototype) {
v8::HandleScope handle_scope;
LocalContext context;
@@ -7537,6 +7909,7 @@ THREADED_TEST(Constructor) {
context->Global()->Set(v8_str("Fun"), cons);
Local<v8::Object> inst = cons->NewInstance();
i::Handle<i::JSObject> obj(v8::Utils::OpenHandle(*inst));
+ CHECK(obj->IsJSObject());
Local<Value> value = CompileRun("(new Fun()).constructor === Fun");
CHECK(value->BooleanValue());
}
@@ -7805,7 +8178,7 @@ THREADED_TEST(CrossEval) {
other->SetSecurityToken(token);
current->SetSecurityToken(token);
- // Setup reference from current to other.
+ // Set up reference from current to other.
current->Global()->Set(v8_str("other"), other->Global());
// Check that new variables are introduced in other context.
@@ -7885,7 +8258,7 @@ THREADED_TEST(EvalInDetachedGlobal) {
v8::Persistent<Context> context0 = Context::New();
v8::Persistent<Context> context1 = Context::New();
- // Setup function in context0 that uses eval from context0.
+ // Set up function in context0 that uses eval from context0.
context0->Enter();
v8::Handle<v8::Value> fun =
CompileRun("var x = 42;"
@@ -7923,7 +8296,7 @@ THREADED_TEST(CrossLazyLoad) {
other->SetSecurityToken(token);
current->SetSecurityToken(token);
- // Setup reference from current to other.
+ // Set up reference from current to other.
current->Global()->Set(v8_str("other"), other->Global());
// Trigger lazy loading in other context.
@@ -8008,6 +8381,7 @@ THREADED_TEST(CallAsFunction) {
{ Local<v8::FunctionTemplate> t = v8::FunctionTemplate::New();
Local<ObjectTemplate> instance_template(t->InstanceTemplate());
+ USE(instance_template);
Local<v8::Object> instance = t->GetFunction()->NewInstance();
context->Global()->Set(v8_str("obj2"), instance);
v8::TryCatch try_catch;
@@ -8637,10 +9011,10 @@ THREADED_TEST(InterceptorStoreIC) {
0, 0, 0, v8_str("data"));
LocalContext context;
context->Global()->Set(v8_str("o"), templ->NewInstance());
- v8::Handle<Value> value(CompileRun(
- "for (var i = 0; i < 1000; i++) {"
- " o.x = 42;"
- "}"));
+ CompileRun(
+ "for (var i = 0; i < 1000; i++) {"
+ " o.x = 42;"
+ "}");
}
@@ -8766,17 +9140,6 @@ THREADED_TEST(InterceptorCallICInvalidatedCacheable) {
}
-static v8::Handle<Value> call_ic_function5;
-static v8::Handle<Value> InterceptorCallICGetter5(Local<String> name,
- const AccessorInfo& info) {
- ApiTestFuzzer::Fuzz();
- if (v8_str("x")->Equals(name))
- return call_ic_function5;
- else
- return Local<Value>();
-}
-
-
// This test checks that if interceptor doesn't provide a function,
// cached constant function is used
THREADED_TEST(InterceptorCallICConstantFunctionUsed) {
@@ -8797,6 +9160,17 @@ THREADED_TEST(InterceptorCallICConstantFunctionUsed) {
}
+static v8::Handle<Value> call_ic_function5;
+static v8::Handle<Value> InterceptorCallICGetter5(Local<String> name,
+ const AccessorInfo& info) {
+ ApiTestFuzzer::Fuzz();
+ if (v8_str("x")->Equals(name))
+ return call_ic_function5;
+ else
+ return Local<Value>();
+}
+
+
// This test checks that if interceptor provides a function,
// even if we cached constant function, interceptor's function
// is invoked
@@ -8820,6 +9194,48 @@ THREADED_TEST(InterceptorCallICConstantFunctionNotNeeded) {
}
+static v8::Handle<Value> call_ic_function6;
+static v8::Handle<Value> InterceptorCallICGetter6(Local<String> name,
+ const AccessorInfo& info) {
+ ApiTestFuzzer::Fuzz();
+ if (v8_str("x")->Equals(name))
+ return call_ic_function6;
+ else
+ return Local<Value>();
+}
+
+
+// Same test as above, except the code is wrapped in a function
+// to test the optimized compiler.
+THREADED_TEST(InterceptorCallICConstantFunctionNotNeededWrapped) {
+ i::FLAG_allow_natives_syntax = true;
+ v8::HandleScope scope;
+ v8::Handle<v8::ObjectTemplate> templ = ObjectTemplate::New();
+ templ->SetNamedPropertyHandler(InterceptorCallICGetter6);
+ LocalContext context;
+ context->Global()->Set(v8_str("o"), templ->NewInstance());
+ call_ic_function6 =
+ v8_compile("function f(x) { return x - 1; }; f")->Run();
+ v8::Handle<Value> value = CompileRun(
+ "function inc(x) { return x + 1; };"
+ "inc(1);"
+ "o.x = inc;"
+ "function test() {"
+ " var result = 0;"
+ " for (var i = 0; i < 1000; i++) {"
+ " result = o.x(42);"
+ " }"
+ " return result;"
+ "};"
+ "test();"
+ "test();"
+ "test();"
+ "%OptimizeFunctionOnNextCall(test);"
+ "test()");
+ CHECK_EQ(41, value->Int32Value());
+}
+
+
// Test the case when we stored constant function into
// a stub, but it got invalidated later on
THREADED_TEST(InterceptorCallICInvalidatedConstantFunction) {
@@ -9066,11 +9482,11 @@ THREADED_TEST(InterceptorCallICFastApi_TrivialSignature) {
v8::Handle<v8::Function> fun = fun_templ->GetFunction();
GenerateSomeGarbage();
context->Global()->Set(v8_str("o"), fun->NewInstance());
- v8::Handle<Value> value(CompileRun(
+ CompileRun(
"var result = 0;"
"for (var i = 0; i < 100; i++) {"
" result = o.method(41);"
- "}"));
+ "}");
CHECK_EQ(42, context->Global()->Get(v8_str("result"))->Int32Value());
CHECK_EQ(100, interceptor_call_count);
}
@@ -9093,14 +9509,14 @@ THREADED_TEST(InterceptorCallICFastApi_SimpleSignature) {
v8::Handle<v8::Function> fun = fun_templ->GetFunction();
GenerateSomeGarbage();
context->Global()->Set(v8_str("o"), fun->NewInstance());
- v8::Handle<Value> value(CompileRun(
+ CompileRun(
"o.foo = 17;"
"var receiver = {};"
"receiver.__proto__ = o;"
"var result = 0;"
"for (var i = 0; i < 100; i++) {"
" result = receiver.method(41);"
- "}"));
+ "}");
CHECK_EQ(42, context->Global()->Get(v8_str("result"))->Int32Value());
CHECK_EQ(100, interceptor_call_count);
}
@@ -9123,7 +9539,7 @@ THREADED_TEST(InterceptorCallICFastApi_SimpleSignature_Miss1) {
v8::Handle<v8::Function> fun = fun_templ->GetFunction();
GenerateSomeGarbage();
context->Global()->Set(v8_str("o"), fun->NewInstance());
- v8::Handle<Value> value(CompileRun(
+ CompileRun(
"o.foo = 17;"
"var receiver = {};"
"receiver.__proto__ = o;"
@@ -9135,7 +9551,7 @@ THREADED_TEST(InterceptorCallICFastApi_SimpleSignature_Miss1) {
" saved_result = result;"
" receiver = {method: function(x) { return x - 1 }};"
" }"
- "}"));
+ "}");
CHECK_EQ(40, context->Global()->Get(v8_str("result"))->Int32Value());
CHECK_EQ(42, context->Global()->Get(v8_str("saved_result"))->Int32Value());
CHECK_GE(interceptor_call_count, 50);
@@ -9159,7 +9575,7 @@ THREADED_TEST(InterceptorCallICFastApi_SimpleSignature_Miss2) {
v8::Handle<v8::Function> fun = fun_templ->GetFunction();
GenerateSomeGarbage();
context->Global()->Set(v8_str("o"), fun->NewInstance());
- v8::Handle<Value> value(CompileRun(
+ CompileRun(
"o.foo = 17;"
"var receiver = {};"
"receiver.__proto__ = o;"
@@ -9171,7 +9587,7 @@ THREADED_TEST(InterceptorCallICFastApi_SimpleSignature_Miss2) {
" saved_result = result;"
" o.method = function(x) { return x - 1 };"
" }"
- "}"));
+ "}");
CHECK_EQ(40, context->Global()->Get(v8_str("result"))->Int32Value());
CHECK_EQ(42, context->Global()->Get(v8_str("saved_result"))->Int32Value());
CHECK_GE(interceptor_call_count, 50);
@@ -9196,7 +9612,7 @@ THREADED_TEST(InterceptorCallICFastApi_SimpleSignature_Miss3) {
GenerateSomeGarbage();
context->Global()->Set(v8_str("o"), fun->NewInstance());
v8::TryCatch try_catch;
- v8::Handle<Value> value(CompileRun(
+ CompileRun(
"o.foo = 17;"
"var receiver = {};"
"receiver.__proto__ = o;"
@@ -9208,7 +9624,7 @@ THREADED_TEST(InterceptorCallICFastApi_SimpleSignature_Miss3) {
" saved_result = result;"
" receiver = 333;"
" }"
- "}"));
+ "}");
CHECK(try_catch.HasCaught());
CHECK_EQ(v8_str("TypeError: Object 333 has no method 'method'"),
try_catch.Exception()->ToString());
@@ -9235,7 +9651,7 @@ THREADED_TEST(InterceptorCallICFastApi_SimpleSignature_TypeError) {
GenerateSomeGarbage();
context->Global()->Set(v8_str("o"), fun->NewInstance());
v8::TryCatch try_catch;
- v8::Handle<Value> value(CompileRun(
+ CompileRun(
"o.foo = 17;"
"var receiver = {};"
"receiver.__proto__ = o;"
@@ -9247,7 +9663,7 @@ THREADED_TEST(InterceptorCallICFastApi_SimpleSignature_TypeError) {
" saved_result = result;"
" receiver = {method: receiver.method};"
" }"
- "}"));
+ "}");
CHECK(try_catch.HasCaught());
CHECK_EQ(v8_str("TypeError: Illegal invocation"),
try_catch.Exception()->ToString());
@@ -9265,15 +9681,16 @@ THREADED_TEST(CallICFastApi_TrivialSignature) {
v8::Handle<v8::ObjectTemplate> proto_templ = fun_templ->PrototypeTemplate();
proto_templ->Set(v8_str("method"), method_templ);
v8::Handle<v8::ObjectTemplate> templ(fun_templ->InstanceTemplate());
+ USE(templ);
LocalContext context;
v8::Handle<v8::Function> fun = fun_templ->GetFunction();
GenerateSomeGarbage();
context->Global()->Set(v8_str("o"), fun->NewInstance());
- v8::Handle<Value> value(CompileRun(
+ CompileRun(
"var result = 0;"
"for (var i = 0; i < 100; i++) {"
" result = o.method(41);"
- "}"));
+ "}");
CHECK_EQ(42, context->Global()->Get(v8_str("result"))->Int32Value());
}
@@ -9288,18 +9705,19 @@ THREADED_TEST(CallICFastApi_SimpleSignature) {
v8::Handle<v8::ObjectTemplate> proto_templ = fun_templ->PrototypeTemplate();
proto_templ->Set(v8_str("method"), method_templ);
v8::Handle<v8::ObjectTemplate> templ(fun_templ->InstanceTemplate());
+ CHECK(!templ.IsEmpty());
LocalContext context;
v8::Handle<v8::Function> fun = fun_templ->GetFunction();
GenerateSomeGarbage();
context->Global()->Set(v8_str("o"), fun->NewInstance());
- v8::Handle<Value> value(CompileRun(
+ CompileRun(
"o.foo = 17;"
"var receiver = {};"
"receiver.__proto__ = o;"
"var result = 0;"
"for (var i = 0; i < 100; i++) {"
" result = receiver.method(41);"
- "}"));
+ "}");
CHECK_EQ(42, context->Global()->Get(v8_str("result"))->Int32Value());
}
@@ -9314,11 +9732,12 @@ THREADED_TEST(CallICFastApi_SimpleSignature_Miss1) {
v8::Handle<v8::ObjectTemplate> proto_templ = fun_templ->PrototypeTemplate();
proto_templ->Set(v8_str("method"), method_templ);
v8::Handle<v8::ObjectTemplate> templ(fun_templ->InstanceTemplate());
+ CHECK(!templ.IsEmpty());
LocalContext context;
v8::Handle<v8::Function> fun = fun_templ->GetFunction();
GenerateSomeGarbage();
context->Global()->Set(v8_str("o"), fun->NewInstance());
- v8::Handle<Value> value(CompileRun(
+ CompileRun(
"o.foo = 17;"
"var receiver = {};"
"receiver.__proto__ = o;"
@@ -9330,7 +9749,7 @@ THREADED_TEST(CallICFastApi_SimpleSignature_Miss1) {
" saved_result = result;"
" receiver = {method: function(x) { return x - 1 }};"
" }"
- "}"));
+ "}");
CHECK_EQ(40, context->Global()->Get(v8_str("result"))->Int32Value());
CHECK_EQ(42, context->Global()->Get(v8_str("saved_result"))->Int32Value());
}
@@ -9345,12 +9764,13 @@ THREADED_TEST(CallICFastApi_SimpleSignature_Miss2) {
v8::Handle<v8::ObjectTemplate> proto_templ = fun_templ->PrototypeTemplate();
proto_templ->Set(v8_str("method"), method_templ);
v8::Handle<v8::ObjectTemplate> templ(fun_templ->InstanceTemplate());
+ CHECK(!templ.IsEmpty());
LocalContext context;
v8::Handle<v8::Function> fun = fun_templ->GetFunction();
GenerateSomeGarbage();
context->Global()->Set(v8_str("o"), fun->NewInstance());
v8::TryCatch try_catch;
- v8::Handle<Value> value(CompileRun(
+ CompileRun(
"o.foo = 17;"
"var receiver = {};"
"receiver.__proto__ = o;"
@@ -9362,7 +9782,7 @@ THREADED_TEST(CallICFastApi_SimpleSignature_Miss2) {
" saved_result = result;"
" receiver = 333;"
" }"
- "}"));
+ "}");
CHECK(try_catch.HasCaught());
CHECK_EQ(v8_str("TypeError: Object 333 has no method 'method'"),
try_catch.Exception()->ToString());
@@ -9390,7 +9810,7 @@ THREADED_TEST(InterceptorKeyedCallICKeyChange1) {
templ->SetNamedPropertyHandler(NoBlockGetterX);
LocalContext context;
context->Global()->Set(v8_str("o"), templ->NewInstance());
- v8::Handle<Value> value(CompileRun(
+ CompileRun(
"proto = new Object();"
"proto.y = function(x) { return x + 1; };"
"proto.z = function(x) { return x - 1; };"
@@ -9400,7 +9820,7 @@ THREADED_TEST(InterceptorKeyedCallICKeyChange1) {
"for (var i = 0; i < 10; i++) {"
" if (i == 5) { method = 'z'; };"
" result += o[method](41);"
- "}"));
+ "}");
CHECK_EQ(42*5 + 40*5, context->Global()->Get(v8_str("result"))->Int32Value());
}
@@ -9416,7 +9836,7 @@ THREADED_TEST(InterceptorKeyedCallICKeyChange2) {
context->Global()->Set(v8_str("proto1"), templ->NewInstance());
keyed_call_ic_function =
v8_compile("function f(x) { return x - 1; }; f")->Run();
- v8::Handle<Value> value(CompileRun(
+ CompileRun(
"o = new Object();"
"proto2 = new Object();"
"o.y = function(x) { return x + 1; };"
@@ -9428,7 +9848,7 @@ THREADED_TEST(InterceptorKeyedCallICKeyChange2) {
"for (var i = 0; i < 10; i++) {"
" if (i == 5) { method = 'y'; };"
" result += o[method](41);"
- "}"));
+ "}");
CHECK_EQ(42*5 + 40*5, context->Global()->Get(v8_str("result"))->Int32Value());
}
@@ -9441,7 +9861,7 @@ THREADED_TEST(InterceptorKeyedCallICKeyChangeOnGlobal) {
templ->SetNamedPropertyHandler(NoBlockGetterX);
LocalContext context;
context->Global()->Set(v8_str("o"), templ->NewInstance());
- v8::Handle<Value> value(CompileRun(
+ CompileRun(
"function inc(x) { return x + 1; };"
"inc(1);"
"function dec(x) { return x - 1; };"
@@ -9454,7 +9874,7 @@ THREADED_TEST(InterceptorKeyedCallICKeyChangeOnGlobal) {
"for (var i = 0; i < 10; i++) {"
" if (i == 5) { method = 'y'; };"
" result += o[method](41);"
- "}"));
+ "}");
CHECK_EQ(42*5 + 40*5, context->Global()->Get(v8_str("result"))->Int32Value());
}
@@ -9467,7 +9887,7 @@ THREADED_TEST(InterceptorKeyedCallICFromGlobal) {
LocalContext context;
context->Global()->Set(v8_str("o"), templ_o->NewInstance());
- v8::Handle<Value> value(CompileRun(
+ CompileRun(
"function len(x) { return x.length; };"
"o.__proto__ = this;"
"var m = 'parseFloat';"
@@ -9478,7 +9898,7 @@ THREADED_TEST(InterceptorKeyedCallICFromGlobal) {
" saved_result = result;"
" };"
" result = o[m]('239');"
- "}"));
+ "}");
CHECK_EQ(3, context->Global()->Get(v8_str("result"))->Int32Value());
CHECK_EQ(239, context->Global()->Get(v8_str("saved_result"))->Int32Value());
}
@@ -9491,7 +9911,7 @@ THREADED_TEST(InterceptorKeyedCallICMapChangeBefore) {
LocalContext context;
context->Global()->Set(v8_str("proto"), templ_o->NewInstance());
- v8::Handle<Value> value(CompileRun(
+ CompileRun(
"var o = new Object();"
"o.__proto__ = proto;"
"o.method = function(x) { return x + 1; };"
@@ -9500,7 +9920,7 @@ THREADED_TEST(InterceptorKeyedCallICMapChangeBefore) {
"for (var i = 0; i < 10; i++) {"
" if (i == 5) { o.method = function(x) { return x - 1; }; };"
" result += o[m](41);"
- "}"));
+ "}");
CHECK_EQ(42*5 + 40*5, context->Global()->Get(v8_str("result"))->Int32Value());
}
@@ -9513,7 +9933,7 @@ THREADED_TEST(InterceptorKeyedCallICMapChangeAfter) {
LocalContext context;
context->Global()->Set(v8_str("o"), templ_o->NewInstance());
- v8::Handle<Value> value(CompileRun(
+ CompileRun(
"var proto = new Object();"
"o.__proto__ = proto;"
"proto.method = function(x) { return x + 1; };"
@@ -9522,7 +9942,7 @@ THREADED_TEST(InterceptorKeyedCallICMapChangeAfter) {
"for (var i = 0; i < 10; i++) {"
" if (i == 5) { proto.method = function(x) { return x - 1; }; };"
" result += o[m](41);"
- "}"));
+ "}");
CHECK_EQ(42*5 + 40*5, context->Global()->Get(v8_str("result"))->Int32Value());
}
@@ -10034,7 +10454,7 @@ void ApiTestFuzzer::Run() {
static unsigned linear_congruential_generator;
-void ApiTestFuzzer::Setup(PartOfTest part) {
+void ApiTestFuzzer::SetUp(PartOfTest part) {
linear_congruential_generator = i::FLAG_testing_prng_seed;
fuzzing_ = true;
int count = RegisterThreadedTest::count();
@@ -10098,25 +10518,25 @@ void ApiTestFuzzer::TearDown() {
// Lets not be needlessly self-referential.
TEST(Threading) {
- ApiTestFuzzer::Setup(ApiTestFuzzer::FIRST_PART);
+ ApiTestFuzzer::SetUp(ApiTestFuzzer::FIRST_PART);
ApiTestFuzzer::RunAllTests();
ApiTestFuzzer::TearDown();
}
TEST(Threading2) {
- ApiTestFuzzer::Setup(ApiTestFuzzer::SECOND_PART);
+ ApiTestFuzzer::SetUp(ApiTestFuzzer::SECOND_PART);
ApiTestFuzzer::RunAllTests();
ApiTestFuzzer::TearDown();
}
TEST(Threading3) {
- ApiTestFuzzer::Setup(ApiTestFuzzer::THIRD_PART);
+ ApiTestFuzzer::SetUp(ApiTestFuzzer::THIRD_PART);
ApiTestFuzzer::RunAllTests();
ApiTestFuzzer::TearDown();
}
TEST(Threading4) {
- ApiTestFuzzer::Setup(ApiTestFuzzer::FOURTH_PART);
+ ApiTestFuzzer::SetUp(ApiTestFuzzer::FOURTH_PART);
ApiTestFuzzer::RunAllTests();
ApiTestFuzzer::TearDown();
}
@@ -10439,6 +10859,7 @@ THREADED_TEST(NestedHandleScopeAndContexts) {
env->Enter();
v8::Handle<Value> value = NestedScope(env);
v8::Handle<String> str(value->ToString());
+ CHECK(!str.IsEmpty());
env->Exit();
env.Dispose();
}
@@ -10447,6 +10868,7 @@ THREADED_TEST(NestedHandleScopeAndContexts) {
THREADED_TEST(ExternalAllocatedMemory) {
v8::HandleScope outer;
v8::Persistent<Context> env(Context::New());
+ CHECK(!env.IsEmpty());
const int kSize = 1024*1024;
CHECK_EQ(v8::V8::AdjustAmountOfExternalAllocatedMemory(kSize), kSize);
CHECK_EQ(v8::V8::AdjustAmountOfExternalAllocatedMemory(-kSize), 0);
@@ -10785,6 +11207,7 @@ THREADED_TEST(AccessControlRepeatedContextCreation) {
i::FunctionTemplateInfo::cast(internal_template->constructor()));
CHECK(!constructor->access_check_info()->IsUndefined());
v8::Persistent<Context> context0(Context::New(NULL, global_template));
+ CHECK(!context0.IsEmpty());
CHECK(!constructor->access_check_info()->IsUndefined());
}
@@ -10854,14 +11277,18 @@ THREADED_TEST(TurnOnAccessCheck) {
}
-v8::Handle<v8::String> a;
-v8::Handle<v8::String> h;
+static const char* kPropertyA = "a";
+static const char* kPropertyH = "h";
static bool NamedGetAccessBlockAandH(Local<v8::Object> obj,
Local<Value> name,
v8::AccessType type,
Local<Value> data) {
- return !(name->Equals(a) || name->Equals(h));
+ if (!name->IsString()) return false;
+ i::Handle<i::String> name_handle =
+ v8::Utils::OpenHandle(String::Cast(*name));
+ return !name_handle->IsEqualTo(i::CStrVector(kPropertyA))
+ && !name_handle->IsEqualTo(i::CStrVector(kPropertyH));
}
@@ -10870,9 +11297,7 @@ THREADED_TEST(TurnOnAccessCheckAndRecompile) {
// Create an environment with access check to the global object disabled by
// default. When the registered access checker will block access to properties
- // a and h
- a = v8_str("a");
- h = v8_str("h");
+ // a and h.
v8::Handle<v8::ObjectTemplate> global_template = v8::ObjectTemplate::New();
global_template->SetAccessCheckCallbacks(NamedGetAccessBlockAandH,
IndexedGetAccessBlocker,
@@ -11510,6 +11935,9 @@ THREADED_TEST(MorphCompositeStringTest) {
"var slice = lhs.substring(1, lhs.length - 1);"
"var slice_on_cons = (lhs + rhs).substring(1, lhs.length *2 - 1);");
+ CHECK(!lhs->MayContainNonAscii());
+ CHECK(!rhs->MayContainNonAscii());
+
MorphAString(*v8::Utils::OpenHandle(*lhs), &ascii_resource, &uc16_resource);
MorphAString(*v8::Utils::OpenHandle(*rhs), &ascii_resource, &uc16_resource);
@@ -11975,7 +12403,7 @@ THREADED_TEST(GetCallingContext) {
callback_templ->GetFunction());
calling_context0->Exit();
- // Expose context0 in context1 and setup a function that calls the
+ // Expose context0 in context1 and set up a function that calls the
// callback function.
calling_context1->Enter();
calling_context1->Global()->Set(v8_str("context0"),
@@ -12133,18 +12561,21 @@ THREADED_TEST(PixelArray) {
i::Handle<i::Smi> value(i::Smi::FromInt(2));
i::Handle<i::Object> no_failure;
- no_failure = i::SetElement(jsobj, 1, value, i::kNonStrictMode);
+ no_failure =
+ i::JSObject::SetElement(jsobj, 1, value, NONE, i::kNonStrictMode);
ASSERT(!no_failure.is_null());
i::USE(no_failure);
CHECK_EQ(2, i::Smi::cast(jsobj->GetElement(1)->ToObjectChecked())->value());
*value.location() = i::Smi::FromInt(256);
- no_failure = i::SetElement(jsobj, 1, value, i::kNonStrictMode);
+ no_failure =
+ i::JSObject::SetElement(jsobj, 1, value, NONE, i::kNonStrictMode);
ASSERT(!no_failure.is_null());
i::USE(no_failure);
CHECK_EQ(255,
i::Smi::cast(jsobj->GetElement(1)->ToObjectChecked())->value());
*value.location() = i::Smi::FromInt(-1);
- no_failure = i::SetElement(jsobj, 1, value, i::kNonStrictMode);
+ no_failure =
+ i::JSObject::SetElement(jsobj, 1, value, NONE, i::kNonStrictMode);
ASSERT(!no_failure.is_null());
i::USE(no_failure);
CHECK_EQ(0, i::Smi::cast(jsobj->GetElement(1)->ToObjectChecked())->value());
@@ -12706,8 +13137,15 @@ static void ExternalArrayTestHelper(v8::ExternalArrayType array_type,
"}"
"ext_array[7];");
CHECK_EQ(0, result->Int32Value());
- CHECK_EQ(
- 0, static_cast<int>(jsobj->GetElement(7)->ToObjectChecked()->Number()));
+ if (array_type == v8::kExternalDoubleArray ||
+ array_type == v8::kExternalFloatArray) {
+ CHECK_EQ(
+ static_cast<int>(i::OS::nan_value()),
+ static_cast<int>(jsobj->GetElement(7)->ToObjectChecked()->Number()));
+ } else {
+ CHECK_EQ(0, static_cast<int>(
+ jsobj->GetElement(7)->ToObjectChecked()->Number()));
+ }
result = CompileRun("for (var i = 0; i < 8; i++) {"
" ext_array[6] = '2.3';"
@@ -12860,11 +13298,6 @@ static void ExternalArrayTestHelper(v8::ExternalArrayType array_type,
const int kLargeElementCount = kXSize * kYSize * 4;
ElementType* large_array_data =
static_cast<ElementType*>(malloc(kLargeElementCount * element_size));
- i::Handle<ExternalArrayClass> large_array(
- i::Handle<ExternalArrayClass>::cast(
- FACTORY->NewExternalArray(kLargeElementCount,
- array_type,
- array_data)));
v8::Handle<v8::Object> large_obj = v8::Object::New();
// Set the elements to be the external array.
large_obj->SetIndexedPropertiesToExternalArrayData(large_array_data,
@@ -13265,8 +13698,8 @@ TEST(CaptureStackTrace) {
v8::Handle<v8::String> overview_src = v8::String::New(overview_source);
v8::Handle<Value> overview_result(
v8::Script::New(overview_src, origin)->Run());
- ASSERT(!overview_result.IsEmpty());
- ASSERT(overview_result->IsObject());
+ CHECK(!overview_result.IsEmpty());
+ CHECK(overview_result->IsObject());
// Test getting DETAILED information.
const char *detailed_source =
@@ -13285,8 +13718,8 @@ TEST(CaptureStackTrace) {
v8::Handle<v8::Script> detailed_script(
v8::Script::New(detailed_src, &detailed_origin));
v8::Handle<Value> detailed_result(detailed_script->Run());
- ASSERT(!detailed_result.IsEmpty());
- ASSERT(detailed_result->IsObject());
+ CHECK(!detailed_result.IsEmpty());
+ CHECK(detailed_result->IsObject());
}
@@ -13344,6 +13777,137 @@ TEST(CaptureStackTraceForUncaughtExceptionAndSetters) {
}
+static void RethrowStackTraceHandler(v8::Handle<v8::Message> message,
+ v8::Handle<v8::Value> data) {
+ // Use the frame where JavaScript is called from.
+ v8::Handle<v8::StackTrace> stack_trace = message->GetStackTrace();
+ CHECK(!stack_trace.IsEmpty());
+ int frame_count = stack_trace->GetFrameCount();
+ CHECK_EQ(3, frame_count);
+ int line_number[] = {1, 2, 5};
+ for (int i = 0; i < frame_count; i++) {
+ CHECK_EQ(line_number[i], stack_trace->GetFrame(i)->GetLineNumber());
+ }
+}
+
+
+// Test that we only return the stack trace at the site where the exception
+// is first thrown (not where it is rethrown).
+TEST(RethrowStackTrace) {
+ v8::HandleScope scope;
+ LocalContext env;
+ // We make sure that
+ // - the stack trace of the ReferenceError in g() is reported.
+ // - the stack trace is not overwritten when e1 is rethrown by t().
+ // - the stack trace of e2 does not overwrite that of e1.
+ const char* source =
+ "function g() { error; } \n"
+ "function f() { g(); } \n"
+ "function t(e) { throw e; } \n"
+ "try { \n"
+ " f(); \n"
+ "} catch (e1) { \n"
+ " try { \n"
+ " error; \n"
+ " } catch (e2) { \n"
+ " t(e1); \n"
+ " } \n"
+ "} \n";
+ v8::V8::AddMessageListener(RethrowStackTraceHandler);
+ v8::V8::SetCaptureStackTraceForUncaughtExceptions(true);
+ CompileRun(source);
+ v8::V8::SetCaptureStackTraceForUncaughtExceptions(false);
+ v8::V8::RemoveMessageListeners(RethrowStackTraceHandler);
+}
+
+
+static void RethrowPrimitiveStackTraceHandler(v8::Handle<v8::Message> message,
+ v8::Handle<v8::Value> data) {
+ v8::Handle<v8::StackTrace> stack_trace = message->GetStackTrace();
+ CHECK(!stack_trace.IsEmpty());
+ int frame_count = stack_trace->GetFrameCount();
+ CHECK_EQ(2, frame_count);
+ int line_number[] = {3, 7};
+ for (int i = 0; i < frame_count; i++) {
+ CHECK_EQ(line_number[i], stack_trace->GetFrame(i)->GetLineNumber());
+ }
+}
+
+
+// Test that we do not recognize identity for primitive exceptions.
+TEST(RethrowPrimitiveStackTrace) {
+ v8::HandleScope scope;
+ LocalContext env;
+ // We do not capture stack trace for non Error objects on creation time.
+ // Instead, we capture the stack trace on last throw.
+ const char* source =
+ "function g() { throw 404; } \n"
+ "function f() { g(); } \n"
+ "function t(e) { throw e; } \n"
+ "try { \n"
+ " f(); \n"
+ "} catch (e1) { \n"
+ " t(e1) \n"
+ "} \n";
+ v8::V8::AddMessageListener(RethrowPrimitiveStackTraceHandler);
+ v8::V8::SetCaptureStackTraceForUncaughtExceptions(true);
+ CompileRun(source);
+ v8::V8::SetCaptureStackTraceForUncaughtExceptions(false);
+ v8::V8::RemoveMessageListeners(RethrowPrimitiveStackTraceHandler);
+}
+
+
+static void RethrowExistingStackTraceHandler(v8::Handle<v8::Message> message,
+ v8::Handle<v8::Value> data) {
+ // Use the frame where JavaScript is called from.
+ v8::Handle<v8::StackTrace> stack_trace = message->GetStackTrace();
+ CHECK(!stack_trace.IsEmpty());
+ CHECK_EQ(1, stack_trace->GetFrameCount());
+ CHECK_EQ(1, stack_trace->GetFrame(0)->GetLineNumber());
+}
+
+
+// Test that the stack trace is captured when the error object is created and
+// not where it is thrown.
+TEST(RethrowExistingStackTrace) {
+ v8::HandleScope scope;
+ LocalContext env;
+ const char* source =
+ "var e = new Error(); \n"
+ "throw e; \n";
+ v8::V8::AddMessageListener(RethrowExistingStackTraceHandler);
+ v8::V8::SetCaptureStackTraceForUncaughtExceptions(true);
+ CompileRun(source);
+ v8::V8::SetCaptureStackTraceForUncaughtExceptions(false);
+ v8::V8::RemoveMessageListeners(RethrowExistingStackTraceHandler);
+}
+
+
+static void RethrowBogusErrorStackTraceHandler(v8::Handle<v8::Message> message,
+ v8::Handle<v8::Value> data) {
+ // Use the frame where JavaScript is called from.
+ v8::Handle<v8::StackTrace> stack_trace = message->GetStackTrace();
+ CHECK(!stack_trace.IsEmpty());
+ CHECK_EQ(1, stack_trace->GetFrameCount());
+ CHECK_EQ(2, stack_trace->GetFrame(0)->GetLineNumber());
+}
+
+
+// Test that the stack trace is captured where the bogus Error object is thrown.
+TEST(RethrowBogusErrorStackTrace) {
+ v8::HandleScope scope;
+ LocalContext env;
+ const char* source =
+ "var e = {__proto__: new Error()} \n"
+ "throw e; \n";
+ v8::V8::AddMessageListener(RethrowBogusErrorStackTraceHandler);
+ v8::V8::SetCaptureStackTraceForUncaughtExceptions(true);
+ CompileRun(source);
+ v8::V8::SetCaptureStackTraceForUncaughtExceptions(false);
+ v8::V8::RemoveMessageListeners(RethrowBogusErrorStackTraceHandler);
+}
+
+
v8::Handle<Value> AnalyzeStackOfEvalWithSourceURL(const v8::Arguments& args) {
v8::HandleScope scope;
v8::Handle<v8::StackTrace> stackTrace =
@@ -13384,14 +13948,71 @@ TEST(SourceURLInStackTrace) {
// Test that idle notification can be handled and eventually returns true.
+// This just checks the contract of the IdleNotification() function,
+// and does not verify that it does reasonable work.
THREADED_TEST(IdleNotification) {
- bool rv = false;
- for (int i = 0; i < 100; i++) {
- rv = v8::V8::IdleNotification();
- if (rv)
- break;
+ v8::HandleScope scope;
+ LocalContext env;
+ {
+ // Create garbage in old-space to generate work for idle notification.
+ i::AlwaysAllocateScope always_allocate;
+ for (int i = 0; i < 100; i++) {
+ FACTORY->NewFixedArray(1000, i::TENURED);
+ }
+ }
+ bool finshed_idle_work = false;
+ for (int i = 0; i < 100 && !finshed_idle_work; i++) {
+ finshed_idle_work = v8::V8::IdleNotification();
+ }
+ CHECK(finshed_idle_work);
+}
+
+// Test that idle notification can be handled and eventually returns true.
+// This just checks the contract of the IdleNotification() function,
+// and does not verify that it does reasonable work.
+TEST(IdleNotificationWithSmallHint) {
+ v8::HandleScope scope;
+ LocalContext env;
+ {
+ // Create garbage in old-space to generate work for idle notification.
+ i::AlwaysAllocateScope always_allocate;
+ for (int i = 0; i < 100; i++) {
+ FACTORY->NewFixedArray(1000, i::TENURED);
+ }
}
- CHECK(rv == true);
+ intptr_t old_size = HEAP->SizeOfObjects();
+ bool finshed_idle_work = false;
+ bool no_idle_work = v8::V8::IdleNotification(10);
+ for (int i = 0; i < 200 && !finshed_idle_work; i++) {
+ finshed_idle_work = v8::V8::IdleNotification(10);
+ }
+ intptr_t new_size = HEAP->SizeOfObjects();
+ CHECK(finshed_idle_work);
+ CHECK(no_idle_work || new_size < old_size);
+}
+
+
+// This just checks the contract of the IdleNotification() function,
+// and does not verify that it does reasonable work.
+TEST(IdleNotificationWithLargeHint) {
+ v8::HandleScope scope;
+ LocalContext env;
+ {
+ // Create garbage in old-space to generate work for idle notification.
+ i::AlwaysAllocateScope always_allocate;
+ for (int i = 0; i < 100; i++) {
+ FACTORY->NewFixedArray(1000, i::TENURED);
+ }
+ }
+ intptr_t old_size = HEAP->SizeOfObjects();
+ bool finshed_idle_work = false;
+ bool no_idle_work = v8::V8::IdleNotification(900);
+ for (int i = 0; i < 200 && !finshed_idle_work; i++) {
+ finshed_idle_work = v8::V8::IdleNotification(900);
+ }
+ intptr_t new_size = HEAP->SizeOfObjects();
+ CHECK(finshed_idle_work);
+ CHECK(no_idle_work || new_size < old_size);
}
@@ -13482,6 +14103,63 @@ THREADED_TEST(GetHeapStatistics) {
}
+class VisitorImpl : public v8::ExternalResourceVisitor {
+ public:
+ VisitorImpl(TestResource* r1, TestResource* r2)
+ : resource1_(r1),
+ resource2_(r2),
+ found_resource1_(false),
+ found_resource2_(false) {}
+ virtual ~VisitorImpl() {}
+ virtual void VisitExternalString(v8::Handle<v8::String> string) {
+ if (!string->IsExternal()) {
+ CHECK(string->IsExternalAscii());
+ return;
+ }
+ v8::String::ExternalStringResource* resource =
+ string->GetExternalStringResource();
+ CHECK(resource);
+ if (resource1_ == resource) {
+ CHECK(!found_resource1_);
+ found_resource1_ = true;
+ }
+ if (resource2_ == resource) {
+ CHECK(!found_resource2_);
+ found_resource2_ = true;
+ }
+ }
+ void CheckVisitedResources() {
+ CHECK(found_resource1_);
+ CHECK(found_resource2_);
+ }
+
+ private:
+ v8::String::ExternalStringResource* resource1_;
+ v8::String::ExternalStringResource* resource2_;
+ bool found_resource1_;
+ bool found_resource2_;
+};
+
+TEST(VisitExternalStrings) {
+ v8::HandleScope scope;
+ LocalContext env;
+ const char* string = "Some string";
+ uint16_t* two_byte_string = AsciiToTwoByteString(string);
+ TestResource* resource1 = new TestResource(two_byte_string);
+ v8::Local<v8::String> string1 = v8::String::NewExternal(resource1);
+ TestResource* resource2 = new TestResource(two_byte_string);
+ v8::Local<v8::String> string2 = v8::String::NewExternal(resource2);
+
+ // We need to add usages for string1 and string2 to avoid warnings in GCC 4.7
+ CHECK(string1->IsExternal());
+ CHECK(string2->IsExternal());
+
+ VisitorImpl visitor(resource1, resource2);
+ v8::V8::VisitExternalResources(&visitor);
+ visitor.CheckVisitedResources();
+}
+
+
static double DoubleFromBits(uint64_t value) {
double target;
memcpy(&target, &value, sizeof(target));
@@ -13564,7 +14242,13 @@ THREADED_TEST(QuietSignalingNaNs) {
} else {
uint64_t stored_bits = DoubleToBits(stored_number);
// Check if quiet nan (bits 51..62 all set).
+#if defined(V8_TARGET_ARCH_MIPS) && !defined(USE_SIMULATOR)
+ // Most significant fraction bit for quiet nan is set to 0
+ // on MIPS architecture. Allowed by IEEE-754.
+ CHECK_EQ(0xffe, static_cast<int>((stored_bits >> 51) & 0xfff));
+#else
CHECK_EQ(0xfff, static_cast<int>((stored_bits >> 51) & 0xfff));
+#endif
}
// Check that Date::New preserves non-NaNs in the date range and
@@ -13577,7 +14261,13 @@ THREADED_TEST(QuietSignalingNaNs) {
} else {
uint64_t stored_bits = DoubleToBits(stored_date);
// Check if quiet nan (bits 51..62 all set).
+#if defined(V8_TARGET_ARCH_MIPS) && !defined(USE_SIMULATOR)
+ // Most significant fraction bit for quiet nan is set to 0
+ // on MIPS architecture. Allowed by IEEE-754.
+ CHECK_EQ(0xffe, static_cast<int>((stored_bits >> 51) & 0xfff));
+#else
CHECK_EQ(0xfff, static_cast<int>((stored_bits >> 51) & 0xfff));
+#endif
}
}
}
@@ -13587,6 +14277,7 @@ static v8::Handle<Value> SpaghettiIncident(const v8::Arguments& args) {
v8::HandleScope scope;
v8::TryCatch tc;
v8::Handle<v8::String> str(args[0]->ToString());
+ USE(str);
if (tc.HasCaught())
return tc.ReThrow();
return v8::Undefined();
@@ -13731,6 +14422,17 @@ THREADED_TEST(ScriptOrigin) {
CHECK_EQ(0, script_origin_g.ResourceLineOffset()->Int32Value());
}
+THREADED_TEST(FunctionGetInferredName) {
+ v8::HandleScope scope;
+ LocalContext env;
+ v8::ScriptOrigin origin = v8::ScriptOrigin(v8::String::New("test"));
+ v8::Handle<v8::String> script = v8::String::New(
+ "var foo = { bar : { baz : function() {}}}; var f = foo.bar.baz;");
+ v8::Script::Compile(script, &origin)->Run();
+ v8::Local<v8::Function> f = v8::Local<v8::Function>::Cast(
+ env->Global()->Get(v8::String::New("f")));
+ CHECK_EQ("foo.bar.baz", *v8::String::AsciiValue(f->GetInferredName()));
+}
THREADED_TEST(ScriptLineNumber) {
v8::HandleScope scope;
@@ -13748,6 +14450,41 @@ THREADED_TEST(ScriptLineNumber) {
}
+THREADED_TEST(ScriptColumnNumber) {
+ v8::HandleScope scope;
+ LocalContext env;
+ v8::ScriptOrigin origin = v8::ScriptOrigin(v8::String::New("test"),
+ v8::Integer::New(3), v8::Integer::New(2));
+ v8::Handle<v8::String> script = v8::String::New(
+ "function foo() {}\n\n function bar() {}");
+ v8::Script::Compile(script, &origin)->Run();
+ v8::Local<v8::Function> foo = v8::Local<v8::Function>::Cast(
+ env->Global()->Get(v8::String::New("foo")));
+ v8::Local<v8::Function> bar = v8::Local<v8::Function>::Cast(
+ env->Global()->Get(v8::String::New("bar")));
+ CHECK_EQ(14, foo->GetScriptColumnNumber());
+ CHECK_EQ(17, bar->GetScriptColumnNumber());
+}
+
+
+THREADED_TEST(FunctionGetScriptId) {
+ v8::HandleScope scope;
+ LocalContext env;
+ v8::ScriptOrigin origin = v8::ScriptOrigin(v8::String::New("test"),
+ v8::Integer::New(3), v8::Integer::New(2));
+ v8::Handle<v8::String> scriptSource = v8::String::New(
+ "function foo() {}\n\n function bar() {}");
+ v8::Local<v8::Script> script(v8::Script::Compile(scriptSource, &origin));
+ script->Run();
+ v8::Local<v8::Function> foo = v8::Local<v8::Function>::Cast(
+ env->Global()->Get(v8::String::New("foo")));
+ v8::Local<v8::Function> bar = v8::Local<v8::Function>::Cast(
+ env->Global()->Get(v8::String::New("bar")));
+ CHECK_EQ(script->Id(), foo->GetScriptId());
+ CHECK_EQ(script->Id(), bar->GetScriptId());
+}
+
+
static v8::Handle<Value> GetterWhichReturns42(Local<String> name,
const AccessorInfo& info) {
return v8_num(42);
@@ -14008,7 +14745,7 @@ THREADED_TEST(RoundRobinGetFromCache) {
" for (var i = 0; i < 16; i++) values[i] = %_GetFromCache(0, keys[i]);"
" for (var i = 0; i < 16; i++) {"
" var v = %_GetFromCache(0, keys[i]);"
- " if (v !== values[i])"
+ " if (v.toString() !== values[i].toString())"
" return 'Wrong value for ' + "
" keys[i] + ': ' + v + ' vs. ' + values[i];"
" };"
@@ -14850,7 +15587,7 @@ TEST(RegExp) {
// RegExps are objects on which you can set properties.
re->Set(v8_str("property"), v8::Integer::New(32));
v8::Handle<v8::Value> value(CompileRun("re.property"));
- ASSERT_EQ(32, value->Int32Value());
+ CHECK_EQ(32, value->Int32Value());
v8::TryCatch try_catch;
re = v8::RegExp::New(v8_str("foo["), v8::RegExp::kNone);
@@ -15201,10 +15938,12 @@ THREADED_TEST(AllowCodeGenFromStrings) {
LocalContext context;
// eval and the Function constructor allowed by default.
+ CHECK(context->IsCodeGenerationFromStringsAllowed());
CheckCodeGenerationAllowed();
// Disallow eval and the Function constructor.
context->AllowCodeGenerationFromStrings(false);
+ CHECK(!context->IsCodeGenerationFromStringsAllowed());
CheckCodeGenerationDisallowed();
// Allow again.
@@ -15214,10 +15953,12 @@ THREADED_TEST(AllowCodeGenFromStrings) {
// Disallow but setting a global callback that will allow the calls.
context->AllowCodeGenerationFromStrings(false);
V8::SetAllowCodeGenerationFromStringsCallback(&CodeGenerationAllowed);
+ CHECK(!context->IsCodeGenerationFromStringsAllowed());
CheckCodeGenerationAllowed();
// Set a callback that disallows the code generation.
V8::SetAllowCodeGenerationFromStringsCallback(&CodeGenerationDisallowed);
+ CHECK(!context->IsCodeGenerationFromStringsAllowed());
CheckCodeGenerationDisallowed();
}
@@ -15496,3 +16237,167 @@ THREADED_TEST(ForeignFunctionReceiver) {
foreign_context.Dispose();
}
+
+
+uint8_t callback_fired = 0;
+
+
+void CallCompletedCallback1() {
+ i::OS::Print("Firing callback 1.\n");
+ callback_fired ^= 1; // Toggle first bit.
+}
+
+
+void CallCompletedCallback2() {
+ i::OS::Print("Firing callback 2.\n");
+ callback_fired ^= 2; // Toggle second bit.
+}
+
+
+Handle<Value> RecursiveCall(const Arguments& args) {
+ int32_t level = args[0]->Int32Value();
+ if (level < 3) {
+ level++;
+ i::OS::Print("Entering recursion level %d.\n", level);
+ char script[64];
+ i::Vector<char> script_vector(script, sizeof(script));
+ i::OS::SNPrintF(script_vector, "recursion(%d)", level);
+ CompileRun(script_vector.start());
+ i::OS::Print("Leaving recursion level %d.\n", level);
+ CHECK_EQ(0, callback_fired);
+ } else {
+ i::OS::Print("Recursion ends.\n");
+ CHECK_EQ(0, callback_fired);
+ }
+ return Undefined();
+}
+
+
+TEST(CallCompletedCallback) {
+ v8::HandleScope scope;
+ LocalContext env;
+ v8::Handle<v8::FunctionTemplate> recursive_runtime =
+ v8::FunctionTemplate::New(RecursiveCall);
+ env->Global()->Set(v8_str("recursion"),
+ recursive_runtime->GetFunction());
+ // Adding the same callback a second time has no effect.
+ v8::V8::AddCallCompletedCallback(CallCompletedCallback1);
+ v8::V8::AddCallCompletedCallback(CallCompletedCallback1);
+ v8::V8::AddCallCompletedCallback(CallCompletedCallback2);
+ i::OS::Print("--- Script (1) ---\n");
+ Local<Script> script =
+ v8::Script::Compile(v8::String::New("recursion(0)"));
+ script->Run();
+ CHECK_EQ(3, callback_fired);
+
+ i::OS::Print("\n--- Script (2) ---\n");
+ callback_fired = 0;
+ v8::V8::RemoveCallCompletedCallback(CallCompletedCallback1);
+ script->Run();
+ CHECK_EQ(2, callback_fired);
+
+ i::OS::Print("\n--- Function ---\n");
+ callback_fired = 0;
+ Local<Function> recursive_function =
+ Local<Function>::Cast(env->Global()->Get(v8_str("recursion")));
+ v8::Handle<Value> args[] = { v8_num(0) };
+ recursive_function->Call(env->Global(), 1, args);
+ CHECK_EQ(2, callback_fired);
+}
+
+
+void CallCompletedCallbackNoException() {
+ v8::HandleScope scope;
+ CompileRun("1+1;");
+}
+
+
+void CallCompletedCallbackException() {
+ v8::HandleScope scope;
+ CompileRun("throw 'second exception';");
+}
+
+
+TEST(CallCompletedCallbackOneException) {
+ v8::HandleScope scope;
+ LocalContext env;
+ v8::V8::AddCallCompletedCallback(CallCompletedCallbackNoException);
+ CompileRun("throw 'exception';");
+}
+
+
+TEST(CallCompletedCallbackTwoExceptions) {
+ v8::HandleScope scope;
+ LocalContext env;
+ v8::V8::AddCallCompletedCallback(CallCompletedCallbackException);
+ CompileRun("throw 'first exception';");
+}
+
+
+static int probes_counter = 0;
+static int misses_counter = 0;
+static int updates_counter = 0;
+
+
+static int* LookupCounter(const char* name) {
+ if (strcmp(name, "c:V8.MegamorphicStubCacheProbes") == 0) {
+ return &probes_counter;
+ } else if (strcmp(name, "c:V8.MegamorphicStubCacheMisses") == 0) {
+ return &misses_counter;
+ } else if (strcmp(name, "c:V8.MegamorphicStubCacheUpdates") == 0) {
+ return &updates_counter;
+ }
+ return NULL;
+}
+
+
+static const char* kMegamorphicTestProgram =
+ "function ClassA() { };"
+ "function ClassB() { };"
+ "ClassA.prototype.foo = function() { };"
+ "ClassB.prototype.foo = function() { };"
+ "function fooify(obj) { obj.foo(); };"
+ "var a = new ClassA();"
+ "var b = new ClassB();"
+ "for (var i = 0; i < 10000; i++) {"
+ " fooify(a);"
+ " fooify(b);"
+ "}";
+
+
+static void StubCacheHelper(bool primary) {
+ V8::SetCounterFunction(LookupCounter);
+ USE(kMegamorphicTestProgram);
+#ifdef DEBUG
+ i::FLAG_native_code_counters = true;
+ if (primary) {
+ i::FLAG_test_primary_stub_cache = true;
+ } else {
+ i::FLAG_test_secondary_stub_cache = true;
+ }
+ i::FLAG_crankshaft = false;
+ v8::HandleScope scope;
+ LocalContext env;
+ int initial_probes = probes_counter;
+ int initial_misses = misses_counter;
+ int initial_updates = updates_counter;
+ CompileRun(kMegamorphicTestProgram);
+ int probes = probes_counter - initial_probes;
+ int misses = misses_counter - initial_misses;
+ int updates = updates_counter - initial_updates;
+ CHECK_LT(updates, 10);
+ CHECK_LT(misses, 10);
+ CHECK_GE(probes, 10000);
+#endif
+}
+
+
+TEST(SecondaryStubCache) {
+ StubCacheHelper(true);
+}
+
+
+TEST(PrimaryStubCache) {
+ StubCacheHelper(false);
+}
+
diff --git a/src/3rdparty/v8/test/cctest/test-assembler-ia32.cc b/src/3rdparty/v8/test/cctest/test-assembler-ia32.cc
index cdab8f7..815e618 100644
--- a/src/3rdparty/v8/test/cctest/test-assembler-ia32.cc
+++ b/src/3rdparty/v8/test/cctest/test-assembler-ia32.cc
@@ -408,4 +408,72 @@ TEST(AssemblerIa3210) {
__ nop();
}
+
+TEST(AssemblerMultiByteNop) {
+ InitializeVM();
+ v8::HandleScope scope;
+ v8::internal::byte buffer[1024];
+ Assembler assm(Isolate::Current(), buffer, sizeof(buffer));
+ __ push(ebx);
+ __ push(ecx);
+ __ push(edx);
+ __ push(edi);
+ __ push(esi);
+ __ mov(eax, 1);
+ __ mov(ebx, 2);
+ __ mov(ecx, 3);
+ __ mov(edx, 4);
+ __ mov(edi, 5);
+ __ mov(esi, 6);
+ for (int i = 0; i < 16; i++) {
+ int before = assm.pc_offset();
+ __ Nop(i);
+ CHECK_EQ(assm.pc_offset() - before, i);
+ }
+
+ Label fail;
+ __ cmp(eax, 1);
+ __ j(not_equal, &fail);
+ __ cmp(ebx, 2);
+ __ j(not_equal, &fail);
+ __ cmp(ecx, 3);
+ __ j(not_equal, &fail);
+ __ cmp(edx, 4);
+ __ j(not_equal, &fail);
+ __ cmp(edi, 5);
+ __ j(not_equal, &fail);
+ __ cmp(esi, 6);
+ __ j(not_equal, &fail);
+ __ mov(eax, 42);
+ __ pop(esi);
+ __ pop(edi);
+ __ pop(edx);
+ __ pop(ecx);
+ __ pop(ebx);
+ __ ret(0);
+ __ bind(&fail);
+ __ mov(eax, 13);
+ __ pop(esi);
+ __ pop(edi);
+ __ pop(edx);
+ __ pop(ecx);
+ __ pop(ebx);
+ __ ret(0);
+
+ CodeDesc desc;
+ assm.GetCode(&desc);
+ Code* code = Code::cast(HEAP->CreateCode(
+ desc,
+ Code::ComputeFlags(Code::STUB),
+ Handle<Object>(HEAP->undefined_value()))->ToObjectChecked());
+ CHECK(code->IsCode());
+
+ F0 f = FUNCTION_CAST<F0>(code->entry());
+ int res = f();
+ CHECK_EQ(42, res);
+}
+
+
+
+
#undef __
diff --git a/src/3rdparty/v8/test/cctest/test-assembler-mips.cc b/src/3rdparty/v8/test/cctest/test-assembler-mips.cc
index a6c76f0..6985433 100644
--- a/src/3rdparty/v8/test/cctest/test-assembler-mips.cc
+++ b/src/3rdparty/v8/test/cctest/test-assembler-mips.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -219,21 +219,21 @@ TEST(MIPS2) {
// Bit twiddling instructions & conditional moves.
// Uses t0-t7 as set above.
- __ clz(v0, t0); // 29
- __ clz(v1, t1); // 19
+ __ Clz(v0, t0); // 29
+ __ Clz(v1, t1); // 19
__ addu(v0, v0, v1); // 48
- __ clz(v1, t2); // 3
+ __ Clz(v1, t2); // 3
__ addu(v0, v0, v1); // 51
- __ clz(v1, t7); // 0
+ __ Clz(v1, t7); // 0
__ addu(v0, v0, v1); // 51
__ Branch(&error, ne, v0, Operand(51));
- __ movn(a0, t3, t0); // Move a0<-t3 (t0 is NOT 0).
+ __ Movn(a0, t3, t0); // Move a0<-t3 (t0 is NOT 0).
__ Ins(a0, t1, 12, 8); // 0x7ff34fff
__ Branch(&error, ne, a0, Operand(0x7ff34fff));
- __ movz(a0, t6, t7); // a0 not updated (t7 is NOT 0).
+ __ Movz(a0, t6, t7); // a0 not updated (t7 is NOT 0).
__ Ext(a1, a0, 8, 12); // 0x34f
__ Branch(&error, ne, a1, Operand(0x34f));
- __ movz(a0, t6, v1); // a0<-t6, v0 is 0, from 8 instr back.
+ __ Movz(a0, t6, v1); // a0<-t6, v0 is 0, from 8 instr back.
__ Branch(&error, ne, a0, Operand(t6));
// Everything was correctly executed. Load the expected result.
@@ -579,8 +579,13 @@ TEST(MIPS7) {
__ bind(&neither_is_nan);
- __ c(OLT, D, f6, f4, 2);
- __ bc1t(&less_than, 2);
+ if (kArchVariant == kLoongson) {
+ __ c(OLT, D, f6, f4);
+ __ bc1t(&less_than);
+ } else {
+ __ c(OLT, D, f6, f4, 2);
+ __ bc1t(&less_than, 2);
+ }
__ nop();
__ sw(zero_reg, MemOperand(a0, OFFSET_OF(T, result)) );
__ Branch(&outa_here);
@@ -774,7 +779,7 @@ TEST(MIPS10) {
Assembler assm(Isolate::Current(), NULL, 0);
Label L, C;
- if (CpuFeatures::IsSupported(FPU) && mips32r2) {
+ if (CpuFeatures::IsSupported(FPU) && kArchVariant == kMips32r2) {
CpuFeatures::Scope scope(FPU);
// Load all structure elements to registers.
diff --git a/src/3rdparty/v8/test/cctest/test-assembler-x64.cc b/src/3rdparty/v8/test/cctest/test-assembler-x64.cc
index 28f7c9b..d81923f 100644
--- a/src/3rdparty/v8/test/cctest/test-assembler-x64.cc
+++ b/src/3rdparty/v8/test/cctest/test-assembler-x64.cc
@@ -36,6 +36,7 @@
#include "cctest.h"
using v8::internal::Assembler;
+using v8::internal::Code;
using v8::internal::CodeDesc;
using v8::internal::FUNCTION_CAST;
using v8::internal::Immediate;
@@ -53,6 +54,7 @@ using v8::internal::r15;
using v8::internal::r8;
using v8::internal::r9;
using v8::internal::rax;
+using v8::internal::rbx;
using v8::internal::rbp;
using v8::internal::rcx;
using v8::internal::rdi;
@@ -86,8 +88,18 @@ static const v8::internal::Register arg2 = rsi;
#define __ assm.
+static v8::Persistent<v8::Context> env;
+
+
+static void InitializeVM() {
+ if (env.IsEmpty()) {
+ env = v8::Context::New();
+ }
+}
+
+
TEST(AssemblerX64ReturnOperation) {
- OS::Setup();
+ OS::SetUp();
// Allocate an executable page of memory.
size_t actual_size;
byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -109,7 +121,7 @@ TEST(AssemblerX64ReturnOperation) {
}
TEST(AssemblerX64StackOperations) {
- OS::Setup();
+ OS::SetUp();
// Allocate an executable page of memory.
size_t actual_size;
byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -141,7 +153,7 @@ TEST(AssemblerX64StackOperations) {
}
TEST(AssemblerX64ArithmeticOperations) {
- OS::Setup();
+ OS::SetUp();
// Allocate an executable page of memory.
size_t actual_size;
byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -163,7 +175,7 @@ TEST(AssemblerX64ArithmeticOperations) {
}
TEST(AssemblerX64ImulOperation) {
- OS::Setup();
+ OS::SetUp();
// Allocate an executable page of memory.
size_t actual_size;
byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -191,7 +203,7 @@ TEST(AssemblerX64ImulOperation) {
}
TEST(AssemblerX64MemoryOperands) {
- OS::Setup();
+ OS::SetUp();
// Allocate an executable page of memory.
size_t actual_size;
byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -225,7 +237,7 @@ TEST(AssemblerX64MemoryOperands) {
}
TEST(AssemblerX64ControlFlow) {
- OS::Setup();
+ OS::SetUp();
// Allocate an executable page of memory.
size_t actual_size;
byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -254,7 +266,7 @@ TEST(AssemblerX64ControlFlow) {
}
TEST(AssemblerX64LoopImmediates) {
- OS::Setup();
+ OS::SetUp();
// Allocate an executable page of memory.
size_t actual_size;
byte* buffer = static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
@@ -359,4 +371,73 @@ TEST(AssemblerX64LabelChaining) {
__ nop();
}
+
+TEST(AssemblerMultiByteNop) {
+ InitializeVM();
+ v8::HandleScope scope;
+ v8::internal::byte buffer[1024];
+ Assembler assm(Isolate::Current(), buffer, sizeof(buffer));
+ __ push(rbx);
+ __ push(rcx);
+ __ push(rdx);
+ __ push(rdi);
+ __ push(rsi);
+ __ movq(rax, Immediate(1));
+ __ movq(rbx, Immediate(2));
+ __ movq(rcx, Immediate(3));
+ __ movq(rdx, Immediate(4));
+ __ movq(rdi, Immediate(5));
+ __ movq(rsi, Immediate(6));
+ for (int i = 0; i < 16; i++) {
+ int before = assm.pc_offset();
+ __ Nop(i);
+ CHECK_EQ(assm.pc_offset() - before, i);
+ }
+
+ Label fail;
+ __ cmpq(rax, Immediate(1));
+ __ j(not_equal, &fail);
+ __ cmpq(rbx, Immediate(2));
+ __ j(not_equal, &fail);
+ __ cmpq(rcx, Immediate(3));
+ __ j(not_equal, &fail);
+ __ cmpq(rdx, Immediate(4));
+ __ j(not_equal, &fail);
+ __ cmpq(rdi, Immediate(5));
+ __ j(not_equal, &fail);
+ __ cmpq(rsi, Immediate(6));
+ __ j(not_equal, &fail);
+ __ movq(rax, Immediate(42));
+ __ pop(rsi);
+ __ pop(rdi);
+ __ pop(rdx);
+ __ pop(rcx);
+ __ pop(rbx);
+ __ ret(0);
+ __ bind(&fail);
+ __ movq(rax, Immediate(13));
+ __ pop(rsi);
+ __ pop(rdi);
+ __ pop(rdx);
+ __ pop(rcx);
+ __ pop(rbx);
+ __ ret(0);
+
+ CodeDesc desc;
+ assm.GetCode(&desc);
+ Code* code = Code::cast(HEAP->CreateCode(
+ desc,
+ Code::ComputeFlags(Code::STUB),
+ v8::internal::Handle<v8::internal::Object>(
+ HEAP->undefined_value()))->ToObjectChecked());
+ CHECK(code->IsCode());
+
+ F0 f = FUNCTION_CAST<F0>(code->entry());
+ int res = f();
+ CHECK_EQ(42, res);
+}
+
+
+
+
#undef __
diff --git a/src/3rdparty/v8/test/cctest/test-ast.cc b/src/3rdparty/v8/test/cctest/test-ast.cc
index 2aa7207..80c7fdf 100644
--- a/src/3rdparty/v8/test/cctest/test-ast.cc
+++ b/src/3rdparty/v8/test/cctest/test-ast.cc
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -40,7 +40,8 @@ TEST(List) {
CHECK_EQ(0, list->length());
ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
- AstNode* node = new(ZONE) EmptyStatement();
+ AstNodeFactory<AstNullVisitor> factory(Isolate::Current());
+ AstNode* node = factory.NewEmptyStatement();
list->Add(node);
CHECK_EQ(1, list->length());
CHECK_EQ(node, list->at(0));
diff --git a/src/3rdparty/v8/test/cctest/test-compiler.cc b/src/3rdparty/v8/test/cctest/test-compiler.cc
index 7f63ebc..9ca0b0a 100644
--- a/src/3rdparty/v8/test/cctest/test-compiler.cc
+++ b/src/3rdparty/v8/test/cctest/test-compiler.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -270,8 +270,7 @@ TEST(UncaughtThrow) {
CHECK(!fun.is_null());
bool has_pending_exception;
Handle<JSObject> global(Isolate::Current()->context()->global());
- Handle<Object> result(
- Execution::Call(fun, global, 0, NULL, &has_pending_exception));
+ Execution::Call(fun, global, 0, NULL, &has_pending_exception);
CHECK(has_pending_exception);
CHECK_EQ(42.0, Isolate::Current()->pending_exception()->
ToObjectChecked()->Number());
diff --git a/src/3rdparty/v8/test/cctest/test-cpu-profiler.cc b/src/3rdparty/v8/test/cctest/test-cpu-profiler.cc
index f567a0f..b10e688 100644
--- a/src/3rdparty/v8/test/cctest/test-cpu-profiler.cc
+++ b/src/3rdparty/v8/test/cctest/test-cpu-profiler.cc
@@ -216,7 +216,7 @@ TEST(TickEvents) {
TEST(CrashIfStoppingLastNonExistentProfile) {
InitializeVM();
TestSetup test_setup;
- CpuProfiler::Setup();
+ CpuProfiler::SetUp();
CpuProfiler::StartProfiling("1");
CpuProfiler::StopProfiling("2");
CpuProfiler::StartProfiling("1");
@@ -268,7 +268,7 @@ TEST(Issue1398) {
TEST(DeleteAllCpuProfiles) {
InitializeVM();
TestSetup test_setup;
- CpuProfiler::Setup();
+ CpuProfiler::SetUp();
CHECK_EQ(0, CpuProfiler::GetProfilesCount());
CpuProfiler::DeleteAllProfiles();
CHECK_EQ(0, CpuProfiler::GetProfilesCount());
diff --git a/src/3rdparty/v8/test/cctest/test-dataflow.cc b/src/3rdparty/v8/test/cctest/test-dataflow.cc
index ad48f55..a63008d 100644
--- a/src/3rdparty/v8/test/cctest/test-dataflow.cc
+++ b/src/3rdparty/v8/test/cctest/test-dataflow.cc
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -36,16 +36,17 @@ using namespace v8::internal;
TEST(BitVector) {
v8::internal::V8::Initialize(NULL);
- ZoneScope zone(Isolate::Current(), DELETE_ON_EXIT);
+ ZoneScope zone_scope(Isolate::Current(), DELETE_ON_EXIT);
+ Zone* zone = ZONE;
{
- BitVector v(15);
+ BitVector v(15, zone);
v.Add(1);
CHECK(v.Contains(1));
v.Remove(0);
CHECK(!v.Contains(0));
v.Add(0);
v.Add(1);
- BitVector w(15);
+ BitVector w(15, zone);
w.Add(1);
v.Intersect(w);
CHECK(!v.Contains(0));
@@ -53,7 +54,7 @@ TEST(BitVector) {
}
{
- BitVector v(64);
+ BitVector v(64, zone);
v.Add(27);
v.Add(30);
v.Add(31);
@@ -71,9 +72,9 @@ TEST(BitVector) {
}
{
- BitVector v(15);
+ BitVector v(15, zone);
v.Add(0);
- BitVector w(15);
+ BitVector w(15, zone);
w.Add(1);
v.Union(w);
CHECK(v.Contains(0));
@@ -81,13 +82,13 @@ TEST(BitVector) {
}
{
- BitVector v(15);
+ BitVector v(15, zone);
v.Add(0);
- BitVector w(15);
+ BitVector w(15, zone);
w = v;
CHECK(w.Contains(0));
w.Add(1);
- BitVector u(w);
+ BitVector u(w, zone);
CHECK(u.Contains(0));
CHECK(u.Contains(1));
v.Union(w);
@@ -96,9 +97,9 @@ TEST(BitVector) {
}
{
- BitVector v(35);
+ BitVector v(35, zone);
v.Add(0);
- BitVector w(35);
+ BitVector w(35, zone);
w.Add(33);
v.Union(w);
CHECK(v.Contains(0));
@@ -106,15 +107,15 @@ TEST(BitVector) {
}
{
- BitVector v(35);
+ BitVector v(35, zone);
v.Add(32);
v.Add(33);
- BitVector w(35);
+ BitVector w(35, zone);
w.Add(33);
v.Intersect(w);
CHECK(!v.Contains(32));
CHECK(v.Contains(33));
- BitVector r(35);
+ BitVector r(35, zone);
r.CopyFrom(v);
CHECK(!r.Contains(32));
CHECK(r.Contains(33));
diff --git a/src/3rdparty/v8/test/cctest/test-date.cc b/src/3rdparty/v8/test/cctest/test-date.cc
new file mode 100644
index 0000000..903a63a
--- /dev/null
+++ b/src/3rdparty/v8/test/cctest/test-date.cc
@@ -0,0 +1,168 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "v8.h"
+
+#include "global-handles.h"
+#include "snapshot.h"
+#include "cctest.h"
+
+using namespace v8::internal;
+
+class DateCacheMock: public DateCache {
+ public:
+ struct Rule {
+ int year, start_month, start_day, end_month, end_day, offset_sec;
+ };
+
+ DateCacheMock(int local_offset, Rule* rules, int rules_count)
+ : local_offset_(local_offset), rules_(rules), rules_count_(rules_count) {}
+
+ protected:
+ virtual int GetDaylightSavingsOffsetFromOS(int64_t time_sec) {
+ int days = DaysFromTime(time_sec * 1000);
+ int time_in_day_sec = TimeInDay(time_sec * 1000, days) / 1000;
+ int year, month, day;
+ YearMonthDayFromDays(days, &year, &month, &day);
+ Rule* rule = FindRuleFor(year, month, day, time_in_day_sec);
+ return rule == NULL ? 0 : rule->offset_sec * 1000;
+ }
+
+
+ virtual int GetLocalOffsetFromOS() {
+ return local_offset_;
+ }
+
+ private:
+ Rule* FindRuleFor(int year, int month, int day, int time_in_day_sec) {
+ Rule* result = NULL;
+ for (int i = 0; i < rules_count_; i++)
+ if (Match(&rules_[i], year, month, day, time_in_day_sec)) {
+ result = &rules_[i];
+ }
+ return result;
+ }
+
+
+ bool Match(Rule* rule, int year, int month, int day, int time_in_day_sec) {
+ if (rule->year != 0 && rule->year != year) return false;
+ if (rule->start_month > month) return false;
+ if (rule->end_month < month) return false;
+ int start_day = ComputeRuleDay(year, rule->start_month, rule->start_day);
+ if (rule->start_month == month && start_day > day) return false;
+ if (rule->start_month == month && start_day == day &&
+ 2 * 3600 > time_in_day_sec)
+ return false;
+ int end_day = ComputeRuleDay(year, rule->end_month, rule->end_day);
+ if (rule->end_month == month && end_day < day) return false;
+ if (rule->end_month == month && end_day == day &&
+ 2 * 3600 <= time_in_day_sec)
+ return false;
+ return true;
+ }
+
+
+ int ComputeRuleDay(int year, int month, int day) {
+ if (day != 0) return day;
+ int days = DaysFromYearMonth(year, month);
+ // Find the first Sunday of the month.
+ while (Weekday(days + day) != 6) day++;
+ return day + 1;
+ }
+
+ int local_offset_;
+ Rule* rules_;
+ int rules_count_;
+};
+
+static int64_t TimeFromYearMonthDay(DateCache* date_cache,
+ int year,
+ int month,
+ int day) {
+ int64_t result = date_cache->DaysFromYearMonth(year, month);
+ return (result + day - 1) * DateCache::kMsPerDay;
+}
+
+static void CheckDST(int64_t time) {
+ Isolate* isolate = Isolate::Current();
+ DateCache* date_cache = isolate->date_cache();
+ int64_t actual = date_cache->ToLocal(time);
+ int64_t expected = time + date_cache->GetLocalOffsetFromOS() +
+ date_cache->GetDaylightSavingsOffsetFromOS(time / 1000);
+ CHECK_EQ(actual, expected);
+}
+
+
+TEST(DaylightSavingsTime) {
+ LocalContext context;
+ v8::HandleScope scope;
+ Isolate* isolate = Isolate::Current();
+ DateCacheMock::Rule rules[] = {
+ {0, 2, 0, 10, 0, 3600}, // DST from March to November in any year.
+ {2010, 2, 0, 7, 20, 3600}, // DST from March to August 20 in 2010.
+ {2010, 7, 20, 8, 10, 0}, // No DST from August 20 to September 10 in 2010.
+ {2010, 8, 10, 10, 0, 3600}, // DST from September 10 to November in 2010.
+ };
+
+ int local_offset_ms = -36000000; // -10 hours.
+
+ DateCacheMock* date_cache =
+ new DateCacheMock(local_offset_ms, rules, ARRAY_SIZE(rules));
+
+ isolate->set_date_cache(date_cache);
+
+ int64_t start_of_2010 = TimeFromYearMonthDay(date_cache, 2010, 0, 1);
+ int64_t start_of_2011 = TimeFromYearMonthDay(date_cache, 2011, 0, 1);
+ int64_t august_20 = TimeFromYearMonthDay(date_cache, 2010, 7, 20);
+ int64_t september_10 = TimeFromYearMonthDay(date_cache, 2010, 8, 10);
+ CheckDST((august_20 + september_10) / 2);
+ CheckDST(september_10);
+ CheckDST(september_10 + 2 * 3600);
+ CheckDST(september_10 + 2 * 3600 - 1000);
+ CheckDST(august_20 + 2 * 3600);
+ CheckDST(august_20 + 2 * 3600 - 1000);
+ CheckDST(august_20);
+ // Check each day of 2010.
+ for (int64_t time = start_of_2011 + 2 * 3600;
+ time >= start_of_2010;
+ time -= DateCache::kMsPerDay) {
+ CheckDST(time);
+ CheckDST(time - 1000);
+ CheckDST(time + 1000);
+ }
+ // Check one day from 2010 to 2100.
+ for (int year = 2100; year >= 2010; year--) {
+ CheckDST(TimeFromYearMonthDay(date_cache, year, 5, 5));
+ }
+ CheckDST((august_20 + september_10) / 2);
+ CheckDST(september_10);
+ CheckDST(september_10 + 2 * 3600);
+ CheckDST(september_10 + 2 * 3600 - 1000);
+ CheckDST(august_20 + 2 * 3600);
+ CheckDST(august_20 + 2 * 3600 - 1000);
+ CheckDST(august_20);
+}
diff --git a/src/3rdparty/v8/test/cctest/test-debug.cc b/src/3rdparty/v8/test/cctest/test-debug.cc
index 6089478..ffa8458 100644
--- a/src/3rdparty/v8/test/cctest/test-debug.cc
+++ b/src/3rdparty/v8/test/cctest/test-debug.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -856,7 +856,7 @@ static void DebugEventRemoveBreakPoint(v8::DebugEvent event,
if (event == v8::Break) {
break_point_hit_count++;
- v8::Handle<v8::Function> fun(v8::Handle<v8::Function>::Cast(data));
+ CHECK(data->IsFunction());
ClearBreakPoint(debug_event_remove_break_point);
}
}
@@ -1447,8 +1447,7 @@ TEST(BreakPointSurviveGC) {
// Test IC store break point with garbage collection.
{
- v8::Local<v8::Function> bar(
- CompileFunction(&env, "function foo(){}", "foo"));
+ CompileFunction(&env, "function foo(){}", "foo");
foo = CompileFunction(&env, "function foo(){bar=0;}", "foo");
SetBreakPoint(foo, 0);
}
@@ -1456,8 +1455,7 @@ TEST(BreakPointSurviveGC) {
// Test IC load break point with garbage collection.
{
- v8::Local<v8::Function> bar(
- CompileFunction(&env, "function foo(){}", "foo"));
+ CompileFunction(&env, "function foo(){}", "foo");
foo = CompileFunction(&env, "bar=1;function foo(){var x=bar;}", "foo");
SetBreakPoint(foo, 0);
}
@@ -1465,8 +1463,7 @@ TEST(BreakPointSurviveGC) {
// Test IC call break point with garbage collection.
{
- v8::Local<v8::Function> bar(
- CompileFunction(&env, "function foo(){}", "foo"));
+ CompileFunction(&env, "function foo(){}", "foo");
foo = CompileFunction(&env,
"function bar(){};function foo(){bar();}",
"foo");
@@ -1476,8 +1473,7 @@ TEST(BreakPointSurviveGC) {
// Test return break point with garbage collection.
{
- v8::Local<v8::Function> bar(
- CompileFunction(&env, "function foo(){}", "foo"));
+ CompileFunction(&env, "function foo(){}", "foo");
foo = CompileFunction(&env, "function foo(){}", "foo");
SetBreakPoint(foo, 0);
}
@@ -1485,8 +1481,7 @@ TEST(BreakPointSurviveGC) {
// Test non IC break point with garbage collection.
{
- v8::Local<v8::Function> bar(
- CompileFunction(&env, "function foo(){}", "foo"));
+ CompileFunction(&env, "function foo(){}", "foo");
foo = CompileFunction(&env, "function foo(){var bar=0;}", "foo");
SetBreakPoint(foo, 0);
}
@@ -2305,65 +2300,6 @@ TEST(ScriptBreakPointTopLevelCrash) {
CheckDebuggerUnloaded();
}
-// Test that breakpoint_relocation flag is honored
-TEST(ScriptBreakPointNoRelocation) {
- i::FLAG_breakpoint_relocation = false;
-
- v8::HandleScope scope;
- DebugLocalContext env;
- env.ExposeDebug();
-
- // Create a function for checking the function when hitting a break point.
- frame_function_name = CompileFunction(&env,
- frame_function_name_source,
- "frame_function_name");
-
- v8::Debug::SetDebugEventListener(DebugEventBreakPointHitCount,
- v8::Undefined());
-
- v8::Local<v8::String> script1 = v8::String::New(
- "a = 0 // line 0\n"
- " // line 1\n"
- " // line 2\n"
- " // line 3\n"
- "function f() { // line 4\n"
- " return 0; // line 5\n"
- "} // line 6");
-
- // Set the script break point on the empty line
- SetScriptBreakPointByNameFromJS("test.html", 2, -1);
-
- // Compile the script and call the function.
- v8::ScriptOrigin origin(v8::String::New("test.html"), v8::Integer::New(0));
- v8::Script::Compile(script1, &origin)->Run();
- v8::Local<v8::Function> f
- = v8::Local<v8::Function>::Cast(env->Global()->Get(v8::String::New("f")));
- f->Call(env->Global(), 0, NULL);
-
- // Check that a break point was not hit
- CHECK_EQ(0, break_point_hit_count);
-
- v8::Local<v8::String> script2 = v8::String::New(
- "a = 0 // line 0\n"
- "function g() { // line 1\n"
- " return 0; // line 2\n"
- "} // line 3\n"
- "function f() { // line 4\n"
- " return 0; // line 5\n"
- "} // line 6");
-
- // Compile the script and call the new function
- v8::Script::Compile(script2, &origin)->Run();
- v8::Local<v8::Function> g
- = v8::Local<v8::Function>::Cast(env->Global()->Get(v8::String::New("g")));
- g->Call(env->Global(), 0, NULL);
-
- // Check that a break point was not hit
- CHECK_EQ(1, break_point_hit_count);
-
- v8::Debug::SetDebugEventListener(NULL);
- CheckDebuggerUnloaded();
-}
// Test that it is possible to remove the last break point for a function
// inside the break handling of that break point.
@@ -2802,14 +2738,14 @@ TEST(DebugStepKeyedLoadLoop) {
v8::Handle<v8::Value> args[kArgc] = { a };
foo->Call(env->Global(), kArgc, args);
- // Setup break point and step through the function.
+ // Set up break point and step through the function.
SetBreakPoint(foo, 3);
step_action = StepNext;
break_point_hit_count = 0;
foo->Call(env->Global(), kArgc, args);
// With stepping all break locations are hit.
- CHECK_EQ(33, break_point_hit_count);
+ CHECK_EQ(34, break_point_hit_count);
v8::Debug::SetDebugEventListener(NULL);
CheckDebuggerUnloaded();
@@ -2849,14 +2785,14 @@ TEST(DebugStepKeyedStoreLoop) {
v8::Handle<v8::Value> args[kArgc] = { a };
foo->Call(env->Global(), kArgc, args);
- // Setup break point and step through the function.
+ // Set up break point and step through the function.
SetBreakPoint(foo, 3);
step_action = StepNext;
break_point_hit_count = 0;
foo->Call(env->Global(), kArgc, args);
// With stepping all break locations are hit.
- CHECK_EQ(32, break_point_hit_count);
+ CHECK_EQ(33, break_point_hit_count);
v8::Debug::SetDebugEventListener(NULL);
CheckDebuggerUnloaded();
@@ -2893,14 +2829,14 @@ TEST(DebugStepNamedLoadLoop) {
// Call function without any break points to ensure inlining is in place.
foo->Call(env->Global(), 0, NULL);
- // Setup break point and step through the function.
+ // Set up break point and step through the function.
SetBreakPoint(foo, 4);
step_action = StepNext;
break_point_hit_count = 0;
foo->Call(env->Global(), 0, NULL);
// With stepping all break locations are hit.
- CHECK_EQ(53, break_point_hit_count);
+ CHECK_EQ(54, break_point_hit_count);
v8::Debug::SetDebugEventListener(NULL);
CheckDebuggerUnloaded();
@@ -2928,7 +2864,7 @@ static void DoDebugStepNamedStoreLoop(int expected) {
// Call function without any break points to ensure inlining is in place.
foo->Call(env->Global(), 0, NULL);
- // Setup break point and step through the function.
+ // Set up break point and step through the function.
SetBreakPoint(foo, 3);
step_action = StepNext;
break_point_hit_count = 0;
@@ -2944,7 +2880,7 @@ static void DoDebugStepNamedStoreLoop(int expected) {
// Test of the stepping mechanism for named load in a loop.
TEST(DebugStepNamedStoreLoop) {
- DoDebugStepNamedStoreLoop(22);
+ DoDebugStepNamedStoreLoop(23);
}
@@ -3316,7 +3252,7 @@ TEST(DebugStepForContinue) {
v8::Handle<v8::Value> argv_10[argc] = { v8::Number::New(10) };
result = foo->Call(env->Global(), argc, argv_10);
CHECK_EQ(5, result->Int32Value());
- CHECK_EQ(50, break_point_hit_count);
+ CHECK_EQ(51, break_point_hit_count);
// Looping 100 times.
step_action = StepIn;
@@ -3324,7 +3260,7 @@ TEST(DebugStepForContinue) {
v8::Handle<v8::Value> argv_100[argc] = { v8::Number::New(100) };
result = foo->Call(env->Global(), argc, argv_100);
CHECK_EQ(50, result->Int32Value());
- CHECK_EQ(455, break_point_hit_count);
+ CHECK_EQ(456, break_point_hit_count);
// Get rid of the debug event listener.
v8::Debug::SetDebugEventListener(NULL);
@@ -3368,7 +3304,7 @@ TEST(DebugStepForBreak) {
v8::Handle<v8::Value> argv_10[argc] = { v8::Number::New(10) };
result = foo->Call(env->Global(), argc, argv_10);
CHECK_EQ(9, result->Int32Value());
- CHECK_EQ(53, break_point_hit_count);
+ CHECK_EQ(54, break_point_hit_count);
// Looping 100 times.
step_action = StepIn;
@@ -3376,7 +3312,7 @@ TEST(DebugStepForBreak) {
v8::Handle<v8::Value> argv_100[argc] = { v8::Number::New(100) };
result = foo->Call(env->Global(), argc, argv_100);
CHECK_EQ(99, result->Int32Value());
- CHECK_EQ(503, break_point_hit_count);
+ CHECK_EQ(504, break_point_hit_count);
// Get rid of the debug event listener.
v8::Debug::SetDebugEventListener(NULL);
@@ -3810,8 +3746,7 @@ TEST(BreakOnException) {
v8::internal::Isolate::Current()->TraceException(false);
// Create functions for testing break on exception.
- v8::Local<v8::Function> throws(
- CompileFunction(&env, "function throws(){throw 1;}", "throws"));
+ CompileFunction(&env, "function throws(){throw 1;}", "throws");
v8::Local<v8::Function> caught =
CompileFunction(&env,
"function caught(){try {throws();} catch(e) {};}",
@@ -4287,9 +4222,9 @@ TEST(InterceptorPropertyMirror) {
// Get mirrors for the three objects with interceptor.
CompileRun(
- "named_mirror = debug.MakeMirror(intercepted_named);"
- "indexed_mirror = debug.MakeMirror(intercepted_indexed);"
- "both_mirror = debug.MakeMirror(intercepted_both)");
+ "var named_mirror = debug.MakeMirror(intercepted_named);"
+ "var indexed_mirror = debug.MakeMirror(intercepted_indexed);"
+ "var both_mirror = debug.MakeMirror(intercepted_both)");
CHECK(CompileRun(
"named_mirror instanceof debug.ObjectMirror")->BooleanValue());
CHECK(CompileRun(
@@ -4330,7 +4265,7 @@ TEST(InterceptorPropertyMirror) {
CHECK_EQ(5, CompileRun(source)->Int32Value());
// Get the interceptor properties for the object with only named interceptor.
- CompileRun("named_values = named_mirror.properties()");
+ CompileRun("var named_values = named_mirror.properties()");
// Check that the properties are interceptor properties.
for (int i = 0; i < 3; i++) {
@@ -4349,7 +4284,7 @@ TEST(InterceptorPropertyMirror) {
// Get the interceptor properties for the object with only indexed
// interceptor.
- CompileRun("indexed_values = indexed_mirror.properties()");
+ CompileRun("var indexed_values = indexed_mirror.properties()");
// Check that the properties are interceptor properties.
for (int i = 0; i < 2; i++) {
@@ -4361,7 +4296,7 @@ TEST(InterceptorPropertyMirror) {
// Get the interceptor properties for the object with both types of
// interceptors.
- CompileRun("both_values = both_mirror.properties()");
+ CompileRun("var both_values = both_mirror.properties()");
// Check that the properties are interceptor properties.
for (int i = 0; i < 5; i++) {
@@ -4417,10 +4352,10 @@ TEST(HiddenPrototypePropertyMirror) {
// Get mirrors for the four objects.
CompileRun(
- "o0_mirror = debug.MakeMirror(o0);"
- "o1_mirror = debug.MakeMirror(o1);"
- "o2_mirror = debug.MakeMirror(o2);"
- "o3_mirror = debug.MakeMirror(o3)");
+ "var o0_mirror = debug.MakeMirror(o0);"
+ "var o1_mirror = debug.MakeMirror(o1);"
+ "var o2_mirror = debug.MakeMirror(o2);"
+ "var o3_mirror = debug.MakeMirror(o3)");
CHECK(CompileRun("o0_mirror instanceof debug.ObjectMirror")->BooleanValue());
CHECK(CompileRun("o1_mirror instanceof debug.ObjectMirror")->BooleanValue());
CHECK(CompileRun("o2_mirror instanceof debug.ObjectMirror")->BooleanValue());
@@ -4506,11 +4441,11 @@ TEST(NativeGetterPropertyMirror) {
CHECK_EQ(10, CompileRun("instance.x")->Int32Value());
// Get mirror for the object with property getter.
- CompileRun("instance_mirror = debug.MakeMirror(instance);");
+ CompileRun("var instance_mirror = debug.MakeMirror(instance);");
CHECK(CompileRun(
"instance_mirror instanceof debug.ObjectMirror")->BooleanValue());
- CompileRun("named_names = instance_mirror.propertyNames();");
+ CompileRun("var named_names = instance_mirror.propertyNames();");
CHECK_EQ(1, CompileRun("named_names.length")->Int32Value());
CHECK(CompileRun("named_names[0] == 'x'")->BooleanValue());
CHECK(CompileRun(
@@ -4542,7 +4477,7 @@ TEST(NativeGetterThrowingErrorPropertyMirror) {
env->Global()->Set(v8::String::New("instance"), named->NewInstance());
// Get mirror for the object with property getter.
- CompileRun("instance_mirror = debug.MakeMirror(instance);");
+ CompileRun("var instance_mirror = debug.MakeMirror(instance);");
CHECK(CompileRun(
"instance_mirror instanceof debug.ObjectMirror")->BooleanValue());
CompileRun("named_names = instance_mirror.propertyNames();");
@@ -5608,8 +5543,6 @@ TEST(DebuggerUnload) {
// Get the test functions again.
v8::Local<v8::Function> foo(v8::Local<v8::Function>::Cast(
env->Global()->Get(v8::String::New("foo"))));
- v8::Local<v8::Function> bar(v8::Local<v8::Function>::Cast(
- env->Global()->Get(v8::String::New("foo"))));
foo->Call(env->Global(), 0, NULL);
CHECK_EQ(0, break_point_hit_count);
@@ -5768,7 +5701,7 @@ void HostDispatchV8Thread::Run() {
v8::HandleScope scope;
DebugLocalContext env;
- // Setup message and host dispatch handlers.
+ // Set up message and host dispatch handlers.
v8::Debug::SetMessageHandler2(HostDispatchMessageHandler);
v8::Debug::SetHostDispatchHandler(HostDispatchDispatchHandler, 10 /* ms */);
@@ -5856,7 +5789,7 @@ void DebugMessageDispatchV8Thread::Run() {
v8::HandleScope scope;
DebugLocalContext env;
- // Setup debug message dispatch handler.
+ // Set up debug message dispatch handler.
v8::Debug::SetDebugMessageDispatchHandler(DebugMessageHandler);
CompileRun("var y = 1 + 2;\n");
@@ -5910,7 +5843,7 @@ TEST(DebuggerAgent) {
bool ok;
// Initialize the socket library.
- i::Socket::Setup();
+ i::Socket::SetUp();
// Test starting and stopping the agent without any client connection.
debugger->StartAgent("test", kPort1);
@@ -6008,7 +5941,7 @@ TEST(DebuggerAgentProtocolOverflowHeader) {
OS::SNPrintF(i::Vector<char>(port_str, kPortBufferLen), "%d", kPort);
// Initialize the socket library.
- i::Socket::Setup();
+ i::Socket::SetUp();
// Create a socket server to receive a debugger agent message.
DebuggerAgentProtocolServerThread* server =
@@ -6087,6 +6020,8 @@ TEST(DebugGetLoadedScripts) {
EmptyExternalStringResource source_ext_str;
v8::Local<v8::String> source = v8::String::NewExternal(&source_ext_str);
v8::Handle<v8::Script> evil_script(v8::Script::Compile(source));
+ // "use" evil_script to make the compiler happy.
+ (void) evil_script;
Handle<i::ExternalTwoByteString> i_source(
i::ExternalTwoByteString::cast(*v8::Utils::OpenHandle(*source)));
// This situation can happen if source was an external string disposed
@@ -6734,7 +6669,7 @@ static void BreakMessageHandler(const v8::Debug::Message& message) {
break_point_hit_count++;
v8::HandleScope scope;
- v8::Handle<v8::String> json(message.GetJSON());
+ message.GetJSON();
SendContinueCommand();
} else if (message.IsEvent() && message.GetEvent() == v8::AfterCompile) {
@@ -6745,7 +6680,7 @@ static void BreakMessageHandler(const v8::Debug::Message& message) {
isolate->stack_guard()->DebugBreak();
// Force serialization to trigger some internal JS execution.
- v8::Handle<v8::String> json(message.GetJSON());
+ message.GetJSON();
// Restore previous state.
if (is_debug_break) {
@@ -6928,7 +6863,7 @@ TEST(DebugBreakFunctionApply) {
foo->Call(env->Global(), 0, NULL);
// When keeping the debug break several break will happen.
- CHECK_EQ(3, break_point_hit_count);
+ CHECK_GT(break_point_hit_count, 1);
v8::Debug::SetDebugEventListener(NULL);
CheckDebuggerUnloaded();
@@ -7273,10 +7208,10 @@ static void TestDebugBreakInLoop(const char* loop_head,
// Receive 100 breaks for each test and then terminate JavaScript execution.
static const int kBreaksPerTest = 100;
- for (int i = 0; i < 1 && loop_bodies[i] != NULL; i++) {
+ for (int i = 0; loop_bodies[i] != NULL; i++) {
// Perform a lazy deoptimization after various numbers of breaks
// have been hit.
- for (int j = 0; j < 10; j++) {
+ for (int j = 0; j < 11; j++) {
break_point_hit_count_deoptimize = j;
if (j == 10) {
break_point_hit_count_deoptimize = kBreaksPerTest;
@@ -7351,4 +7286,65 @@ TEST(DebugBreakLoop) {
}
+v8::Local<v8::Script> inline_script;
+
+static void DebugBreakInlineListener(v8::DebugEvent event,
+ v8::Handle<v8::Object> exec_state,
+ v8::Handle<v8::Object> event_data,
+ v8::Handle<v8::Value> data) {
+ if (event != v8::Break) return;
+
+ int expected_frame_count = 4;
+ int expected_line_number[] = {1, 4, 7, 12};
+
+ i::Handle<i::Object> compiled_script = v8::Utils::OpenHandle(*inline_script);
+ i::Handle<i::Script> source_script = i::Handle<i::Script>(i::Script::cast(
+ i::JSFunction::cast(*compiled_script)->shared()->script()));
+
+ int break_id = v8::internal::Isolate::Current()->debug()->break_id();
+ char script[128];
+ i::Vector<char> script_vector(script, sizeof(script));
+ OS::SNPrintF(script_vector, "%%GetFrameCount(%d)", break_id);
+ v8::Local<v8::Value> result = CompileRun(script);
+
+ int frame_count = result->Int32Value();
+ CHECK_EQ(expected_frame_count, frame_count);
+
+ for (int i = 0; i < frame_count; i++) {
+ // The 5. element in the returned array of GetFrameDetails contains the
+ // source position of that frame.
+ OS::SNPrintF(script_vector, "%%GetFrameDetails(%d, %d)[5]", break_id, i);
+ v8::Local<v8::Value> result = CompileRun(script);
+ CHECK_EQ(expected_line_number[i],
+ i::GetScriptLineNumber(source_script, result->Int32Value()));
+ }
+ v8::Debug::SetDebugEventListener(NULL);
+ v8::V8::TerminateExecution();
+}
+
+
+TEST(DebugBreakInline) {
+ i::FLAG_allow_natives_syntax = true;
+ v8::HandleScope scope;
+ DebugLocalContext env;
+ const char* source =
+ "function debug(b) { \n"
+ " if (b) debugger; \n"
+ "} \n"
+ "function f(b) { \n"
+ " debug(b) \n"
+ "}; \n"
+ "function g(b) { \n"
+ " f(b); \n"
+ "}; \n"
+ "g(false); \n"
+ "g(false); \n"
+ "%OptimizeFunctionOnNextCall(g); \n"
+ "g(true);";
+ v8::Debug::SetDebugEventListener(DebugBreakInlineListener);
+ inline_script = v8::Script::Compile(v8::String::New(source));
+ inline_script->Run();
+}
+
+
#endif // ENABLE_DEBUGGER_SUPPORT
diff --git a/src/3rdparty/v8/test/cctest/test-deoptimization.cc b/src/3rdparty/v8/test/cctest/test-deoptimization.cc
index c713b02..c52c578 100644
--- a/src/3rdparty/v8/test/cctest/test-deoptimization.cc
+++ b/src/3rdparty/v8/test/cctest/test-deoptimization.cc
@@ -1,4 +1,4 @@
-// Copyright 2007-2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -97,6 +97,13 @@ class AllowNativesSyntaxNoInlining {
};
+// Abort any ongoing incremental marking to make sure that all weak global
+// handle callbacks are processed.
+static void NonIncrementalGC() {
+ HEAP->CollectAllGarbage(i::Heap::kAbortIncrementalMarkingMask);
+}
+
+
static Handle<JSFunction> GetJSFunction(v8::Handle<v8::Object> obj,
const char* property_name) {
v8::Local<v8::Function> fun =
@@ -107,9 +114,7 @@ static Handle<JSFunction> GetJSFunction(v8::Handle<v8::Object> obj,
TEST(DeoptimizeSimple) {
v8::HandleScope scope;
- const char* extension_list[] = { "v8/gc" };
- v8::ExtensionConfiguration extensions(1, extension_list);
- LocalContext env(&extensions);
+ LocalContext env;
// Test lazy deoptimization of a simple function.
{
@@ -119,9 +124,9 @@ TEST(DeoptimizeSimple) {
"function h() { %DeoptimizeFunction(f); }"
"function g() { count++; h(); }"
"function f() { g(); };"
- "f();"
- "gc(); gc()");
+ "f();");
}
+ NonIncrementalGC();
CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
CHECK(!GetJSFunction(env->Global(), "f")->IsOptimized());
@@ -135,9 +140,9 @@ TEST(DeoptimizeSimple) {
"var count = 0;"
"function g() { count++; %DeoptimizeFunction(f); f(false); }"
"function f(x) { if (x) { g(); } else { return } };"
- "f(true);"
- "gc(); gc()");
+ "f(true);");
}
+ NonIncrementalGC();
CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
CHECK(!GetJSFunction(env->Global(), "f")->IsOptimized());
@@ -147,9 +152,7 @@ TEST(DeoptimizeSimple) {
TEST(DeoptimizeSimpleWithArguments) {
v8::HandleScope scope;
- const char* extension_list[] = { "v8/gc" };
- v8::ExtensionConfiguration extensions(1, extension_list);
- LocalContext env(&extensions);
+ LocalContext env;
// Test lazy deoptimization of a simple function with some arguments.
{
@@ -159,9 +162,9 @@ TEST(DeoptimizeSimpleWithArguments) {
"function h(x) { %DeoptimizeFunction(f); }"
"function g(x, y) { count++; h(x); }"
"function f(x, y, z) { g(1,x); y+z; };"
- "f(1, \"2\", false);"
- "gc(); gc()");
+ "f(1, \"2\", false);");
}
+ NonIncrementalGC();
CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
CHECK(!GetJSFunction(env->Global(), "f")->IsOptimized());
@@ -176,9 +179,9 @@ TEST(DeoptimizeSimpleWithArguments) {
"var count = 0;"
"function g(x, y) { count++; %DeoptimizeFunction(f); f(false, 1, y); }"
"function f(x, y, z) { if (x) { g(x, y); } else { return y + z; } };"
- "f(true, 1, \"2\");"
- "gc(); gc()");
+ "f(true, 1, \"2\");");
}
+ NonIncrementalGC();
CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
CHECK(!GetJSFunction(env->Global(), "f")->IsOptimized());
@@ -188,9 +191,7 @@ TEST(DeoptimizeSimpleWithArguments) {
TEST(DeoptimizeSimpleNested) {
v8::HandleScope scope;
- const char* extension_list[] = { "v8/gc" };
- v8::ExtensionConfiguration extensions(1, extension_list);
- LocalContext env(&extensions);
+ LocalContext env;
// Test lazy deoptimization of a simple function. Have a nested function call
// do the deoptimization.
@@ -202,8 +203,8 @@ TEST(DeoptimizeSimpleNested) {
"function h(x, y, z) { return x + y + z; }"
"function g(z) { count++; %DeoptimizeFunction(f); return z;}"
"function f(x,y,z) { return h(x, y, g(z)); };"
- "result = f(1, 2, 3);"
- "gc(); gc()");
+ "result = f(1, 2, 3);");
+ NonIncrementalGC();
CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
CHECK_EQ(6, env->Global()->Get(v8_str("result"))->Int32Value());
@@ -215,9 +216,7 @@ TEST(DeoptimizeSimpleNested) {
TEST(DeoptimizeRecursive) {
v8::HandleScope scope;
- const char* extension_list[] = { "v8/gc" };
- v8::ExtensionConfiguration extensions(1, extension_list);
- LocalContext env(&extensions);
+ LocalContext env;
{
// Test lazy deoptimization of a simple function called recursively. Call
@@ -228,8 +227,9 @@ TEST(DeoptimizeRecursive) {
"var calls = 0;"
"function g() { count++; %DeoptimizeFunction(f); }"
"function f(x) { calls++; if (x > 0) { f(x - 1); } else { g(); } };"
- "f(10); gc(); gc()");
+ "f(10);");
}
+ NonIncrementalGC();
CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
CHECK_EQ(11, env->Global()->Get(v8_str("calls"))->Int32Value());
@@ -237,15 +237,13 @@ TEST(DeoptimizeRecursive) {
v8::Local<v8::Function> fun =
v8::Local<v8::Function>::Cast(env->Global()->Get(v8::String::New("f")));
- Handle<v8::internal::JSFunction> f(v8::Utils::OpenHandle(*fun));
+ CHECK(!fun.IsEmpty());
}
TEST(DeoptimizeMultiple) {
v8::HandleScope scope;
- const char* extension_list[] = { "v8/gc" };
- v8::ExtensionConfiguration extensions(1, extension_list);
- LocalContext env(&extensions);
+ LocalContext env;
{
AlwaysOptimizeAllowNativesSyntaxNoInlining options;
@@ -261,9 +259,9 @@ TEST(DeoptimizeMultiple) {
"function f3(x, y, z) { f4(); return x + y + z; };"
"function f2(x, y) { return x + f3(y + 1, y + 1, y + 1) + y; };"
"function f1(x) { return f2(x + 1, x + 1) + x; };"
- "result = f1(1);"
- "gc(); gc()");
+ "result = f1(1);");
}
+ NonIncrementalGC();
CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
CHECK_EQ(14, env->Global()->Get(v8_str("result"))->Int32Value());
@@ -273,9 +271,7 @@ TEST(DeoptimizeMultiple) {
TEST(DeoptimizeConstructor) {
v8::HandleScope scope;
- const char* extension_list[] = { "v8/gc" };
- v8::ExtensionConfiguration extensions(1, extension_list);
- LocalContext env(&extensions);
+ LocalContext env;
{
AlwaysOptimizeAllowNativesSyntaxNoInlining options;
@@ -284,9 +280,9 @@ TEST(DeoptimizeConstructor) {
"function g() { count++;"
" %DeoptimizeFunction(f); }"
"function f() { g(); };"
- "result = new f() instanceof f;"
- "gc(); gc()");
+ "result = new f() instanceof f;");
}
+ NonIncrementalGC();
CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
CHECK(env->Global()->Get(v8_str("result"))->IsTrue());
@@ -301,9 +297,9 @@ TEST(DeoptimizeConstructor) {
" %DeoptimizeFunction(f); }"
"function f(x, y) { this.x = x; g(); this.y = y; };"
"result = new f(1, 2);"
- "result = result.x + result.y;"
- "gc(); gc()");
+ "result = result.x + result.y;");
}
+ NonIncrementalGC();
CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
CHECK_EQ(3, env->Global()->Get(v8_str("result"))->Int32Value());
@@ -313,9 +309,7 @@ TEST(DeoptimizeConstructor) {
TEST(DeoptimizeConstructorMultiple) {
v8::HandleScope scope;
- const char* extension_list[] = { "v8/gc" };
- v8::ExtensionConfiguration extensions(1, extension_list);
- LocalContext env(&extensions);
+ LocalContext env;
{
AlwaysOptimizeAllowNativesSyntaxNoInlining options;
@@ -332,9 +326,9 @@ TEST(DeoptimizeConstructorMultiple) {
"function f2(x, y) {"
" this.result = x + new f3(y + 1, y + 1, y + 1).result + y; };"
"function f1(x) { this.result = new f2(x + 1, x + 1).result + x; };"
- "result = new f1(1).result;"
- "gc(); gc()");
+ "result = new f1(1).result;");
}
+ NonIncrementalGC();
CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
CHECK_EQ(14, env->Global()->Get(v8_str("result"))->Int32Value());
@@ -344,9 +338,7 @@ TEST(DeoptimizeConstructorMultiple) {
TEST(DeoptimizeBinaryOperationADDString) {
v8::HandleScope scope;
- const char* extension_list[] = { "v8/gc" };
- v8::ExtensionConfiguration extensions(1, extension_list);
- LocalContext env(&extensions);
+ LocalContext env;
const char* f_source = "function f(x, y) { return x + y; };";
@@ -376,9 +368,9 @@ TEST(DeoptimizeBinaryOperationADDString) {
// Call f and force deoptimization while processing the binary operation.
CompileRun("deopt = true;"
- "var result = f('a+', new X());"
- "gc(); gc();");
+ "var result = f('a+', new X());");
}
+ NonIncrementalGC();
CHECK(!GetJSFunction(env->Global(), "f")->IsOptimized());
CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
@@ -428,18 +420,15 @@ static void TestDeoptimizeBinaryOpHelper(LocalContext* env,
// Call f and force deoptimization while processing the binary operation.
CompileRun("deopt = true;"
- "var result = f(7, new X());"
- "gc(); gc();");
-
+ "var result = f(7, new X());");
+ NonIncrementalGC();
CHECK(!GetJSFunction((*env)->Global(), "f")->IsOptimized());
}
TEST(DeoptimizeBinaryOperationADD) {
v8::HandleScope scope;
- const char* extension_list[] = { "v8/gc" };
- v8::ExtensionConfiguration extensions(1, extension_list);
- LocalContext env(&extensions);
+ LocalContext env;
TestDeoptimizeBinaryOpHelper(&env, "+");
@@ -451,9 +440,7 @@ TEST(DeoptimizeBinaryOperationADD) {
TEST(DeoptimizeBinaryOperationSUB) {
v8::HandleScope scope;
- const char* extension_list[] = { "v8/gc" };
- v8::ExtensionConfiguration extensions(1, extension_list);
- LocalContext env(&extensions);
+ LocalContext env;
TestDeoptimizeBinaryOpHelper(&env, "-");
@@ -465,9 +452,7 @@ TEST(DeoptimizeBinaryOperationSUB) {
TEST(DeoptimizeBinaryOperationMUL) {
v8::HandleScope scope;
- const char* extension_list[] = { "v8/gc" };
- v8::ExtensionConfiguration extensions(1, extension_list);
- LocalContext env(&extensions);
+ LocalContext env;
TestDeoptimizeBinaryOpHelper(&env, "*");
@@ -479,9 +464,7 @@ TEST(DeoptimizeBinaryOperationMUL) {
TEST(DeoptimizeBinaryOperationDIV) {
v8::HandleScope scope;
- const char* extension_list[] = { "v8/gc" };
- v8::ExtensionConfiguration extensions(1, extension_list);
- LocalContext env(&extensions);
+ LocalContext env;
TestDeoptimizeBinaryOpHelper(&env, "/");
@@ -493,9 +476,7 @@ TEST(DeoptimizeBinaryOperationDIV) {
TEST(DeoptimizeBinaryOperationMOD) {
v8::HandleScope scope;
- const char* extension_list[] = { "v8/gc" };
- v8::ExtensionConfiguration extensions(1, extension_list);
- LocalContext env(&extensions);
+ LocalContext env;
TestDeoptimizeBinaryOpHelper(&env, "%");
@@ -507,9 +488,7 @@ TEST(DeoptimizeBinaryOperationMOD) {
TEST(DeoptimizeCompare) {
v8::HandleScope scope;
- const char* extension_list[] = { "v8/gc" };
- v8::ExtensionConfiguration extensions(1, extension_list);
- LocalContext env(&extensions);
+ LocalContext env;
const char* f_source = "function f(x, y) { return x < y; };";
@@ -539,9 +518,9 @@ TEST(DeoptimizeCompare) {
// Call f and force deoptimization while processing the comparison.
CompileRun("deopt = true;"
- "var result = f('a', new X());"
- "gc(); gc();");
+ "var result = f('a', new X());");
}
+ NonIncrementalGC();
CHECK(!GetJSFunction(env->Global(), "f")->IsOptimized());
CHECK_EQ(1, env->Global()->Get(v8_str("count"))->Int32Value());
@@ -552,9 +531,7 @@ TEST(DeoptimizeCompare) {
TEST(DeoptimizeLoadICStoreIC) {
v8::HandleScope scope;
- const char* extension_list[] = { "v8/gc" };
- v8::ExtensionConfiguration extensions(1, extension_list);
- LocalContext env(&extensions);
+ LocalContext env;
// Functions to generate load/store/keyed load/keyed store IC calls.
const char* f1_source = "function f1(x) { return x.y; };";
@@ -618,9 +595,9 @@ TEST(DeoptimizeLoadICStoreIC) {
"var result = f1(new X());"
"g1(new X());"
"f2(new X(), 'z');"
- "g2(new X(), 'z');"
- "gc(); gc();");
+ "g2(new X(), 'z');");
}
+ NonIncrementalGC();
CHECK(!GetJSFunction(env->Global(), "f1")->IsOptimized());
CHECK(!GetJSFunction(env->Global(), "g1")->IsOptimized());
@@ -634,9 +611,7 @@ TEST(DeoptimizeLoadICStoreIC) {
TEST(DeoptimizeLoadICStoreICNested) {
v8::HandleScope scope;
- const char* extension_list[] = { "v8/gc" };
- v8::ExtensionConfiguration extensions(1, extension_list);
- LocalContext env(&extensions);
+ LocalContext env;
// Functions to generate load/store/keyed load/keyed store IC calls.
const char* f1_source = "function f1(x) { return x.y; };";
@@ -701,9 +676,9 @@ TEST(DeoptimizeLoadICStoreICNested) {
// Call functions and force deoptimization while processing the ics.
CompileRun("deopt = true;"
- "var result = f1(new X());"
- "gc(); gc();");
+ "var result = f1(new X());");
}
+ NonIncrementalGC();
CHECK(!GetJSFunction(env->Global(), "f1")->IsOptimized());
CHECK(!GetJSFunction(env->Global(), "g1")->IsOptimized());
diff --git a/src/3rdparty/v8/test/cctest/test-disasm-arm.cc b/src/3rdparty/v8/test/cctest/test-disasm-arm.cc
index 032e6bc..0e9432d 100644
--- a/src/3rdparty/v8/test/cctest/test-disasm-arm.cc
+++ b/src/3rdparty/v8/test/cctest/test-disasm-arm.cc
@@ -69,10 +69,10 @@ bool DisassembleAndCompare(byte* pc, const char* compare_string) {
}
-// Setup V8 to a state where we can at least run the assembler and
+// Set up V8 to a state where we can at least run the assembler and
// disassembler. Declare the variables and allocate the data structures used
// in the rest of the macros.
-#define SETUP() \
+#define SET_UP() \
InitializeVM(); \
v8::HandleScope scope; \
byte *buffer = reinterpret_cast<byte*>(malloc(4*1024)); \
@@ -102,7 +102,7 @@ if (failure) { \
TEST(Type0) {
- SETUP();
+ SET_UP();
COMPARE(and_(r0, r1, Operand(r2)),
"e0010002 and r0, r1, r2");
@@ -329,7 +329,7 @@ TEST(Type0) {
TEST(Type1) {
- SETUP();
+ SET_UP();
COMPARE(and_(r0, r1, Operand(0x00000000)),
"e2010000 and r0, r1, #0");
@@ -358,7 +358,7 @@ TEST(Type1) {
TEST(Type3) {
- SETUP();
+ SET_UP();
if (CpuFeatures::IsSupported(ARMv7)) {
COMPARE(ubfx(r0, r1, 5, 10),
@@ -413,7 +413,7 @@ TEST(Type3) {
TEST(Vfp) {
- SETUP();
+ SET_UP();
if (CpuFeatures::IsSupported(VFP3)) {
CpuFeatures::Scope scope(VFP3);
@@ -546,7 +546,7 @@ TEST(Vfp) {
TEST(LoadStore) {
- SETUP();
+ SET_UP();
COMPARE(ldrb(r0, MemOperand(r1)),
"e5d10000 ldrb r0, [r1, #+0]");
diff --git a/src/3rdparty/v8/test/cctest/test-disasm-ia32.cc b/src/3rdparty/v8/test/cctest/test-disasm-ia32.cc
index 1e38e4e..da09505 100644
--- a/src/3rdparty/v8/test/cctest/test-disasm-ia32.cc
+++ b/src/3rdparty/v8/test/cctest/test-disasm-ia32.cc
@@ -449,6 +449,11 @@ TEST(DisasmIa320) {
}
}
+ // Nop instructions
+ for (int i = 0; i < 16; i++) {
+ __ Nop(i);
+ }
+
__ ret(0);
CodeDesc desc;
diff --git a/src/3rdparty/v8/test/cctest/test-disasm-mips.cc b/src/3rdparty/v8/test/cctest/test-disasm-mips.cc
index 5ad99d7..1f87424 100644
--- a/src/3rdparty/v8/test/cctest/test-disasm-mips.cc
+++ b/src/3rdparty/v8/test/cctest/test-disasm-mips.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -71,10 +71,10 @@ bool DisassembleAndCompare(byte* pc, const char* compare_string) {
}
-// Setup V8 to a state where we can at least run the assembler and
+// Set up V8 to a state where we can at least run the assembler and
// disassembler. Declare the variables and allocate the data structures used
// in the rest of the macros.
-#define SETUP() \
+#define SET_UP() \
InitializeVM(); \
v8::HandleScope scope; \
byte *buffer = reinterpret_cast<byte*>(malloc(4*1024)); \
@@ -104,7 +104,7 @@ if (failure) { \
TEST(Type0) {
- SETUP();
+ SET_UP();
COMPARE(addu(a0, a1, a2),
"00a62021 addu a0, a1, a2");
@@ -148,12 +148,14 @@ TEST(Type0) {
COMPARE(divu(v0, v1),
"0043001b divu v0, v1");
- COMPARE(mul(a0, a1, a2),
- "70a62002 mul a0, a1, a2");
- COMPARE(mul(t2, t3, t4),
- "716c5002 mul t2, t3, t4");
- COMPARE(mul(v0, v1, s0),
- "70701002 mul v0, v1, s0");
+ if (kArchVariant != kLoongson) {
+ COMPARE(mul(a0, a1, a2),
+ "70a62002 mul a0, a1, a2");
+ COMPARE(mul(t2, t3, t4),
+ "716c5002 mul t2, t3, t4");
+ COMPARE(mul(v0, v1, s0),
+ "70701002 mul v0, v1, s0");
+ }
COMPARE(addiu(a0, a1, 0x0),
"24a40000 addiu a0, a1, 0");
@@ -274,7 +276,7 @@ TEST(Type0) {
COMPARE(srav(v0, v1, fp),
"03c31007 srav v0, v1, fp");
- if (mips32r2) {
+ if (kArchVariant == kMips32r2) {
COMPARE(rotr(a0, a1, 0),
"00252002 rotr a0, a1, 0");
COMPARE(rotr(s0, s1, 8),
@@ -377,48 +379,50 @@ TEST(Type0) {
COMPARE(sltiu(v0, v1, -1),
"2c62ffff sltiu v0, v1, -1");
- COMPARE(movz(a0, a1, a2),
- "00a6200a movz a0, a1, a2");
- COMPARE(movz(s0, s1, s2),
- "0232800a movz s0, s1, s2");
- COMPARE(movz(t2, t3, t4),
- "016c500a movz t2, t3, t4");
- COMPARE(movz(v0, v1, a2),
- "0066100a movz v0, v1, a2");
- COMPARE(movn(a0, a1, a2),
- "00a6200b movn a0, a1, a2");
- COMPARE(movn(s0, s1, s2),
- "0232800b movn s0, s1, s2");
- COMPARE(movn(t2, t3, t4),
- "016c500b movn t2, t3, t4");
- COMPARE(movn(v0, v1, a2),
- "0066100b movn v0, v1, a2");
-
- COMPARE(movt(a0, a1, 1),
- "00a52001 movt a0, a1, 1");
- COMPARE(movt(s0, s1, 2),
- "02298001 movt s0, s1, 2");
- COMPARE(movt(t2, t3, 3),
- "016d5001 movt t2, t3, 3");
- COMPARE(movt(v0, v1, 7),
- "007d1001 movt v0, v1, 7");
- COMPARE(movf(a0, a1, 0),
- "00a02001 movf a0, a1, 0");
- COMPARE(movf(s0, s1, 4),
- "02308001 movf s0, s1, 4");
- COMPARE(movf(t2, t3, 5),
- "01745001 movf t2, t3, 5");
- COMPARE(movf(v0, v1, 6),
- "00781001 movf v0, v1, 6");
-
- COMPARE(clz(a0, a1),
- "70a42020 clz a0, a1");
- COMPARE(clz(s6, s7),
- "72f6b020 clz s6, s7");
- COMPARE(clz(v0, v1),
- "70621020 clz v0, v1");
-
- if (mips32r2) {
+ if (kArchVariant != kLoongson) {
+ COMPARE(movz(a0, a1, a2),
+ "00a6200a movz a0, a1, a2");
+ COMPARE(movz(s0, s1, s2),
+ "0232800a movz s0, s1, s2");
+ COMPARE(movz(t2, t3, t4),
+ "016c500a movz t2, t3, t4");
+ COMPARE(movz(v0, v1, a2),
+ "0066100a movz v0, v1, a2");
+ COMPARE(movn(a0, a1, a2),
+ "00a6200b movn a0, a1, a2");
+ COMPARE(movn(s0, s1, s2),
+ "0232800b movn s0, s1, s2");
+ COMPARE(movn(t2, t3, t4),
+ "016c500b movn t2, t3, t4");
+ COMPARE(movn(v0, v1, a2),
+ "0066100b movn v0, v1, a2");
+
+ COMPARE(movt(a0, a1, 1),
+ "00a52001 movt a0, a1, 1");
+ COMPARE(movt(s0, s1, 2),
+ "02298001 movt s0, s1, 2");
+ COMPARE(movt(t2, t3, 3),
+ "016d5001 movt t2, t3, 3");
+ COMPARE(movt(v0, v1, 7),
+ "007d1001 movt v0, v1, 7");
+ COMPARE(movf(a0, a1, 0),
+ "00a02001 movf a0, a1, 0");
+ COMPARE(movf(s0, s1, 4),
+ "02308001 movf s0, s1, 4");
+ COMPARE(movf(t2, t3, 5),
+ "01745001 movf t2, t3, 5");
+ COMPARE(movf(v0, v1, 6),
+ "00781001 movf v0, v1, 6");
+
+ COMPARE(clz(a0, a1),
+ "70a42020 clz a0, a1");
+ COMPARE(clz(s6, s7),
+ "72f6b020 clz s6, s7");
+ COMPARE(clz(v0, v1),
+ "70621020 clz v0, v1");
+ }
+
+ if (kArchVariant == kMips32r2) {
COMPARE(ins_(a0, a1, 31, 1),
"7ca4ffc4 ins a0, a1, 31, 1");
COMPARE(ins_(s6, s7, 30, 2),
diff --git a/src/3rdparty/v8/test/cctest/test-disasm-x64.cc b/src/3rdparty/v8/test/cctest/test-disasm-x64.cc
new file mode 100644
index 0000000..da85eb9
--- /dev/null
+++ b/src/3rdparty/v8/test/cctest/test-disasm-x64.cc
@@ -0,0 +1,429 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include <stdlib.h>
+
+#include "v8.h"
+
+#include "debug.h"
+#include "disasm.h"
+#include "disassembler.h"
+#include "macro-assembler.h"
+#include "serialize.h"
+#include "cctest.h"
+
+using namespace v8::internal;
+
+static v8::Persistent<v8::Context> env;
+
+static void InitializeVM() {
+ if (env.IsEmpty()) {
+ env = v8::Context::New();
+ }
+}
+
+
+#define __ assm.
+
+
+static void DummyStaticFunction(Object* result) {
+}
+
+
+TEST(DisasmX64) {
+ InitializeVM();
+ v8::HandleScope scope;
+ v8::internal::byte buffer[2048];
+ Assembler assm(Isolate::Current(), buffer, sizeof buffer);
+ DummyStaticFunction(NULL); // just bloody use it (DELETE; debugging)
+
+ // Short immediate instructions
+ __ addq(rax, Immediate(12345678));
+ __ or_(rax, Immediate(12345678));
+ __ subq(rax, Immediate(12345678));
+ __ xor_(rax, Immediate(12345678));
+ __ and_(rax, Immediate(12345678));
+
+ // ---- This one caused crash
+ __ movq(rbx, Operand(rsp, rcx, times_2, 0)); // [rsp+rcx*4]
+
+ // ---- All instructions that I can think of
+ __ addq(rdx, rbx);
+ __ addq(rdx, Operand(rbx, 0));
+ __ addq(rdx, Operand(rbx, 16));
+ __ addq(rdx, Operand(rbx, 1999));
+ __ addq(rdx, Operand(rsp, 0));
+ __ addq(rdx, Operand(rsp, 16));
+ __ addq(rdx, Operand(rsp, 1999));
+ __ nop();
+ __ addq(rdi, Operand(rbp, rcx, times_4, 0));
+ __ addq(rdi, Operand(rbp, rcx, times_4, 12));
+ __ addq(Operand(rbp, rcx, times_4, 12), Immediate(12));
+
+ __ nop();
+ __ addq(rbx, Immediate(12));
+ __ nop();
+ __ nop();
+ __ and_(rdx, Immediate(3));
+ __ and_(rdx, Operand(rsp, 4));
+ __ cmpq(rdx, Immediate(3));
+ __ cmpq(rdx, Operand(rsp, 4));
+ __ cmpq(Operand(rbp, rcx, times_4, 0), Immediate(1000));
+ __ cmpb(rbx, Operand(rbp, rcx, times_2, 0));
+ __ cmpb(Operand(rbp, rcx, times_2, 0), rbx);
+ __ or_(rdx, Immediate(3));
+ __ xor_(rdx, Immediate(3));
+ __ nop();
+ {
+ CHECK(CpuFeatures::IsSupported(CPUID));
+ CpuFeatures::Scope fscope(CPUID);
+ __ cpuid();
+ }
+ {
+ CHECK(CpuFeatures::IsSupported(RDTSC));
+ CpuFeatures::Scope fscope(RDTSC);
+ __ rdtsc();
+ }
+ __ movsxbq(rdx, Operand(rcx, 0));
+ __ movsxwq(rdx, Operand(rcx, 0));
+ __ movzxbl(rdx, Operand(rcx, 0));
+ __ movzxwl(rdx, Operand(rcx, 0));
+ __ movzxbq(rdx, Operand(rcx, 0));
+ __ movzxwq(rdx, Operand(rcx, 0));
+
+ __ nop();
+ __ imul(rdx, rcx);
+ __ shld(rdx, rcx);
+ __ shrd(rdx, rcx);
+ __ bts(Operand(rdx, 0), rcx);
+ __ bts(Operand(rbx, rcx, times_4, 0), rcx);
+ __ nop();
+ __ push(Immediate(12));
+ __ push(Immediate(23456));
+ __ push(rcx);
+ __ push(rsi);
+ __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
+ __ push(Operand(rbx, rcx, times_4, 0));
+ __ push(Operand(rbx, rcx, times_4, 0));
+ __ push(Operand(rbx, rcx, times_4, 10000));
+ __ pop(rdx);
+ __ pop(rax);
+ __ pop(Operand(rbx, rcx, times_4, 0));
+ __ nop();
+
+ __ addq(rdx, Operand(rsp, 16));
+ __ addq(rdx, rcx);
+ __ movb(rdx, Operand(rcx, 0));
+ __ movb(rcx, Immediate(6));
+ __ movb(Operand(rsp, 16), rdx);
+ __ movw(Operand(rsp, 16), rdx);
+ __ nop();
+ __ movsxwq(rdx, Operand(rsp, 12));
+ __ movsxbq(rdx, Operand(rsp, 12));
+ __ movsxlq(rdx, Operand(rsp, 12));
+ __ movzxwq(rdx, Operand(rsp, 12));
+ __ movzxbq(rdx, Operand(rsp, 12));
+ __ nop();
+ __ movq(rdx, Immediate(1234567));
+ __ movq(rdx, Operand(rsp, 12));
+ __ movq(Operand(rbx, rcx, times_4, 10000), Immediate(12345));
+ __ movq(Operand(rbx, rcx, times_4, 10000), rdx);
+ __ nop();
+ __ decb(rdx);
+ __ decb(Operand(rax, 10));
+ __ decb(Operand(rbx, rcx, times_4, 10000));
+ __ decq(rdx);
+ __ cdq();
+
+ __ nop();
+ __ idivq(rdx);
+ __ mul(rdx);
+ __ neg(rdx);
+ __ not_(rdx);
+ __ testq(Operand(rbx, rcx, times_4, 10000), rdx);
+
+ __ imul(rdx, Operand(rbx, rcx, times_4, 10000));
+ __ imul(rdx, rcx, Immediate(12));
+ __ imul(rdx, rcx, Immediate(1000));
+
+ __ incq(rdx);
+ __ incq(Operand(rbx, rcx, times_4, 10000));
+ __ push(Operand(rbx, rcx, times_4, 10000));
+ __ pop(Operand(rbx, rcx, times_4, 10000));
+ __ jmp(Operand(rbx, rcx, times_4, 10000));
+
+ __ lea(rdx, Operand(rbx, rcx, times_4, 10000));
+ __ or_(rdx, Immediate(12345));
+ __ or_(rdx, Operand(rbx, rcx, times_4, 10000));
+
+ __ nop();
+
+ __ rcl(rdx, Immediate(1));
+ __ rcl(rdx, Immediate(7));
+ __ rcr(rdx, Immediate(1));
+ __ rcr(rdx, Immediate(7));
+ __ sar(rdx, Immediate(1));
+ __ sar(rdx, Immediate(6));
+ __ sar_cl(rdx);
+ __ sbbq(rdx, rbx);
+ __ shld(rdx, rbx);
+ __ shl(rdx, Immediate(1));
+ __ shl(rdx, Immediate(6));
+ __ shl_cl(rdx);
+ __ shrd(rdx, rbx);
+ __ shr(rdx, Immediate(1));
+ __ shr(rdx, Immediate(7));
+ __ shr_cl(rdx);
+
+
+ // Immediates
+
+ __ addq(rbx, Immediate(12));
+ __ addq(Operand(rdx, rcx, times_4, 10000), Immediate(12));
+
+ __ and_(rbx, Immediate(12345));
+
+ __ cmpq(rbx, Immediate(12345));
+ __ cmpq(rbx, Immediate(12));
+ __ cmpq(Operand(rdx, rcx, times_4, 10000), Immediate(12));
+ __ cmpb(rax, Immediate(100));
+
+ __ or_(rbx, Immediate(12345));
+
+ __ subq(rbx, Immediate(12));
+ __ subq(Operand(rdx, rcx, times_4, 10000), Immediate(12));
+
+ __ xor_(rbx, Immediate(12345));
+
+ __ imul(rdx, rcx, Immediate(12));
+ __ imul(rdx, rcx, Immediate(1000));
+
+ __ cld();
+
+ __ subq(rdx, Operand(rbx, rcx, times_4, 10000));
+ __ subq(rdx, rbx);
+
+ __ testq(rdx, Immediate(12345));
+ __ testq(Operand(rbx, rcx, times_8, 10000), rdx);
+ __ testb(Operand(rcx, rbx, times_2, 1000), rdx);
+ __ testb(Operand(rax, -20), Immediate(0x9A));
+ __ nop();
+
+ __ xor_(rdx, Immediate(12345));
+ __ xor_(rdx, Operand(rbx, rcx, times_8, 10000));
+ __ bts(Operand(rbx, rcx, times_8, 10000), rdx);
+ __ hlt();
+ __ int3();
+ __ ret(0);
+ __ ret(8);
+
+ // Calls
+
+ Label L1, L2;
+ __ bind(&L1);
+ __ nop();
+ __ call(&L1);
+ __ call(&L2);
+ __ nop();
+ __ bind(&L2);
+ __ call(Operand(rbx, rcx, times_4, 10000));
+ __ nop();
+ Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+ Builtins::kLoadIC_Initialize));
+ __ call(ic, RelocInfo::CODE_TARGET);
+ __ nop();
+ __ nop();
+
+ __ jmp(&L1);
+ __ jmp(Operand(rbx, rcx, times_4, 10000));
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ ExternalReference after_break_target =
+ ExternalReference(Debug_Address::AfterBreakTarget(),
+ assm.isolate());
+#endif // ENABLE_DEBUGGER_SUPPORT
+ __ jmp(ic, RelocInfo::CODE_TARGET);
+ __ nop();
+
+
+ Label Ljcc;
+ __ nop();
+ // long jumps
+ __ j(overflow, &Ljcc);
+ __ j(no_overflow, &Ljcc);
+ __ j(below, &Ljcc);
+ __ j(above_equal, &Ljcc);
+ __ j(equal, &Ljcc);
+ __ j(not_equal, &Ljcc);
+ __ j(below_equal, &Ljcc);
+ __ j(above, &Ljcc);
+ __ j(sign, &Ljcc);
+ __ j(not_sign, &Ljcc);
+ __ j(parity_even, &Ljcc);
+ __ j(parity_odd, &Ljcc);
+ __ j(less, &Ljcc);
+ __ j(greater_equal, &Ljcc);
+ __ j(less_equal, &Ljcc);
+ __ j(greater, &Ljcc);
+ __ nop();
+ __ bind(&Ljcc);
+ // short jumps
+ __ j(overflow, &Ljcc);
+ __ j(no_overflow, &Ljcc);
+ __ j(below, &Ljcc);
+ __ j(above_equal, &Ljcc);
+ __ j(equal, &Ljcc);
+ __ j(not_equal, &Ljcc);
+ __ j(below_equal, &Ljcc);
+ __ j(above, &Ljcc);
+ __ j(sign, &Ljcc);
+ __ j(not_sign, &Ljcc);
+ __ j(parity_even, &Ljcc);
+ __ j(parity_odd, &Ljcc);
+ __ j(less, &Ljcc);
+ __ j(greater_equal, &Ljcc);
+ __ j(less_equal, &Ljcc);
+ __ j(greater, &Ljcc);
+
+ // 0xD9 instructions
+ __ nop();
+
+ __ fld(1);
+ __ fld1();
+ __ fldz();
+ __ fldpi();
+ __ fabs();
+ __ fchs();
+ __ fprem();
+ __ fprem1();
+ __ fincstp();
+ __ ftst();
+ __ fxch(3);
+ __ fld_s(Operand(rbx, rcx, times_4, 10000));
+ __ fstp_s(Operand(rbx, rcx, times_4, 10000));
+ __ ffree(3);
+ __ fld_d(Operand(rbx, rcx, times_4, 10000));
+ __ fstp_d(Operand(rbx, rcx, times_4, 10000));
+ __ nop();
+
+ __ fild_s(Operand(rbx, rcx, times_4, 10000));
+ __ fistp_s(Operand(rbx, rcx, times_4, 10000));
+ __ fild_d(Operand(rbx, rcx, times_4, 10000));
+ __ fistp_d(Operand(rbx, rcx, times_4, 10000));
+ __ fnstsw_ax();
+ __ nop();
+ __ fadd(3);
+ __ fsub(3);
+ __ fmul(3);
+ __ fdiv(3);
+
+ __ faddp(3);
+ __ fsubp(3);
+ __ fmulp(3);
+ __ fdivp(3);
+ __ fcompp();
+ __ fwait();
+ __ nop();
+ {
+ if (CpuFeatures::IsSupported(SSE2)) {
+ CpuFeatures::Scope fscope(SSE2);
+ __ cvttss2si(rdx, Operand(rbx, rcx, times_4, 10000));
+ __ cvttss2si(rdx, xmm1);
+ __ cvttsd2si(rdx, Operand(rbx, rcx, times_4, 10000));
+ __ cvttsd2si(rdx, xmm1);
+ __ cvttsd2siq(rdx, xmm1);
+ __ addsd(xmm1, xmm0);
+ __ mulsd(xmm1, xmm0);
+ __ subsd(xmm1, xmm0);
+ __ divsd(xmm1, xmm0);
+ __ movsd(xmm1, Operand(rbx, rcx, times_4, 10000));
+ __ movsd(Operand(rbx, rcx, times_4, 10000), xmm1);
+ __ ucomisd(xmm0, xmm1);
+
+ // 128 bit move instructions.
+ __ movdqa(xmm0, Operand(rbx, rcx, times_4, 10000));
+ __ movdqa(Operand(rbx, rcx, times_4, 10000), xmm0);
+ }
+ }
+
+ // cmov.
+ {
+ if (CpuFeatures::IsSupported(CMOV)) {
+ CpuFeatures::Scope use_cmov(CMOV);
+ __ cmovq(overflow, rax, Operand(rax, 0));
+ __ cmovq(no_overflow, rax, Operand(rax, 1));
+ __ cmovq(below, rax, Operand(rax, 2));
+ __ cmovq(above_equal, rax, Operand(rax, 3));
+ __ cmovq(equal, rax, Operand(rbx, 0));
+ __ cmovq(not_equal, rax, Operand(rbx, 1));
+ __ cmovq(below_equal, rax, Operand(rbx, 2));
+ __ cmovq(above, rax, Operand(rbx, 3));
+ __ cmovq(sign, rax, Operand(rcx, 0));
+ __ cmovq(not_sign, rax, Operand(rcx, 1));
+ __ cmovq(parity_even, rax, Operand(rcx, 2));
+ __ cmovq(parity_odd, rax, Operand(rcx, 3));
+ __ cmovq(less, rax, Operand(rdx, 0));
+ __ cmovq(greater_equal, rax, Operand(rdx, 1));
+ __ cmovq(less_equal, rax, Operand(rdx, 2));
+ __ cmovq(greater, rax, Operand(rdx, 3));
+ }
+ }
+
+ // andpd, etc.
+ {
+ if (CpuFeatures::IsSupported(SSE2)) {
+ CpuFeatures::Scope fscope(SSE2);
+ __ andpd(xmm0, xmm1);
+ __ andpd(xmm1, xmm2);
+
+ __ movaps(xmm0, xmm1);
+ __ movaps(xmm1, xmm2);
+ }
+ }
+
+ // Nop instructions
+ for (int i = 0; i < 16; i++) {
+ __ Nop(i);
+ }
+
+ __ ret(0);
+
+ CodeDesc desc;
+ assm.GetCode(&desc);
+ Object* code = HEAP->CreateCode(
+ desc,
+ Code::ComputeFlags(Code::STUB),
+ Handle<Object>(HEAP->undefined_value()))->ToObjectChecked();
+ CHECK(code->IsCode());
+#ifdef OBJECT_PRINT
+ Code::cast(code)->Print();
+ byte* begin = Code::cast(code)->instruction_start();
+ byte* end = begin + Code::cast(code)->instruction_size();
+ disasm::Disassembler::Disassemble(stdout, begin, end);
+#endif
+}
+
+#undef __
diff --git a/src/3rdparty/v8/test/cctest/test-hashing.cc b/src/3rdparty/v8/test/cctest/test-hashing.cc
new file mode 100644
index 0000000..a626510
--- /dev/null
+++ b/src/3rdparty/v8/test/cctest/test-hashing.cc
@@ -0,0 +1,260 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include <stdlib.h>
+
+#include "v8.h"
+
+#include "factory.h"
+#include "macro-assembler.h"
+#include "cctest.h"
+#include "code-stubs.h"
+#include "objects.h"
+
+#ifdef USE_SIMULATOR
+#include "simulator.h"
+#endif
+
+using namespace v8::internal;
+
+
+typedef uint32_t (*HASH_FUNCTION)();
+
+static v8::Persistent<v8::Context> env;
+
+#define __ masm->
+
+
+void generate(MacroAssembler* masm, i::Vector<const char> string) {
+ // GenerateHashInit takes the first character as an argument so it can't
+ // handle the zero length string.
+ ASSERT(string.length() > 0);
+#ifdef V8_TARGET_ARCH_IA32
+ __ push(ebx);
+ __ push(ecx);
+ __ mov(eax, Immediate(0));
+ __ mov(ebx, Immediate(string.at(0)));
+ StringHelper::GenerateHashInit(masm, eax, ebx, ecx);
+ for (int i = 1; i < string.length(); i++) {
+ __ mov(ebx, Immediate(string.at(i)));
+ StringHelper::GenerateHashAddCharacter(masm, eax, ebx, ecx);
+ }
+ StringHelper::GenerateHashGetHash(masm, eax, ecx);
+ __ pop(ecx);
+ __ pop(ebx);
+ __ Ret();
+#elif V8_TARGET_ARCH_X64
+ __ push(kRootRegister);
+ __ InitializeRootRegister();
+ __ push(rbx);
+ __ push(rcx);
+ __ movq(rax, Immediate(0));
+ __ movq(rbx, Immediate(string.at(0)));
+ StringHelper::GenerateHashInit(masm, rax, rbx, rcx);
+ for (int i = 1; i < string.length(); i++) {
+ __ movq(rbx, Immediate(string.at(i)));
+ StringHelper::GenerateHashAddCharacter(masm, rax, rbx, rcx);
+ }
+ StringHelper::GenerateHashGetHash(masm, rax, rcx);
+ __ pop(rcx);
+ __ pop(rbx);
+ __ pop(kRootRegister);
+ __ Ret();
+#elif V8_TARGET_ARCH_ARM
+ __ push(kRootRegister);
+ __ InitializeRootRegister();
+
+ __ mov(r0, Operand(0));
+ __ mov(ip, Operand(string.at(0)));
+ StringHelper::GenerateHashInit(masm, r0, ip);
+ for (int i = 1; i < string.length(); i++) {
+ __ mov(ip, Operand(string.at(i)));
+ StringHelper::GenerateHashAddCharacter(masm, r0, ip);
+ }
+ StringHelper::GenerateHashGetHash(masm, r0);
+ __ pop(kRootRegister);
+ __ mov(pc, Operand(lr));
+#elif V8_TARGET_ARCH_MIPS
+ __ push(kRootRegister);
+ __ InitializeRootRegister();
+
+ __ li(v0, Operand(0));
+ __ li(t1, Operand(string.at(0)));
+ StringHelper::GenerateHashInit(masm, v0, t1);
+ for (int i = 1; i < string.length(); i++) {
+ __ li(t1, Operand(string.at(i)));
+ StringHelper::GenerateHashAddCharacter(masm, v0, t1);
+ }
+ StringHelper::GenerateHashGetHash(masm, v0);
+ __ pop(kRootRegister);
+ __ jr(ra);
+ __ nop();
+#endif
+}
+
+
+void generate(MacroAssembler* masm, uint32_t key) {
+#ifdef V8_TARGET_ARCH_IA32
+ __ push(ebx);
+ __ mov(eax, Immediate(key));
+ __ GetNumberHash(eax, ebx);
+ __ pop(ebx);
+ __ Ret();
+#elif V8_TARGET_ARCH_X64
+ __ push(kRootRegister);
+ __ InitializeRootRegister();
+ __ push(rbx);
+ __ movq(rax, Immediate(key));
+ __ GetNumberHash(rax, rbx);
+ __ pop(rbx);
+ __ pop(kRootRegister);
+ __ Ret();
+#elif V8_TARGET_ARCH_ARM
+ __ push(kRootRegister);
+ __ InitializeRootRegister();
+ __ mov(r0, Operand(key));
+ __ GetNumberHash(r0, ip);
+ __ pop(kRootRegister);
+ __ mov(pc, Operand(lr));
+#elif V8_TARGET_ARCH_MIPS
+ __ push(kRootRegister);
+ __ InitializeRootRegister();
+ __ li(v0, Operand(key));
+ __ GetNumberHash(v0, t1);
+ __ pop(kRootRegister);
+ __ jr(ra);
+ __ nop();
+#endif
+}
+
+
+void check(i::Vector<const char> string) {
+ v8::HandleScope scope;
+ v8::internal::byte buffer[2048];
+ MacroAssembler masm(Isolate::Current(), buffer, sizeof buffer);
+
+ generate(&masm, string);
+
+ CodeDesc desc;
+ masm.GetCode(&desc);
+ Code* code = Code::cast(HEAP->CreateCode(
+ desc,
+ Code::ComputeFlags(Code::STUB),
+ Handle<Object>(HEAP->undefined_value()))->ToObjectChecked());
+ CHECK(code->IsCode());
+
+ HASH_FUNCTION hash = FUNCTION_CAST<HASH_FUNCTION>(code->entry());
+ Handle<String> v8_string = FACTORY->NewStringFromAscii(string);
+ v8_string->set_hash_field(String::kEmptyHashField);
+#ifdef USE_SIMULATOR
+ uint32_t codegen_hash =
+ reinterpret_cast<uint32_t>(CALL_GENERATED_CODE(hash, 0, 0, 0, 0, 0));
+#else
+ uint32_t codegen_hash = hash();
+#endif
+ uint32_t runtime_hash = v8_string->Hash();
+ CHECK(runtime_hash == codegen_hash);
+}
+
+
+void check(uint32_t key) {
+ v8::HandleScope scope;
+ v8::internal::byte buffer[2048];
+ MacroAssembler masm(Isolate::Current(), buffer, sizeof buffer);
+
+ generate(&masm, key);
+
+ CodeDesc desc;
+ masm.GetCode(&desc);
+ Code* code = Code::cast(HEAP->CreateCode(
+ desc,
+ Code::ComputeFlags(Code::STUB),
+ Handle<Object>(HEAP->undefined_value()))->ToObjectChecked());
+ CHECK(code->IsCode());
+
+ HASH_FUNCTION hash = FUNCTION_CAST<HASH_FUNCTION>(code->entry());
+#ifdef USE_SIMULATOR
+ uint32_t codegen_hash =
+ reinterpret_cast<uint32_t>(CALL_GENERATED_CODE(hash, 0, 0, 0, 0, 0));
+#else
+ uint32_t codegen_hash = hash();
+#endif
+
+ uint32_t runtime_hash = ComputeIntegerHash(
+ key,
+ Isolate::Current()->heap()->HashSeed());
+ CHECK(runtime_hash == codegen_hash);
+}
+
+
+void check_twochars(char a, char b) {
+ char ab[2] = {a, b};
+ check(i::Vector<const char>(ab, 2));
+}
+
+
+static uint32_t PseudoRandom(uint32_t i, uint32_t j) {
+ return ~(~((i * 781) ^ (j * 329)));
+}
+
+
+TEST(StringHash) {
+ if (env.IsEmpty()) env = v8::Context::New();
+ for (int a = 0; a < String::kMaxAsciiCharCode; a++) {
+ // Numbers are hashed differently.
+ if (a >= '0' && a <= '9') continue;
+ for (int b = 0; b < String::kMaxAsciiCharCode; b++) {
+ if (b >= '0' && b <= '9') continue;
+ check_twochars(static_cast<char>(a), static_cast<char>(b));
+ }
+ }
+ check(i::Vector<const char>("*", 1));
+ check(i::Vector<const char>(".zZ", 3));
+ check(i::Vector<const char>("muc", 3));
+ check(i::Vector<const char>("(>'_')>", 7));
+ check(i::Vector<const char>("-=[ vee eight ftw ]=-", 21));
+}
+
+
+TEST(NumberHash) {
+ if (env.IsEmpty()) env = v8::Context::New();
+
+ // Some specific numbers
+ for (uint32_t key = 0; key < 42; key += 7) {
+ check(key);
+ }
+
+ // Some pseudo-random numbers
+ static const uint32_t kLimit = 1000;
+ for (uint32_t i = 0; i < 5; i++) {
+ for (uint32_t j = 0; j < 5; j++) {
+ check(PseudoRandom(i, j) % kLimit);
+ }
+ }
+}
+
+#undef __
diff --git a/src/3rdparty/v8/test/cctest/test-heap-profiler.cc b/src/3rdparty/v8/test/cctest/test-heap-profiler.cc
index 87e7a7d..3f5e07d 100644
--- a/src/3rdparty/v8/test/cctest/test-heap-profiler.cc
+++ b/src/3rdparty/v8/test/cctest/test-heap-profiler.cc
@@ -18,14 +18,30 @@ class NamedEntriesDetector {
: has_A2(false), has_B2(false), has_C2(false) {
}
- void Apply(i::HeapEntry** entry_ptr) {
- if (IsReachableNodeWithName(*entry_ptr, "A2")) has_A2 = true;
- if (IsReachableNodeWithName(*entry_ptr, "B2")) has_B2 = true;
- if (IsReachableNodeWithName(*entry_ptr, "C2")) has_C2 = true;
+ void CheckEntry(i::HeapEntry* entry) {
+ if (strcmp(entry->name(), "A2") == 0) has_A2 = true;
+ if (strcmp(entry->name(), "B2") == 0) has_B2 = true;
+ if (strcmp(entry->name(), "C2") == 0) has_C2 = true;
}
- static bool IsReachableNodeWithName(i::HeapEntry* entry, const char* name) {
- return strcmp(name, entry->name()) == 0 && entry->painted_reachable();
+ void CheckAllReachables(i::HeapEntry* root) {
+ i::List<i::HeapEntry*> list(10);
+ list.Add(root);
+ root->paint();
+ CheckEntry(root);
+ while (!list.is_empty()) {
+ i::HeapEntry* entry = list.RemoveLast();
+ i::Vector<i::HeapGraphEdge> children = entry->children();
+ for (int i = 0; i < children.length(); ++i) {
+ if (children[i].type() == i::HeapGraphEdge::kShortcut) continue;
+ i::HeapEntry* child = children[i].to();
+ if (!child->painted()) {
+ list.Add(child);
+ child->paint();
+ CheckEntry(child);
+ }
+ }
+ }
}
bool has_A2;
@@ -90,10 +106,6 @@ TEST(HeapSnapshot) {
const_cast<i::HeapSnapshot*>(
reinterpret_cast<const i::HeapSnapshot*>(snapshot_env2));
const v8::HeapGraphNode* global_env2 = GetGlobalObject(snapshot_env2);
- // Paint all nodes reachable from global object.
- i_snapshot_env2->ClearPaint();
- const_cast<i::HeapEntry*>(
- reinterpret_cast<const i::HeapEntry*>(global_env2))->PaintAllReachable();
// Verify, that JS global object of env2 has '..2' properties.
const v8::HeapGraphNode* a2_node =
@@ -105,8 +117,11 @@ TEST(HeapSnapshot) {
NULL, GetProperty(global_env2, v8::HeapGraphEdge::kShortcut, "b2_2"));
CHECK_NE(NULL, GetProperty(global_env2, v8::HeapGraphEdge::kShortcut, "c2"));
+ // Paint all nodes reachable from global object.
NamedEntriesDetector det;
- i_snapshot_env2->IterateEntries(&det);
+ i_snapshot_env2->ClearPaint();
+ det.CheckAllReachables(const_cast<i::HeapEntry*>(
+ reinterpret_cast<const i::HeapEntry*>(global_env2)));
CHECK(det.has_A2);
CHECK(det.has_B2);
CHECK(det.has_C2);
@@ -136,14 +151,47 @@ TEST(HeapSnapshotObjectSizes) {
GetProperty(x, v8::HeapGraphEdge::kProperty, "b");
CHECK_NE(NULL, x2);
- // Test approximate sizes.
- CHECK_EQ(x->GetSelfSize() * 3, x->GetRetainedSize(false));
- CHECK_EQ(x1->GetSelfSize(), x1->GetRetainedSize(false));
- CHECK_EQ(x2->GetSelfSize(), x2->GetRetainedSize(false));
- // Test exact sizes.
- CHECK_EQ(x->GetSelfSize() * 3, x->GetRetainedSize(true));
- CHECK_EQ(x1->GetSelfSize(), x1->GetRetainedSize(true));
- CHECK_EQ(x2->GetSelfSize(), x2->GetRetainedSize(true));
+ // Test sizes.
+ CHECK_EQ(x->GetSelfSize() * 3, x->GetRetainedSize());
+ CHECK_EQ(x1->GetSelfSize(), x1->GetRetainedSize());
+ CHECK_EQ(x2->GetSelfSize(), x2->GetRetainedSize());
+}
+
+
+TEST(BoundFunctionInSnapshot) {
+ v8::HandleScope scope;
+ LocalContext env;
+ CompileRun(
+ "function myFunction(a, b) { this.a = a; this.b = b; }\n"
+ "function AAAAA() {}\n"
+ "boundFunction = myFunction.bind(new AAAAA(), 20, new Number(12)); \n");
+ const v8::HeapSnapshot* snapshot =
+ v8::HeapProfiler::TakeSnapshot(v8_str("sizes"));
+ const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
+ const v8::HeapGraphNode* f =
+ GetProperty(global, v8::HeapGraphEdge::kShortcut, "boundFunction");
+ CHECK(f);
+ CHECK_EQ(v8::String::New("native_bind"), f->GetName());
+ const v8::HeapGraphNode* bindings =
+ GetProperty(f, v8::HeapGraphEdge::kInternal, "bindings");
+ CHECK_NE(NULL, bindings);
+ CHECK_EQ(v8::HeapGraphNode::kArray, bindings->GetType());
+ CHECK_EQ(4, bindings->GetChildrenCount());
+
+ const v8::HeapGraphNode* bound_this = GetProperty(
+ f, v8::HeapGraphEdge::kShortcut, "bound_this");
+ CHECK(bound_this);
+ CHECK_EQ(v8::HeapGraphNode::kObject, bound_this->GetType());
+
+ const v8::HeapGraphNode* bound_function = GetProperty(
+ f, v8::HeapGraphEdge::kShortcut, "bound_function");
+ CHECK(bound_function);
+ CHECK_EQ(v8::HeapGraphNode::kClosure, bound_function->GetType());
+
+ const v8::HeapGraphNode* bound_argument = GetProperty(
+ f, v8::HeapGraphEdge::kShortcut, "bound_argument_1");
+ CHECK(bound_argument);
+ CHECK_EQ(v8::HeapGraphNode::kObject, bound_argument->GetType());
}
@@ -296,14 +344,67 @@ TEST(HeapSnapshotInternalReferences) {
}
-// Trying to introduce a check helper for uint64_t causes many
+// Trying to introduce a check helper for uint32_t causes many
// overloading ambiguities, so it seems easier just to cast
// them to a signed type.
-#define CHECK_EQ_UINT64_T(a, b) \
- CHECK_EQ(static_cast<int64_t>(a), static_cast<int64_t>(b))
-#define CHECK_NE_UINT64_T(a, b) \
+#define CHECK_EQ_SNAPSHOT_OBJECT_ID(a, b) \
+ CHECK_EQ(static_cast<int32_t>(a), static_cast<int32_t>(b))
+#define CHECK_NE_SNAPSHOT_OBJECT_ID(a, b) \
CHECK((a) != (b)) // NOLINT
+TEST(HeapEntryIdsAndArrayShift) {
+ v8::HandleScope scope;
+ LocalContext env;
+
+ CompileRun(
+ "function AnObject() {\n"
+ " this.first = 'first';\n"
+ " this.second = 'second';\n"
+ "}\n"
+ "var a = new Array();\n"
+ "for (var i = 0; i < 10; ++i)\n"
+ " a.push(new AnObject());\n");
+ const v8::HeapSnapshot* snapshot1 =
+ v8::HeapProfiler::TakeSnapshot(v8_str("s1"));
+
+ CompileRun(
+ "for (var i = 0; i < 1; ++i)\n"
+ " a.shift();\n");
+
+ HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
+
+ const v8::HeapSnapshot* snapshot2 =
+ v8::HeapProfiler::TakeSnapshot(v8_str("s2"));
+
+ const v8::HeapGraphNode* global1 = GetGlobalObject(snapshot1);
+ const v8::HeapGraphNode* global2 = GetGlobalObject(snapshot2);
+ CHECK_NE_SNAPSHOT_OBJECT_ID(0, global1->GetId());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(global1->GetId(), global2->GetId());
+
+ const v8::HeapGraphNode* a1 =
+ GetProperty(global1, v8::HeapGraphEdge::kProperty, "a");
+ CHECK_NE(NULL, a1);
+ const v8::HeapGraphNode* e1 =
+ GetProperty(a1, v8::HeapGraphEdge::kHidden, "1");
+ CHECK_NE(NULL, e1);
+ const v8::HeapGraphNode* k1 =
+ GetProperty(e1, v8::HeapGraphEdge::kInternal, "elements");
+ CHECK_NE(NULL, k1);
+ const v8::HeapGraphNode* a2 =
+ GetProperty(global2, v8::HeapGraphEdge::kProperty, "a");
+ CHECK_NE(NULL, a2);
+ const v8::HeapGraphNode* e2 =
+ GetProperty(a2, v8::HeapGraphEdge::kHidden, "1");
+ CHECK_NE(NULL, e2);
+ const v8::HeapGraphNode* k2 =
+ GetProperty(e2, v8::HeapGraphEdge::kInternal, "elements");
+ CHECK_NE(NULL, k2);
+
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(a1->GetId(), a2->GetId());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(e1->GetId(), e2->GetId());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(k1->GetId(), k2->GetId());
+}
+
TEST(HeapEntryIdsAndGC) {
v8::HandleScope scope;
LocalContext env;
@@ -313,50 +414,56 @@ TEST(HeapEntryIdsAndGC) {
"function B(x) { this.x = x; }\n"
"var a = new A();\n"
"var b = new B(a);");
+ v8::Local<v8::String> s1_str = v8_str("s1");
+ v8::Local<v8::String> s2_str = v8_str("s2");
const v8::HeapSnapshot* snapshot1 =
- v8::HeapProfiler::TakeSnapshot(v8_str("s1"));
+ v8::HeapProfiler::TakeSnapshot(s1_str);
HEAP->CollectAllGarbage(i::Heap::kNoGCFlags);
const v8::HeapSnapshot* snapshot2 =
- v8::HeapProfiler::TakeSnapshot(v8_str("s2"));
+ v8::HeapProfiler::TakeSnapshot(s2_str);
+
+ CHECK_GT(snapshot1->GetMaxSnapshotJSObjectId(), 7000);
+ CHECK(snapshot1->GetMaxSnapshotJSObjectId() <=
+ snapshot2->GetMaxSnapshotJSObjectId());
const v8::HeapGraphNode* global1 = GetGlobalObject(snapshot1);
const v8::HeapGraphNode* global2 = GetGlobalObject(snapshot2);
- CHECK_NE_UINT64_T(0, global1->GetId());
- CHECK_EQ_UINT64_T(global1->GetId(), global2->GetId());
+ CHECK_NE_SNAPSHOT_OBJECT_ID(0, global1->GetId());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(global1->GetId(), global2->GetId());
const v8::HeapGraphNode* A1 =
GetProperty(global1, v8::HeapGraphEdge::kProperty, "A");
CHECK_NE(NULL, A1);
const v8::HeapGraphNode* A2 =
GetProperty(global2, v8::HeapGraphEdge::kProperty, "A");
CHECK_NE(NULL, A2);
- CHECK_NE_UINT64_T(0, A1->GetId());
- CHECK_EQ_UINT64_T(A1->GetId(), A2->GetId());
+ CHECK_NE_SNAPSHOT_OBJECT_ID(0, A1->GetId());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(A1->GetId(), A2->GetId());
const v8::HeapGraphNode* B1 =
GetProperty(global1, v8::HeapGraphEdge::kProperty, "B");
CHECK_NE(NULL, B1);
const v8::HeapGraphNode* B2 =
GetProperty(global2, v8::HeapGraphEdge::kProperty, "B");
CHECK_NE(NULL, B2);
- CHECK_NE_UINT64_T(0, B1->GetId());
- CHECK_EQ_UINT64_T(B1->GetId(), B2->GetId());
+ CHECK_NE_SNAPSHOT_OBJECT_ID(0, B1->GetId());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(B1->GetId(), B2->GetId());
const v8::HeapGraphNode* a1 =
GetProperty(global1, v8::HeapGraphEdge::kProperty, "a");
CHECK_NE(NULL, a1);
const v8::HeapGraphNode* a2 =
GetProperty(global2, v8::HeapGraphEdge::kProperty, "a");
CHECK_NE(NULL, a2);
- CHECK_NE_UINT64_T(0, a1->GetId());
- CHECK_EQ_UINT64_T(a1->GetId(), a2->GetId());
+ CHECK_NE_SNAPSHOT_OBJECT_ID(0, a1->GetId());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(a1->GetId(), a2->GetId());
const v8::HeapGraphNode* b1 =
GetProperty(global1, v8::HeapGraphEdge::kProperty, "b");
CHECK_NE(NULL, b1);
const v8::HeapGraphNode* b2 =
GetProperty(global2, v8::HeapGraphEdge::kProperty, "b");
CHECK_NE(NULL, b2);
- CHECK_NE_UINT64_T(0, b1->GetId());
- CHECK_EQ_UINT64_T(b1->GetId(), b2->GetId());
+ CHECK_NE_SNAPSHOT_OBJECT_ID(0, b1->GetId());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(b1->GetId(), b2->GetId());
}
@@ -585,6 +692,22 @@ TEST(HeapSnapshotJSONSerializationAborting) {
}
+static void CheckChildrenIds(const v8::HeapSnapshot* snapshot,
+ const v8::HeapGraphNode* node,
+ int level, int max_level) {
+ if (level > max_level) return;
+ CHECK_EQ(node, snapshot->GetNodeById(node->GetId()));
+ for (int i = 0, count = node->GetChildrenCount(); i < count; ++i) {
+ const v8::HeapGraphEdge* prop = node->GetChild(i);
+ const v8::HeapGraphNode* child =
+ snapshot->GetNodeById(prop->GetToNode()->GetId());
+ CHECK_EQ_SNAPSHOT_OBJECT_ID(prop->GetToNode()->GetId(), child->GetId());
+ CHECK_EQ(prop->GetToNode(), child);
+ CheckChildrenIds(snapshot, child, level + 1, max_level);
+ }
+}
+
+
TEST(HeapSnapshotGetNodeById) {
v8::HandleScope scope;
LocalContext env;
@@ -592,12 +715,7 @@ TEST(HeapSnapshotGetNodeById) {
const v8::HeapSnapshot* snapshot =
v8::HeapProfiler::TakeSnapshot(v8_str("id"));
const v8::HeapGraphNode* root = snapshot->GetRoot();
- CHECK_EQ(root, snapshot->GetNodeById(root->GetId()));
- for (int i = 0, count = root->GetChildrenCount(); i < count; ++i) {
- const v8::HeapGraphEdge* prop = root->GetChild(i);
- CHECK_EQ(
- prop->GetToNode(), snapshot->GetNodeById(prop->GetToNode()->GetId()));
- }
+ CheckChildrenIds(snapshot, root, 0, 3);
// Check a big id, which should not exist yet.
CHECK_EQ(NULL, snapshot->GetNodeById(0x1000000UL));
}
@@ -629,7 +747,7 @@ TEST(TakeHeapSnapshotAborting) {
LocalContext env;
const int snapshots_count = v8::HeapProfiler::GetSnapshotsCount();
- TestActivityControl aborting_control(3);
+ TestActivityControl aborting_control(1);
const v8::HeapSnapshot* no_snapshot =
v8::HeapProfiler::TakeSnapshot(v8_str("abort"),
v8::HeapSnapshot::kFull,
@@ -655,11 +773,13 @@ namespace {
class TestRetainedObjectInfo : public v8::RetainedObjectInfo {
public:
TestRetainedObjectInfo(int hash,
+ const char* group_label,
const char* label,
intptr_t element_count = -1,
intptr_t size = -1)
: disposed_(false),
hash_(hash),
+ group_label_(group_label),
label_(label),
element_count_(element_count),
size_(size) {
@@ -674,6 +794,7 @@ class TestRetainedObjectInfo : public v8::RetainedObjectInfo {
return GetHash() == other->GetHash();
}
virtual intptr_t GetHash() { return hash_; }
+ virtual const char* GetGroupLabel() { return group_label_; }
virtual const char* GetLabel() { return label_; }
virtual intptr_t GetElementCount() { return element_count_; }
virtual intptr_t GetSizeInBytes() { return size_; }
@@ -685,15 +806,15 @@ class TestRetainedObjectInfo : public v8::RetainedObjectInfo {
if (wrapper->IsString()) {
v8::String::AsciiValue ascii(wrapper);
if (strcmp(*ascii, "AAA") == 0)
- return new TestRetainedObjectInfo(1, "aaa", 100);
+ return new TestRetainedObjectInfo(1, "aaa-group", "aaa", 100);
else if (strcmp(*ascii, "BBB") == 0)
- return new TestRetainedObjectInfo(1, "aaa", 100);
+ return new TestRetainedObjectInfo(1, "aaa-group", "aaa", 100);
}
} else if (class_id == 2) {
if (wrapper->IsString()) {
v8::String::AsciiValue ascii(wrapper);
if (strcmp(*ascii, "CCC") == 0)
- return new TestRetainedObjectInfo(2, "ccc");
+ return new TestRetainedObjectInfo(2, "ccc-group", "ccc");
}
}
CHECK(false);
@@ -706,6 +827,7 @@ class TestRetainedObjectInfo : public v8::RetainedObjectInfo {
bool disposed_;
int category_;
int hash_;
+ const char* group_label_;
const char* label_;
intptr_t element_count_;
intptr_t size_;
@@ -758,18 +880,21 @@ TEST(HeapSnapshotRetainedObjectInfo) {
delete TestRetainedObjectInfo::instances[i];
}
- const v8::HeapGraphNode* natives = GetNode(
- snapshot->GetRoot(), v8::HeapGraphNode::kObject, "(Native objects)");
- CHECK_NE(NULL, natives);
- CHECK_EQ(2, natives->GetChildrenCount());
+ const v8::HeapGraphNode* native_group_aaa = GetNode(
+ snapshot->GetRoot(), v8::HeapGraphNode::kSynthetic, "aaa-group");
+ CHECK_NE(NULL, native_group_aaa);
+ CHECK_EQ(1, native_group_aaa->GetChildrenCount());
const v8::HeapGraphNode* aaa = GetNode(
- natives, v8::HeapGraphNode::kNative, "aaa / 100 entries");
+ native_group_aaa, v8::HeapGraphNode::kNative, "aaa / 100 entries");
CHECK_NE(NULL, aaa);
+ CHECK_EQ(2, aaa->GetChildrenCount());
+
+ const v8::HeapGraphNode* native_group_ccc = GetNode(
+ snapshot->GetRoot(), v8::HeapGraphNode::kSynthetic, "ccc-group");
const v8::HeapGraphNode* ccc = GetNode(
- natives, v8::HeapGraphNode::kNative, "ccc");
+ native_group_ccc, v8::HeapGraphNode::kNative, "ccc");
CHECK_NE(NULL, ccc);
- CHECK_EQ(2, aaa->GetChildrenCount());
const v8::HeapGraphNode* n_AAA = GetNode(
aaa, v8::HeapGraphNode::kString, "AAA");
CHECK_NE(NULL, n_AAA);
@@ -787,6 +912,75 @@ TEST(HeapSnapshotRetainedObjectInfo) {
}
+class GraphWithImplicitRefs {
+ public:
+ static const int kObjectsCount = 4;
+ explicit GraphWithImplicitRefs(LocalContext* env) {
+ CHECK_EQ(NULL, instance_);
+ instance_ = this;
+ for (int i = 0; i < kObjectsCount; i++) {
+ objects_[i] = v8::Persistent<v8::Object>::New(v8::Object::New());
+ }
+ (*env)->Global()->Set(v8_str("root_object"), objects_[0]);
+ }
+ ~GraphWithImplicitRefs() {
+ instance_ = NULL;
+ }
+
+ static void gcPrologue() {
+ instance_->AddImplicitReferences();
+ }
+
+ private:
+ void AddImplicitReferences() {
+ // 0 -> 1
+ v8::V8::AddImplicitReferences(
+ v8::Persistent<v8::Object>::Cast(objects_[0]), &objects_[1], 1);
+    // Adding two more references (note length=2 in params): 1 -> 2, 1 -> 3
+ v8::V8::AddImplicitReferences(
+ v8::Persistent<v8::Object>::Cast(objects_[1]), &objects_[2], 2);
+ }
+
+ v8::Persistent<v8::Value> objects_[kObjectsCount];
+ static GraphWithImplicitRefs* instance_;
+};
+
+GraphWithImplicitRefs* GraphWithImplicitRefs::instance_ = NULL;
+
+
+TEST(HeapSnapshotImplicitReferences) {
+ v8::HandleScope scope;
+ LocalContext env;
+
+ GraphWithImplicitRefs graph(&env);
+ v8::V8::SetGlobalGCPrologueCallback(&GraphWithImplicitRefs::gcPrologue);
+
+ const v8::HeapSnapshot* snapshot =
+ v8::HeapProfiler::TakeSnapshot(v8_str("implicit_refs"));
+
+ const v8::HeapGraphNode* global_object = GetGlobalObject(snapshot);
+ // Use kShortcut type to skip intermediate JSGlobalPropertyCell
+ const v8::HeapGraphNode* obj0 = GetProperty(
+ global_object, v8::HeapGraphEdge::kShortcut, "root_object");
+ CHECK(obj0);
+ CHECK_EQ(v8::HeapGraphNode::kObject, obj0->GetType());
+ const v8::HeapGraphNode* obj1 = GetProperty(
+ obj0, v8::HeapGraphEdge::kInternal, "native");
+ CHECK(obj1);
+ int implicit_targets_count = 0;
+ for (int i = 0, count = obj1->GetChildrenCount(); i < count; ++i) {
+ const v8::HeapGraphEdge* prop = obj1->GetChild(i);
+ v8::String::AsciiValue prop_name(prop->GetName());
+ if (prop->GetType() == v8::HeapGraphEdge::kInternal &&
+ strcmp("native", *prop_name) == 0) {
+ ++implicit_targets_count;
+ }
+ }
+ CHECK_EQ(2, implicit_targets_count);
+ v8::V8::SetGlobalGCPrologueCallback(NULL);
+}
+
+
TEST(DeleteAllHeapSnapshots) {
v8::HandleScope scope;
LocalContext env;
@@ -895,6 +1089,20 @@ TEST(DocumentURLWithException) {
}
+TEST(NoHandleLeaks) {
+ v8::HandleScope scope;
+ LocalContext env;
+
+ CompileRun("document = { URL:\"abcdefgh\" };");
+
+ v8::Handle<v8::String> name(v8_str("leakz"));
+ int count_before = i::HandleScope::NumberOfHandles();
+ v8::HeapProfiler::TakeSnapshot(name);
+ int count_after = i::HandleScope::NumberOfHandles();
+ CHECK_EQ(count_before, count_after);
+}
+
+
TEST(NodesIteration) {
v8::HandleScope scope;
LocalContext env;
@@ -1023,3 +1231,144 @@ TEST(GetConstructorName) {
CHECK_EQ(0, StringCmp(
"Object", i::V8HeapExplorer::GetConstructorName(*js_obj6)));
}
+
+
+TEST(FastCaseGetter) {
+ v8::HandleScope scope;
+ LocalContext env;
+
+ CompileRun("var obj1 = {};\n"
+ "obj1.__defineGetter__('propWithGetter', function Y() {\n"
+ " return 42;\n"
+ "});\n"
+ "obj1.__defineSetter__('propWithSetter', function Z(value) {\n"
+ " return this.value_ = value;\n"
+ "});\n");
+ const v8::HeapSnapshot* snapshot =
+ v8::HeapProfiler::TakeSnapshot(v8_str("fastCaseGetter"));
+
+ const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
+ CHECK_NE(NULL, global);
+ const v8::HeapGraphNode* obj1 =
+ GetProperty(global, v8::HeapGraphEdge::kShortcut, "obj1");
+ CHECK_NE(NULL, obj1);
+ const v8::HeapGraphNode* getterFunction =
+ GetProperty(obj1, v8::HeapGraphEdge::kProperty, "get-propWithGetter");
+ CHECK_NE(NULL, getterFunction);
+ const v8::HeapGraphNode* setterFunction =
+ GetProperty(obj1, v8::HeapGraphEdge::kProperty, "set-propWithSetter");
+ CHECK_NE(NULL, setterFunction);
+}
+
+
+bool HasWeakEdge(const v8::HeapGraphNode* node) {
+ for (int i = 0; i < node->GetChildrenCount(); ++i) {
+ const v8::HeapGraphEdge* handle_edge = node->GetChild(i);
+ if (handle_edge->GetType() == v8::HeapGraphEdge::kWeak) return true;
+ }
+ return false;
+}
+
+
+bool HasWeakGlobalHandle() {
+ const v8::HeapSnapshot* snapshot =
+ v8::HeapProfiler::TakeSnapshot(v8_str("weaks"));
+ const v8::HeapGraphNode* gc_roots = GetNode(
+ snapshot->GetRoot(), v8::HeapGraphNode::kObject, "(GC roots)");
+ CHECK_NE(NULL, gc_roots);
+ const v8::HeapGraphNode* global_handles = GetNode(
+ gc_roots, v8::HeapGraphNode::kObject, "(Global handles)");
+ CHECK_NE(NULL, global_handles);
+ return HasWeakEdge(global_handles);
+}
+
+
+static void PersistentHandleCallback(v8::Persistent<v8::Value> handle, void*) {
+ handle.Dispose();
+}
+
+
+TEST(WeakGlobalHandle) {
+ v8::HandleScope scope;
+ LocalContext env;
+
+ CHECK(!HasWeakGlobalHandle());
+
+ v8::Persistent<v8::Object> handle =
+ v8::Persistent<v8::Object>::New(v8::Object::New());
+ handle.MakeWeak(NULL, PersistentHandleCallback);
+
+ CHECK(HasWeakGlobalHandle());
+}
+
+
+TEST(WeakGlobalContextRefs) {
+ v8::HandleScope scope;
+ LocalContext env;
+
+ const v8::HeapSnapshot* snapshot =
+ v8::HeapProfiler::TakeSnapshot(v8_str("weaks"));
+ const v8::HeapGraphNode* gc_roots = GetNode(
+ snapshot->GetRoot(), v8::HeapGraphNode::kObject, "(GC roots)");
+ CHECK_NE(NULL, gc_roots);
+ const v8::HeapGraphNode* global_handles = GetNode(
+ gc_roots, v8::HeapGraphNode::kObject, "(Global handles)");
+ CHECK_NE(NULL, global_handles);
+ const v8::HeapGraphNode* global_context = GetNode(
+ global_handles, v8::HeapGraphNode::kHidden, "system / GlobalContext");
+ CHECK_NE(NULL, global_context);
+ CHECK(HasWeakEdge(global_context));
+}
+
+
+TEST(SfiAndJsFunctionWeakRefs) {
+ v8::HandleScope scope;
+ LocalContext env;
+
+ CompileRun(
+ "fun = (function (x) { return function () { return x + 1; } })(1);");
+ const v8::HeapSnapshot* snapshot =
+ v8::HeapProfiler::TakeSnapshot(v8_str("fun"));
+ const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
+ CHECK_NE(NULL, global);
+ const v8::HeapGraphNode* fun =
+ GetProperty(global, v8::HeapGraphEdge::kShortcut, "fun");
+ CHECK(HasWeakEdge(fun));
+ const v8::HeapGraphNode* shared =
+ GetProperty(fun, v8::HeapGraphEdge::kInternal, "shared");
+ CHECK(HasWeakEdge(shared));
+}
+
+
+TEST(PersistentHandleCount) {
+ v8::HandleScope scope;
+ LocalContext env;
+
+ // V8 also uses global handles internally, so we can't test for an absolute
+ // number.
+ int global_handle_count = v8::HeapProfiler::GetPersistentHandleCount();
+
+ // Create some persistent handles.
+ v8::Persistent<v8::String> p_AAA =
+ v8::Persistent<v8::String>::New(v8_str("AAA"));
+ CHECK_EQ(global_handle_count + 1,
+ v8::HeapProfiler::GetPersistentHandleCount());
+ v8::Persistent<v8::String> p_BBB =
+ v8::Persistent<v8::String>::New(v8_str("BBB"));
+ CHECK_EQ(global_handle_count + 2,
+ v8::HeapProfiler::GetPersistentHandleCount());
+ v8::Persistent<v8::String> p_CCC =
+ v8::Persistent<v8::String>::New(v8_str("CCC"));
+ CHECK_EQ(global_handle_count + 3,
+ v8::HeapProfiler::GetPersistentHandleCount());
+
+  // Dispose the persistent handles in a different order.
+ p_AAA.Dispose();
+ CHECK_EQ(global_handle_count + 2,
+ v8::HeapProfiler::GetPersistentHandleCount());
+ p_CCC.Dispose();
+ CHECK_EQ(global_handle_count + 1,
+ v8::HeapProfiler::GetPersistentHandleCount());
+ p_BBB.Dispose();
+ CHECK_EQ(global_handle_count, v8::HeapProfiler::GetPersistentHandleCount());
+}
diff --git a/src/3rdparty/v8/test/cctest/test-heap.cc b/src/3rdparty/v8/test/cctest/test-heap.cc
index d6b3c80..f75dc30 100644
--- a/src/3rdparty/v8/test/cctest/test-heap.cc
+++ b/src/3rdparty/v8/test/cctest/test-heap.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
#include <stdlib.h>
@@ -612,7 +612,7 @@ TEST(ObjectProperties) {
CHECK(!obj->HasLocalProperty(*second));
// check string and symbol match
- static const char* string1 = "fisk";
+ const char* string1 = "fisk";
Handle<String> s1 = FACTORY->NewStringFromAscii(CStrVector(string1));
obj->SetProperty(
*s1, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
@@ -620,7 +620,7 @@ TEST(ObjectProperties) {
CHECK(obj->HasLocalProperty(*s1_symbol));
// check symbol and string match
- static const char* string2 = "fugl";
+ const char* string2 = "fugl";
Handle<String> s2_symbol = FACTORY->LookupAsciiSymbol(string2);
obj->SetProperty(
*s2_symbol, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
@@ -676,7 +676,7 @@ TEST(JSArray) {
CHECK(array->HasFastTypeElements());
// array[length] = name.
- array->SetElement(0, *name, kNonStrictMode, true)->ToObjectChecked();
+ array->SetElement(0, *name, NONE, kNonStrictMode)->ToObjectChecked();
CHECK_EQ(Smi::FromInt(1), array->length());
CHECK_EQ(array->GetElement(0), *name);
@@ -691,7 +691,7 @@ TEST(JSArray) {
CHECK(array->HasDictionaryElements()); // Must be in slow mode.
// array[length] = name.
- array->SetElement(int_length, *name, kNonStrictMode, true)->ToObjectChecked();
+ array->SetElement(int_length, *name, NONE, kNonStrictMode)->ToObjectChecked();
uint32_t new_int_length = 0;
CHECK(array->length()->ToArrayIndex(&new_int_length));
CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
@@ -718,8 +718,8 @@ TEST(JSObjectCopy) {
obj->SetProperty(
*second, Smi::FromInt(2), NONE, kNonStrictMode)->ToObjectChecked();
- obj->SetElement(0, *first, kNonStrictMode, true)->ToObjectChecked();
- obj->SetElement(1, *second, kNonStrictMode, true)->ToObjectChecked();
+ obj->SetElement(0, *first, NONE, kNonStrictMode)->ToObjectChecked();
+ obj->SetElement(1, *second, NONE, kNonStrictMode)->ToObjectChecked();
// Make the clone.
Handle<JSObject> clone = Copy(obj);
@@ -737,8 +737,8 @@ TEST(JSObjectCopy) {
clone->SetProperty(
*second, Smi::FromInt(1), NONE, kNonStrictMode)->ToObjectChecked();
- clone->SetElement(0, *second, kNonStrictMode, true)->ToObjectChecked();
- clone->SetElement(1, *first, kNonStrictMode, true)->ToObjectChecked();
+ clone->SetElement(0, *second, NONE, kNonStrictMode)->ToObjectChecked();
+ clone->SetElement(1, *first, NONE, kNonStrictMode)->ToObjectChecked();
CHECK_EQ(obj->GetElement(1), clone->GetElement(0));
CHECK_EQ(obj->GetElement(0), clone->GetElement(1));
@@ -811,7 +811,7 @@ TEST(Iteration) {
// Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
objs[next_objs_index++] = FACTORY->NewJSArray(10);
- objs[next_objs_index++] = FACTORY->NewJSArray(10, TENURED);
+ objs[next_objs_index++] = FACTORY->NewJSArray(10, FAST_ELEMENTS, TENURED);
// Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
objs[next_objs_index++] =
@@ -820,7 +820,7 @@ TEST(Iteration) {
FACTORY->NewStringFromAscii(CStrVector("abcdefghij"), TENURED);
// Allocate a large string (for large object space).
- int large_size = HEAP->MaxObjectSizeInPagedSpace() + 1;
+ int large_size = Page::kMaxNonCodeHeapObjectSize + 1;
char* str = new char[large_size];
for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
str[large_size - 1] = '\0';
@@ -959,17 +959,17 @@ TEST(TestCodeFlushing) {
CHECK(function->shared()->is_compiled());
// TODO(1609) Currently incremental marker does not support code flushing.
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
CHECK(function->shared()->is_compiled());
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
- HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
// foo should no longer be in the compilation cache
CHECK(!function->shared()->is_compiled() || function->IsOptimized());
@@ -1187,6 +1187,44 @@ TEST(TestInternalWeakListsTraverseWithGC) {
}
+TEST(TestSizeOfObjects) {
+ v8::V8::Initialize();
+
+ // Get initial heap size after several full GCs, which will stabilize
+ // the heap size and return with sweeping finished completely.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(HEAP->old_pointer_space()->IsSweepingComplete());
+ int initial_size = static_cast<int>(HEAP->SizeOfObjects());
+
+ {
+ // Allocate objects on several different old-space pages so that
+ // lazy sweeping kicks in for subsequent GC runs.
+ AlwaysAllocateScope always_allocate;
+ int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
+ for (int i = 1; i <= 100; i++) {
+ HEAP->AllocateFixedArray(8192, TENURED)->ToObjectChecked();
+ CHECK_EQ(initial_size + i * filler_size,
+ static_cast<int>(HEAP->SizeOfObjects()));
+ }
+ }
+
+ // The heap size should go back to initial size after a full GC, even
+ // though sweeping didn't finish yet.
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(!HEAP->old_pointer_space()->IsSweepingComplete());
+ CHECK_EQ(initial_size, static_cast<int>(HEAP->SizeOfObjects()));
+
+ // Advancing the sweeper step-wise should not change the heap size.
+ while (!HEAP->old_pointer_space()->IsSweepingComplete()) {
+ HEAP->old_pointer_space()->AdvanceSweeper(KB);
+ CHECK_EQ(initial_size, static_cast<int>(HEAP->SizeOfObjects()));
+ }
+}
+
+
TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
InitializeVM();
HEAP->EnsureHeapIsIterable();
@@ -1220,6 +1258,19 @@ TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
}
+static void FillUpNewSpace(NewSpace* new_space) {
+ // Fill up new space to the point that it is completely full. Make sure
+ // that the scavenger does not undo the filling.
+ v8::HandleScope scope;
+ AlwaysAllocateScope always_allocate;
+ intptr_t available = new_space->EffectiveCapacity() - new_space->Size();
+ intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
+ for (intptr_t i = 0; i < number_of_fillers; i++) {
+ CHECK(HEAP->InNewSpace(*FACTORY->NewFixedArray(32, NOT_TENURED)));
+ }
+}
+
+
TEST(GrowAndShrinkNewSpace) {
InitializeVM();
NewSpace* new_space = HEAP->new_space();
@@ -1231,18 +1282,8 @@ TEST(GrowAndShrinkNewSpace) {
new_capacity = new_space->Capacity();
CHECK(2 * old_capacity == new_capacity);
- // Fill up new space to the point that it is completely full. Make sure
- // that the scavenger does not undo the filling.
old_capacity = new_space->Capacity();
- {
- v8::HandleScope scope;
- AlwaysAllocateScope always_allocate;
- intptr_t available = new_space->EffectiveCapacity() - new_space->Size();
- intptr_t number_of_fillers = (available / FixedArray::SizeFor(1000)) - 10;
- for (intptr_t i = 0; i < number_of_fillers; i++) {
- CHECK(HEAP->InNewSpace(*FACTORY->NewFixedArray(1000, NOT_TENURED)));
- }
- }
+ FillUpNewSpace(new_space);
new_capacity = new_space->Capacity();
CHECK(old_capacity == new_capacity);
@@ -1270,3 +1311,401 @@ TEST(GrowAndShrinkNewSpace) {
new_capacity = new_space->Capacity();
CHECK(old_capacity == new_capacity);
}
+
+
+TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
+ InitializeVM();
+ v8::HandleScope scope;
+ NewSpace* new_space = HEAP->new_space();
+ intptr_t old_capacity, new_capacity;
+ old_capacity = new_space->Capacity();
+ new_space->Grow();
+ new_capacity = new_space->Capacity();
+ CHECK(2 * old_capacity == new_capacity);
+ FillUpNewSpace(new_space);
+ HEAP->CollectAllAvailableGarbage();
+ new_capacity = new_space->Capacity();
+ CHECK(old_capacity == new_capacity);
+}
+
+
+static int NumberOfGlobalObjects() {
+ int count = 0;
+ HeapIterator iterator;
+ for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
+ if (obj->IsGlobalObject()) count++;
+ }
+ return count;
+}
+
+
+// Test that we don't embed maps from foreign contexts into
+// optimized code.
+TEST(LeakGlobalContextViaMap) {
+ i::FLAG_allow_natives_syntax = true;
+ v8::HandleScope outer_scope;
+ v8::Persistent<v8::Context> ctx1 = v8::Context::New();
+ v8::Persistent<v8::Context> ctx2 = v8::Context::New();
+ ctx1->Enter();
+
+ HEAP->CollectAllAvailableGarbage();
+ CHECK_EQ(4, NumberOfGlobalObjects());
+
+ {
+ v8::HandleScope inner_scope;
+ CompileRun("var v = {x: 42}");
+ v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
+ ctx2->Enter();
+ ctx2->Global()->Set(v8_str("o"), v);
+ v8::Local<v8::Value> res = CompileRun(
+ "function f() { return o.x; }"
+ "for (var i = 0; i < 10; ++i) f();"
+ "%OptimizeFunctionOnNextCall(f);"
+ "f();");
+ CHECK_EQ(42, res->Int32Value());
+ ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
+ ctx2->Exit();
+ ctx1->Exit();
+ ctx1.Dispose();
+ }
+ HEAP->CollectAllAvailableGarbage();
+ CHECK_EQ(2, NumberOfGlobalObjects());
+ ctx2.Dispose();
+ HEAP->CollectAllAvailableGarbage();
+ CHECK_EQ(0, NumberOfGlobalObjects());
+}
+
+
+// Test that we don't embed functions from foreign contexts into
+// optimized code.
+TEST(LeakGlobalContextViaFunction) {
+ i::FLAG_allow_natives_syntax = true;
+ v8::HandleScope outer_scope;
+ v8::Persistent<v8::Context> ctx1 = v8::Context::New();
+ v8::Persistent<v8::Context> ctx2 = v8::Context::New();
+ ctx1->Enter();
+
+ HEAP->CollectAllAvailableGarbage();
+ CHECK_EQ(4, NumberOfGlobalObjects());
+
+ {
+ v8::HandleScope inner_scope;
+ CompileRun("var v = function() { return 42; }");
+ v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
+ ctx2->Enter();
+ ctx2->Global()->Set(v8_str("o"), v);
+ v8::Local<v8::Value> res = CompileRun(
+ "function f(x) { return x(); }"
+ "for (var i = 0; i < 10; ++i) f(o);"
+ "%OptimizeFunctionOnNextCall(f);"
+ "f(o);");
+ CHECK_EQ(42, res->Int32Value());
+ ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
+ ctx2->Exit();
+ ctx1->Exit();
+ ctx1.Dispose();
+ }
+ HEAP->CollectAllAvailableGarbage();
+ CHECK_EQ(2, NumberOfGlobalObjects());
+ ctx2.Dispose();
+ HEAP->CollectAllAvailableGarbage();
+ CHECK_EQ(0, NumberOfGlobalObjects());
+}
+
+
+TEST(LeakGlobalContextViaMapKeyed) {
+ i::FLAG_allow_natives_syntax = true;
+ v8::HandleScope outer_scope;
+ v8::Persistent<v8::Context> ctx1 = v8::Context::New();
+ v8::Persistent<v8::Context> ctx2 = v8::Context::New();
+ ctx1->Enter();
+
+ HEAP->CollectAllAvailableGarbage();
+ CHECK_EQ(4, NumberOfGlobalObjects());
+
+ {
+ v8::HandleScope inner_scope;
+ CompileRun("var v = [42, 43]");
+ v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
+ ctx2->Enter();
+ ctx2->Global()->Set(v8_str("o"), v);
+ v8::Local<v8::Value> res = CompileRun(
+ "function f() { return o[0]; }"
+ "for (var i = 0; i < 10; ++i) f();"
+ "%OptimizeFunctionOnNextCall(f);"
+ "f();");
+ CHECK_EQ(42, res->Int32Value());
+ ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
+ ctx2->Exit();
+ ctx1->Exit();
+ ctx1.Dispose();
+ }
+ HEAP->CollectAllAvailableGarbage();
+ CHECK_EQ(2, NumberOfGlobalObjects());
+ ctx2.Dispose();
+ HEAP->CollectAllAvailableGarbage();
+ CHECK_EQ(0, NumberOfGlobalObjects());
+}
+
+
+TEST(LeakGlobalContextViaMapProto) {
+ i::FLAG_allow_natives_syntax = true;
+ v8::HandleScope outer_scope;
+ v8::Persistent<v8::Context> ctx1 = v8::Context::New();
+ v8::Persistent<v8::Context> ctx2 = v8::Context::New();
+ ctx1->Enter();
+
+ HEAP->CollectAllAvailableGarbage();
+ CHECK_EQ(4, NumberOfGlobalObjects());
+
+ {
+ v8::HandleScope inner_scope;
+ CompileRun("var v = { y: 42}");
+ v8::Local<v8::Value> v = ctx1->Global()->Get(v8_str("v"));
+ ctx2->Enter();
+ ctx2->Global()->Set(v8_str("o"), v);
+ v8::Local<v8::Value> res = CompileRun(
+ "function f() {"
+ " var p = {x: 42};"
+ " p.__proto__ = o;"
+ " return p.x;"
+ "}"
+ "for (var i = 0; i < 10; ++i) f();"
+ "%OptimizeFunctionOnNextCall(f);"
+ "f();");
+ CHECK_EQ(42, res->Int32Value());
+ ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0));
+ ctx2->Exit();
+ ctx1->Exit();
+ ctx1.Dispose();
+ }
+ HEAP->CollectAllAvailableGarbage();
+ CHECK_EQ(2, NumberOfGlobalObjects());
+ ctx2.Dispose();
+ HEAP->CollectAllAvailableGarbage();
+ CHECK_EQ(0, NumberOfGlobalObjects());
+}
+
+
+TEST(InstanceOfStubWriteBarrier) {
+ i::FLAG_allow_natives_syntax = true;
+#ifdef DEBUG
+ i::FLAG_verify_heap = true;
+#endif
+ InitializeVM();
+ if (!i::V8::UseCrankshaft()) return;
+ v8::HandleScope outer_scope;
+
+ {
+ v8::HandleScope scope;
+ CompileRun(
+ "function foo () { }"
+ "function mkbar () { return new (new Function(\"\")) (); }"
+ "function f (x) { return (x instanceof foo); }"
+ "function g () { f(mkbar()); }"
+ "f(new foo()); f(new foo());"
+ "%OptimizeFunctionOnNextCall(f);"
+ "f(new foo()); g();");
+ }
+
+ IncrementalMarking* marking = HEAP->incremental_marking();
+ marking->Abort();
+ marking->Start();
+
+ Handle<JSFunction> f =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Function>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
+
+ CHECK(f->IsOptimized());
+
+ while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
+ !marking->IsStopped()) {
+ // Discard any pending GC requests otherwise we will get GC when we enter
+ // code below.
+ marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
+ }
+
+ CHECK(marking->IsMarking());
+
+ {
+ v8::HandleScope scope;
+ v8::Handle<v8::Object> global = v8::Context::GetCurrent()->Global();
+ v8::Handle<v8::Function> g =
+ v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
+ g->Call(global, 0, NULL);
+ }
+
+ HEAP->incremental_marking()->set_should_hurry(true);
+ HEAP->CollectGarbage(OLD_POINTER_SPACE);
+}
+
+
+TEST(PrototypeTransitionClearing) {
+ InitializeVM();
+ v8::HandleScope scope;
+
+ CompileRun(
+ "var base = {};"
+ "var live = [];"
+ "for (var i = 0; i < 10; i++) {"
+ " var object = {};"
+ " var prototype = {};"
+ " object.__proto__ = prototype;"
+ " if (i >= 3) live.push(object, prototype);"
+ "}");
+
+ Handle<JSObject> baseObject =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Object>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("base"))));
+
+ // Verify that only dead prototype transitions are cleared.
+ CHECK_EQ(10, baseObject->map()->NumberOfProtoTransitions());
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK_EQ(10 - 3, baseObject->map()->NumberOfProtoTransitions());
+
+ // Verify that prototype transitions array was compacted.
+ FixedArray* trans = baseObject->map()->prototype_transitions();
+ for (int i = 0; i < 10 - 3; i++) {
+ int j = Map::kProtoTransitionHeaderSize +
+ i * Map::kProtoTransitionElementsPerEntry;
+ CHECK(trans->get(j + Map::kProtoTransitionMapOffset)->IsMap());
+ CHECK(trans->get(j + Map::kProtoTransitionPrototypeOffset)->IsJSObject());
+ }
+
+ // Make sure next prototype is placed on an old-space evacuation candidate.
+ Handle<JSObject> prototype;
+ PagedSpace* space = HEAP->old_pointer_space();
+ do {
+ prototype = FACTORY->NewJSArray(32 * KB, FAST_ELEMENTS, TENURED);
+ } while (space->FirstPage() == space->LastPage() ||
+ !space->LastPage()->Contains(prototype->address()));
+
+ // Add a prototype on an evacuation candidate and verify that transition
+ // clearing correctly records slots in prototype transition array.
+ i::FLAG_always_compact = true;
+ Handle<Map> map(baseObject->map());
+ CHECK(!space->LastPage()->Contains(map->prototype_transitions()->address()));
+ CHECK(space->LastPage()->Contains(prototype->address()));
+ baseObject->SetPrototype(*prototype, false)->ToObjectChecked();
+ CHECK(map->GetPrototypeTransition(*prototype)->IsMap());
+ HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ CHECK(map->GetPrototypeTransition(*prototype)->IsMap());
+}
+
+
+TEST(ResetSharedFunctionInfoCountersDuringIncrementalMarking) {
+ i::FLAG_allow_natives_syntax = true;
+#ifdef DEBUG
+ i::FLAG_verify_heap = true;
+#endif
+ InitializeVM();
+ if (!i::V8::UseCrankshaft()) return;
+ v8::HandleScope outer_scope;
+
+ {
+ v8::HandleScope scope;
+ CompileRun(
+ "function f () {"
+ " var s = 0;"
+ " for (var i = 0; i < 100; i++) s += i;"
+ " return s;"
+ "}"
+ "f(); f();"
+ "%OptimizeFunctionOnNextCall(f);"
+ "f();");
+ }
+ Handle<JSFunction> f =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Function>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
+ CHECK(f->IsOptimized());
+
+ IncrementalMarking* marking = HEAP->incremental_marking();
+ marking->Abort();
+ marking->Start();
+
+ // The following two calls will increment HEAP->global_ic_age().
+ const int kLongIdlePauseInMs = 1000;
+ v8::V8::ContextDisposedNotification();
+ v8::V8::IdleNotification(kLongIdlePauseInMs);
+
+ while (!marking->IsStopped() && !marking->IsComplete()) {
+ marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
+ }
+
+ CHECK_EQ(HEAP->global_ic_age(), f->shared()->ic_age());
+ CHECK_EQ(0, f->shared()->opt_count());
+ CHECK_EQ(0, f->shared()->code()->profiler_ticks());
+}
+
+
+TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
+ i::FLAG_allow_natives_syntax = true;
+#ifdef DEBUG
+ i::FLAG_verify_heap = true;
+#endif
+ InitializeVM();
+ if (!i::V8::UseCrankshaft()) return;
+ v8::HandleScope outer_scope;
+
+ {
+ v8::HandleScope scope;
+ CompileRun(
+ "function f () {"
+ " var s = 0;"
+ " for (var i = 0; i < 100; i++) s += i;"
+ " return s;"
+ "}"
+ "f(); f();"
+ "%OptimizeFunctionOnNextCall(f);"
+ "f();");
+ }
+ Handle<JSFunction> f =
+ v8::Utils::OpenHandle(
+ *v8::Handle<v8::Function>::Cast(
+ v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
+ CHECK(f->IsOptimized());
+
+ HEAP->incremental_marking()->Abort();
+
+ // The following two calls will increment HEAP->global_ic_age().
+ // Since incremental marking is off, IdleNotification will do full GC.
+ const int kLongIdlePauseInMs = 1000;
+ v8::V8::ContextDisposedNotification();
+ v8::V8::IdleNotification(kLongIdlePauseInMs);
+
+ CHECK_EQ(HEAP->global_ic_age(), f->shared()->ic_age());
+ CHECK_EQ(0, f->shared()->opt_count());
+ CHECK_EQ(0, f->shared()->code()->profiler_ticks());
+}
+
+
+// Test that HAllocateObject will always return an object in new-space.
+TEST(OptimizedAllocationAlwaysInNewSpace) {
+ i::FLAG_allow_natives_syntax = true;
+ InitializeVM();
+ if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
+ v8::HandleScope scope;
+
+ FillUpNewSpace(HEAP->new_space());
+ AlwaysAllocateScope always_allocate;
+ v8::Local<v8::Value> res = CompileRun(
+ "function c(x) {"
+ " this.x = x;"
+ " for (var i = 0; i < 32; i++) {"
+ " this['x' + i] = x;"
+ " }"
+ "}"
+ "function f(x) { return new c(x); };"
+ "f(1); f(2); f(3);"
+ "%OptimizeFunctionOnNextCall(f);"
+ "f(4);");
+ CHECK_EQ(4, res->ToObject()->GetRealNamedProperty(v8_str("x"))->Int32Value());
+
+ Handle<JSObject> o =
+ v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
+
+ CHECK(HEAP->InNewSpace(*o));
+}
diff --git a/src/3rdparty/v8/test/cctest/test-lockers.cc b/src/3rdparty/v8/test/cctest/test-lockers.cc
index 7360da5..5035f87 100644
--- a/src/3rdparty/v8/test/cctest/test-lockers.cc
+++ b/src/3rdparty/v8/test/cctest/test-lockers.cc
@@ -204,7 +204,11 @@ static void StartJoinAndDeleteThreads(const i::List<JoinableThread*>& threads) {
// Run many threads all locking on the same isolate
TEST(IsolateLockingStress) {
+#ifdef V8_TARGET_ARCH_MIPS
+ const int kNThreads = 50;
+#else
const int kNThreads = 100;
+#endif
i::List<JoinableThread*> threads(kNThreads);
v8::Isolate* isolate = v8::Isolate::New();
for (int i = 0; i < kNThreads; i++) {
@@ -237,7 +241,7 @@ class IsolateNonlockingThread : public JoinableThread {
// Run many threads each accessing its own isolate without locking
TEST(MultithreadedParallelIsolates) {
-#ifdef V8_TARGET_ARCH_ARM
+#if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS)
const int kNThreads = 10;
#else
const int kNThreads = 50;
@@ -275,7 +279,11 @@ class IsolateNestedLockingThread : public JoinableThread {
// Run many threads with nested locks
TEST(IsolateNestedLocking) {
+#ifdef V8_TARGET_ARCH_MIPS
+ const int kNThreads = 50;
+#else
const int kNThreads = 100;
+#endif
v8::Isolate* isolate = v8::Isolate::New();
i::List<JoinableThread*> threads(kNThreads);
for (int i = 0; i < kNThreads; i++) {
@@ -311,7 +319,7 @@ class SeparateIsolatesLocksNonexclusiveThread : public JoinableThread {
// Run parallel threads that lock and access different isolates in parallel
TEST(SeparateIsolatesLocksNonexclusive) {
-#ifdef V8_TARGET_ARCH_ARM
+#if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS)
const int kNThreads = 50;
#else
const int kNThreads = 100;
@@ -385,7 +393,7 @@ class LockerUnlockerThread : public JoinableThread {
// Use unlocker inside of a Locker, multiple threads.
TEST(LockerUnlocker) {
-#ifdef V8_TARGET_ARCH_ARM
+#if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS)
const int kNThreads = 50;
#else
const int kNThreads = 100;
@@ -438,7 +446,7 @@ class LockTwiceAndUnlockThread : public JoinableThread {
// Use Unlocker inside two Lockers.
TEST(LockTwiceAndUnlock) {
-#ifdef V8_TARGET_ARCH_ARM
+#if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS)
const int kNThreads = 50;
#else
const int kNThreads = 100;
@@ -559,7 +567,11 @@ class LockUnlockLockThread : public JoinableThread {
// Locker inside an Unlocker inside a Locker.
TEST(LockUnlockLockMultithreaded) {
+#ifdef V8_TARGET_ARCH_MIPS
+ const int kNThreads = 50;
+#else
const int kNThreads = 100;
+#endif
v8::Isolate* isolate = v8::Isolate::New();
Persistent<v8::Context> context;
{
@@ -606,7 +618,11 @@ class LockUnlockLockDefaultIsolateThread : public JoinableThread {
// Locker inside an Unlocker inside a Locker for default isolate.
TEST(LockUnlockLockDefaultIsolateMultithreaded) {
+#ifdef V8_TARGET_ARCH_MIPS
+ const int kNThreads = 50;
+#else
const int kNThreads = 100;
+#endif
Persistent<v8::Context> context;
{
v8::Locker locker_;
@@ -639,3 +655,68 @@ TEST(Regress1433) {
isolate->Dispose();
}
}
+
+
+static const char* kSimpleExtensionSource =
+ "(function Foo() {"
+ " return 4;"
+ "})() ";
+
+class IsolateGenesisThread : public JoinableThread {
+ public:
+ IsolateGenesisThread(int count, const char* extension_names[])
+ : JoinableThread("IsolateGenesisThread"),
+ count_(count),
+ extension_names_(extension_names)
+ {}
+
+ virtual void Run() {
+ v8::Isolate* isolate = v8::Isolate::New();
+ {
+ v8::Isolate::Scope isolate_scope(isolate);
+ CHECK(!i::Isolate::Current()->has_installed_extensions());
+ v8::ExtensionConfiguration extensions(count_, extension_names_);
+ v8::Persistent<v8::Context> context = v8::Context::New(&extensions);
+ CHECK(i::Isolate::Current()->has_installed_extensions());
+ context.Dispose();
+ }
+ isolate->Dispose();
+ }
+ private:
+ int count_;
+ const char** extension_names_;
+};
+
+// Test installing extensions in separate isolates concurrently.
+// http://code.google.com/p/v8/issues/detail?id=1821
+TEST(ExtensionsRegistration) {
+#if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS)
+ const int kNThreads = 10;
+#else
+ const int kNThreads = 40;
+#endif
+ v8::RegisterExtension(new v8::Extension("test0",
+ kSimpleExtensionSource));
+ v8::RegisterExtension(new v8::Extension("test1",
+ kSimpleExtensionSource));
+ v8::RegisterExtension(new v8::Extension("test2",
+ kSimpleExtensionSource));
+ v8::RegisterExtension(new v8::Extension("test3",
+ kSimpleExtensionSource));
+ v8::RegisterExtension(new v8::Extension("test4",
+ kSimpleExtensionSource));
+ v8::RegisterExtension(new v8::Extension("test5",
+ kSimpleExtensionSource));
+ v8::RegisterExtension(new v8::Extension("test6",
+ kSimpleExtensionSource));
+ v8::RegisterExtension(new v8::Extension("test7",
+ kSimpleExtensionSource));
+ const char* extension_names[] = { "test0", "test1",
+ "test2", "test3", "test4",
+ "test5", "test6", "test7" };
+ i::List<JoinableThread*> threads(kNThreads);
+ for (int i = 0; i < kNThreads; i++) {
+ threads.Add(new IsolateGenesisThread(8, extension_names));
+ }
+ StartJoinAndDeleteThreads(threads);
+}
diff --git a/src/3rdparty/v8/test/cctest/test-log-stack-tracer.cc b/src/3rdparty/v8/test/cctest/test-log-stack-tracer.cc
index f536e6b..6847ef7 100644
--- a/src/3rdparty/v8/test/cctest/test-log-stack-tracer.cc
+++ b/src/3rdparty/v8/test/cctest/test-log-stack-tracer.cc
@@ -307,6 +307,7 @@ TEST(CFromJSStackTrace) {
// Stack tracing will start from the first JS function, i.e. "JSFuncDoTrace"
int base = 0;
CHECK_GT(sample.frames_count, base + 1);
+
CHECK(IsAddressWithinFuncCode("JSFuncDoTrace", sample.stack[base + 0]));
CHECK(IsAddressWithinFuncCode("JSTrace", sample.stack[base + 1]));
}
diff --git a/src/3rdparty/v8/test/cctest/test-mark-compact.cc b/src/3rdparty/v8/test/cctest/test-mark-compact.cc
index e99e1e5..83a576d 100644
--- a/src/3rdparty/v8/test/cctest/test-mark-compact.cc
+++ b/src/3rdparty/v8/test/cctest/test-mark-compact.cc
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -27,6 +27,14 @@
#include <stdlib.h>
+#ifdef __linux__
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <errno.h>
+#endif
+
#include "v8.h"
#include "global-handles.h"
@@ -86,7 +94,7 @@ TEST(Promotion) {
// Allocate a fixed array in the new space.
int array_size =
- (HEAP->MaxObjectSizeInPagedSpace() - FixedArray::kHeaderSize) /
+ (Page::kMaxNonCodeHeapObjectSize - FixedArray::kHeaderSize) /
(kPointerSize * 4);
Object* obj = HEAP->AllocateFixedArray(array_size)->ToObjectChecked();
@@ -117,7 +125,7 @@ TEST(NoPromotion) {
// Allocate a big Fixed array in the new space.
int max_size =
- Min(HEAP->MaxObjectSizeInPagedSpace(), HEAP->MaxObjectSizeInNewSpace());
+ Min(Page::kMaxNonCodeHeapObjectSize, HEAP->MaxObjectSizeInNewSpace());
int length = (max_size - FixedArray::kHeaderSize) / (2*kPointerSize);
Object* obj = i::Isolate::Current()->heap()->AllocateFixedArray(length)->
@@ -142,9 +150,6 @@ TEST(NoPromotion) {
// Call mark compact GC, and it should pass.
HEAP->CollectGarbage(OLD_POINTER_SPACE);
-
- // array should not be promoted because the old space is full.
- CHECK(HEAP->InSpace(*array, NEW_SPACE));
}
@@ -447,3 +452,100 @@ TEST(EmptyObjectGroups) {
global_handles->AddImplicitReferences(
Handle<HeapObject>::cast(object).location(), NULL, 0);
}
+
+
+// Here is a memory use test that uses /proc, and is therefore Linux-only. We
+// do not care how much memory the simulator uses, since it is only there for
+// debugging purposes.
+#if defined(__linux__) && !defined(USE_SIMULATOR)
+
+
+static uintptr_t ReadLong(char* buffer, intptr_t* position, int base) {
+ char* end_address = buffer + *position;
+ uintptr_t result = strtoul(buffer + *position, &end_address, base);
+ CHECK(result != ULONG_MAX || errno != ERANGE);
+ CHECK(end_address > buffer + *position);
+ *position = end_address - buffer;
+ return result;
+}
+
+
+static intptr_t MemoryInUse() {
+ intptr_t memory_use = 0;
+
+ int fd = open("/proc/self/maps", O_RDONLY);
+ if (fd < 0) return -1;
+
+ const int kBufSize = 10000;
+ char buffer[kBufSize];
+ int length = read(fd, buffer, kBufSize);
+ intptr_t line_start = 0;
+ CHECK_LT(length, kBufSize); // Make the buffer bigger.
+ CHECK_GT(length, 0); // We have to find some data in the file.
+ while (line_start < length) {
+ if (buffer[line_start] == '\n') {
+ line_start++;
+ continue;
+ }
+ intptr_t position = line_start;
+ uintptr_t start = ReadLong(buffer, &position, 16);
+ CHECK_EQ(buffer[position++], '-');
+ uintptr_t end = ReadLong(buffer, &position, 16);
+ CHECK_EQ(buffer[position++], ' ');
+ CHECK(buffer[position] == '-' || buffer[position] == 'r');
+ bool read_permission = (buffer[position++] == 'r');
+ CHECK(buffer[position] == '-' || buffer[position] == 'w');
+ bool write_permission = (buffer[position++] == 'w');
+ CHECK(buffer[position] == '-' || buffer[position] == 'x');
+ bool execute_permission = (buffer[position++] == 'x');
+ CHECK(buffer[position] == '-' || buffer[position] == 'p');
+ bool private_mapping = (buffer[position++] == 'p');
+ CHECK_EQ(buffer[position++], ' ');
+ uintptr_t offset = ReadLong(buffer, &position, 16);
+ USE(offset);
+ CHECK_EQ(buffer[position++], ' ');
+ uintptr_t major = ReadLong(buffer, &position, 16);
+ USE(major);
+ CHECK_EQ(buffer[position++], ':');
+ uintptr_t minor = ReadLong(buffer, &position, 16);
+ USE(minor);
+ CHECK_EQ(buffer[position++], ' ');
+ uintptr_t inode = ReadLong(buffer, &position, 10);
+ while (position < length && buffer[position] != '\n') position++;
+ if ((read_permission || write_permission || execute_permission) &&
+ private_mapping && inode == 0) {
+ memory_use += (end - start);
+ }
+
+ line_start = position;
+ }
+ close(fd);
+ return memory_use;
+}
+
+
+TEST(BootUpMemoryUse) {
+ intptr_t initial_memory = MemoryInUse();
+ FLAG_crankshaft = false; // Avoid flakiness.
+ // Only Linux has the proc filesystem and only if it is mapped. If it's not
+ // there we just skip the test.
+ if (initial_memory >= 0) {
+ InitializeVM();
+ intptr_t booted_memory = MemoryInUse();
+ if (sizeof(initial_memory) == 8) {
+ if (v8::internal::Snapshot::IsEnabled()) {
+ CHECK_LE(booted_memory - initial_memory, 3600 * 1024); // 3396.
+ } else {
+ CHECK_LE(booted_memory - initial_memory, 3600 * 1024); // 3432.
+ }
+ } else {
+ if (v8::internal::Snapshot::IsEnabled()) {
+ CHECK_LE(booted_memory - initial_memory, 2600 * 1024); // 2484.
+ } else {
+ CHECK_LE(booted_memory - initial_memory, 2950 * 1024); // 2844
+ }
+ }
+ }
+}
+
+#endif // __linux__ and !USE_SIMULATOR
diff --git a/src/3rdparty/v8/test/cctest/test-parsing.cc b/src/3rdparty/v8/test/cctest/test-parsing.cc
index 00d200a..6bcae7c 100755
--- a/src/3rdparty/v8/test/cctest/test-parsing.cc
+++ b/src/3rdparty/v8/test/cctest/test-parsing.cc
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -32,6 +32,7 @@
#include "v8.h"
#include "cctest.h"
+#include "compiler.h"
#include "execution.h"
#include "isolate.h"
#include "parser.h"
@@ -62,17 +63,18 @@ TEST(ScanKeywords) {
int length = i::StrLength(key_token.keyword);
CHECK(static_cast<int>(sizeof(buffer)) >= length);
{
- i::Utf8ToUC16CharacterStream stream(keyword, length);
+ i::Utf8ToUtf16CharacterStream stream(keyword, length);
i::Scanner scanner(&unicode_cache);
- // The scanner should parse 'let' as Token::LET for this test.
+ // The scanner should parse Harmony keywords for this test.
scanner.SetHarmonyScoping(true);
+ scanner.SetHarmonyModules(true);
scanner.Initialize(&stream);
CHECK_EQ(key_token.token, scanner.Next());
CHECK_EQ(i::Token::EOS, scanner.Next());
}
// Removing characters will make keyword matching fail.
{
- i::Utf8ToUC16CharacterStream stream(keyword, length - 1);
+ i::Utf8ToUtf16CharacterStream stream(keyword, length - 1);
i::Scanner scanner(&unicode_cache);
scanner.Initialize(&stream);
CHECK_EQ(i::Token::IDENTIFIER, scanner.Next());
@@ -83,7 +85,7 @@ TEST(ScanKeywords) {
for (int j = 0; j < static_cast<int>(ARRAY_SIZE(chars_to_append)); ++j) {
memmove(buffer, keyword, length);
buffer[length] = chars_to_append[j];
- i::Utf8ToUC16CharacterStream stream(buffer, length + 1);
+ i::Utf8ToUtf16CharacterStream stream(buffer, length + 1);
i::Scanner scanner(&unicode_cache);
scanner.Initialize(&stream);
CHECK_EQ(i::Token::IDENTIFIER, scanner.Next());
@@ -93,7 +95,7 @@ TEST(ScanKeywords) {
{
memmove(buffer, keyword, length);
buffer[length - 1] = '_';
- i::Utf8ToUC16CharacterStream stream(buffer, length);
+ i::Utf8ToUtf16CharacterStream stream(buffer, length);
i::Scanner scanner(&unicode_cache);
scanner.Initialize(&stream);
CHECK_EQ(i::Token::IDENTIFIER, scanner.Next());
@@ -229,7 +231,7 @@ TEST(Preparsing) {
CHECK_EQ(11, error_location.end_pos);
// Should not crash.
const char* message = pre_impl->BuildMessage();
- i::Vector<const char*> args(pre_impl->BuildArgs());
+ pre_impl->BuildArgs();
CHECK_GT(strlen(message), 0);
}
@@ -253,7 +255,7 @@ TEST(StandAlonePreParser) {
uintptr_t stack_limit = i::Isolate::Current()->stack_guard()->real_climit();
for (int i = 0; programs[i]; i++) {
const char* program = programs[i];
- i::Utf8ToUC16CharacterStream stream(
+ i::Utf8ToUtf16CharacterStream stream(
reinterpret_cast<const i::byte*>(program),
static_cast<unsigned>(strlen(program)));
i::CompleteParserRecorder log;
@@ -289,7 +291,7 @@ TEST(StandAlonePreParserNoNatives) {
uintptr_t stack_limit = i::Isolate::Current()->stack_guard()->real_climit();
for (int i = 0; programs[i]; i++) {
const char* program = programs[i];
- i::Utf8ToUC16CharacterStream stream(
+ i::Utf8ToUtf16CharacterStream stream(
reinterpret_cast<const i::byte*>(program),
static_cast<unsigned>(strlen(program)));
i::CompleteParserRecorder log;
@@ -324,8 +326,9 @@ TEST(RegressChromium62639) {
// and then used the invalid currently scanned literal. This always
// failed in debug mode, and sometimes crashed in release mode.
- i::Utf8ToUC16CharacterStream stream(reinterpret_cast<const i::byte*>(program),
- static_cast<unsigned>(strlen(program)));
+ i::Utf8ToUtf16CharacterStream stream(
+ reinterpret_cast<const i::byte*>(program),
+ static_cast<unsigned>(strlen(program)));
i::ScriptDataImpl* data =
i::ParserApi::PreParse(&stream, NULL, false);
CHECK(data->HasError());
@@ -348,17 +351,17 @@ TEST(Regress928) {
"try { } catch (e) { var foo = function () { /* first */ } }"
"var bar = function () { /* second */ }";
- i::Utf8ToUC16CharacterStream stream(reinterpret_cast<const i::byte*>(program),
- static_cast<unsigned>(strlen(program)));
- i::ScriptDataImpl* data =
- i::ParserApi::PartialPreParse(&stream, NULL, false);
+ v8::HandleScope handles;
+ i::Handle<i::String> source(
+ FACTORY->NewStringFromAscii(i::CStrVector(program)));
+ i::ScriptDataImpl* data = i::ParserApi::PartialPreParse(source, NULL, false);
CHECK(!data->HasError());
data->Initialize();
int first_function =
static_cast<int>(strstr(program, "function") - program);
- int first_lbrace = first_function + static_cast<int>(strlen("function () "));
+ int first_lbrace = first_function + i::StrLength("function () ");
CHECK_EQ('{', program[first_lbrace]);
i::FunctionEntry entry1 = data->GetFunctionEntry(first_lbrace);
CHECK(!entry1.is_valid());
@@ -366,7 +369,7 @@ TEST(Regress928) {
int second_function =
static_cast<int>(strstr(program + first_lbrace, "function") - program);
int second_lbrace =
- second_function + static_cast<int>(strlen("function () "));
+ second_function + i::StrLength("function () ");
CHECK_EQ('{', program[second_lbrace]);
i::FunctionEntry entry2 = data->GetFunctionEntry(second_lbrace);
CHECK(entry2.is_valid());
@@ -390,7 +393,7 @@ TEST(PreParseOverflow) {
uintptr_t stack_limit = i::Isolate::Current()->stack_guard()->real_climit();
- i::Utf8ToUC16CharacterStream stream(
+ i::Utf8ToUtf16CharacterStream stream(
reinterpret_cast<const i::byte*>(*program),
static_cast<unsigned>(kProgramSize));
i::CompleteParserRecorder log;
@@ -447,10 +450,10 @@ void TestCharacterStream(const char* ascii_source,
i::Handle<i::String> uc16_string(
FACTORY->NewExternalStringFromTwoByte(&resource));
- i::ExternalTwoByteStringUC16CharacterStream uc16_stream(
+ i::ExternalTwoByteStringUtf16CharacterStream uc16_stream(
i::Handle<i::ExternalTwoByteString>::cast(uc16_string), start, end);
- i::GenericStringUC16CharacterStream string_stream(ascii_string, start, end);
- i::Utf8ToUC16CharacterStream utf8_stream(
+ i::GenericStringUtf16CharacterStream string_stream(ascii_string, start, end);
+ i::Utf8ToUtf16CharacterStream utf8_stream(
reinterpret_cast<const i::byte*>(ascii_source), end);
utf8_stream.SeekForward(start);
@@ -573,12 +576,14 @@ TEST(Utf8CharacterStream) {
char buffer[kAllUtf8CharsSizeU];
unsigned cursor = 0;
for (int i = 0; i <= kMaxUC16Char; i++) {
- cursor += unibrow::Utf8::Encode(buffer + cursor, i);
+ cursor += unibrow::Utf8::Encode(buffer + cursor,
+ i,
+ unibrow::Utf16::kNoPreviousCharacter);
}
ASSERT(cursor == kAllUtf8CharsSizeU);
- i::Utf8ToUC16CharacterStream stream(reinterpret_cast<const i::byte*>(buffer),
- kAllUtf8CharsSizeU);
+ i::Utf8ToUtf16CharacterStream stream(reinterpret_cast<const i::byte*>(buffer),
+ kAllUtf8CharsSizeU);
for (int i = 0; i <= kMaxUC16Char; i++) {
CHECK_EQU(i, stream.pos());
int32_t c = stream.Advance();
@@ -608,7 +613,7 @@ TEST(Utf8CharacterStream) {
#undef CHECK_EQU
-void TestStreamScanner(i::UC16CharacterStream* stream,
+void TestStreamScanner(i::Utf16CharacterStream* stream,
i::Token::Value* expected_tokens,
int skip_pos = 0, // Zero means not skipping.
int skip_to = 0) {
@@ -631,8 +636,8 @@ TEST(StreamScanner) {
v8::V8::Initialize();
const char* str1 = "{ foo get for : */ <- \n\n /*foo*/ bib";
- i::Utf8ToUC16CharacterStream stream1(reinterpret_cast<const i::byte*>(str1),
- static_cast<unsigned>(strlen(str1)));
+ i::Utf8ToUtf16CharacterStream stream1(reinterpret_cast<const i::byte*>(str1),
+ static_cast<unsigned>(strlen(str1)));
i::Token::Value expectations1[] = {
i::Token::LBRACE,
i::Token::IDENTIFIER,
@@ -650,8 +655,8 @@ TEST(StreamScanner) {
TestStreamScanner(&stream1, expectations1, 0, 0);
const char* str2 = "case default const {THIS\nPART\nSKIPPED} do";
- i::Utf8ToUC16CharacterStream stream2(reinterpret_cast<const i::byte*>(str2),
- static_cast<unsigned>(strlen(str2)));
+ i::Utf8ToUtf16CharacterStream stream2(reinterpret_cast<const i::byte*>(str2),
+ static_cast<unsigned>(strlen(str2)));
i::Token::Value expectations2[] = {
i::Token::CASE,
i::Token::DEFAULT,
@@ -681,7 +686,7 @@ TEST(StreamScanner) {
for (int i = 0; i <= 4; i++) {
expectations3[6 - i] = i::Token::ILLEGAL;
expectations3[5 - i] = i::Token::EOS;
- i::Utf8ToUC16CharacterStream stream3(
+ i::Utf8ToUtf16CharacterStream stream3(
reinterpret_cast<const i::byte*>(str3),
static_cast<unsigned>(strlen(str3)));
TestStreamScanner(&stream3, expectations3, 1, 1 + i);
@@ -690,7 +695,7 @@ TEST(StreamScanner) {
void TestScanRegExp(const char* re_source, const char* expected) {
- i::Utf8ToUC16CharacterStream stream(
+ i::Utf8ToUtf16CharacterStream stream(
reinterpret_cast<const i::byte*>(re_source),
static_cast<unsigned>(strlen(re_source)));
i::Scanner scanner(i::Isolate::Current()->unicode_cache());
@@ -746,6 +751,67 @@ TEST(RegExpScanning) {
}
+static int Utf8LengthHelper(const char* s) {
+ int len = i::StrLength(s);
+ int character_length = len;
+ for (int i = 0; i < len; i++) {
+ unsigned char c = s[i];
+ int input_offset = 0;
+ int output_adjust = 0;
+ if (c > 0x7f) {
+ if (c < 0xc0) continue;
+ if (c >= 0xf0) {
+ if (c >= 0xf8) {
+ // 5 and 6 byte UTF-8 sequences turn into a kBadChar for each UTF-8
+ // byte.
+ continue; // Handle first UTF-8 byte.
+ }
+ if ((c & 7) == 0 && ((s[i + 1] & 0x30) == 0)) {
+ // This 4 byte sequence could have been coded as a 3 byte sequence.
+ // Record a single kBadChar for the first byte and continue.
+ continue;
+ }
+ input_offset = 3;
+ // 4 bytes of UTF-8 turn into 2 UTF-16 code units.
+ character_length -= 2;
+ } else if (c >= 0xe0) {
+ if ((c & 0xf) == 0 && ((s[i + 1] & 0x20) == 0)) {
+ // This 3 byte sequence could have been coded as a 2 byte sequence.
+ // Record a single kBadChar for the first byte and continue.
+ continue;
+ }
+ input_offset = 2;
+ // 3 bytes of UTF-8 turn into 1 UTF-16 code unit.
+ output_adjust = 2;
+ } else {
+ if ((c & 0x1e) == 0) {
+ // This 2 byte sequence could have been coded as a 1 byte sequence.
+ // Record a single kBadChar for the first byte and continue.
+ continue;
+ }
+ input_offset = 1;
+ // 2 bytes of UTF-8 turn into 1 UTF-16 code unit.
+ output_adjust = 1;
+ }
+ bool bad = false;
+ for (int j = 1; j <= input_offset; j++) {
+ if ((s[i + j] & 0xc0) != 0x80) {
+ // Bad UTF-8 sequence turns the first in the sequence into kBadChar,
+ // which is a single UTF-16 code unit.
+ bad = true;
+ break;
+ }
+ }
+ if (!bad) {
+ i += input_offset;
+ character_length -= output_adjust;
+ }
+ }
+ }
+ return character_length;
+}
+
+
TEST(ScopePositions) {
// Test the parser for correctly setting the start and end positions
// of a scope. We check the scope positions of exactly one scope
@@ -758,76 +824,167 @@ TEST(ScopePositions) {
const char* inner_source;
const char* outer_suffix;
i::ScopeType scope_type;
+ i::LanguageMode language_mode;
};
const SourceData source_data[] = {
- { " with ({}) ", "{ block; }", " more;", i::WITH_SCOPE },
- { " with ({}) ", "{ block; }", "; more;", i::WITH_SCOPE },
+ { " with ({}) ", "{ block; }", " more;", i::WITH_SCOPE, i::CLASSIC_MODE },
+ { " with ({}) ", "{ block; }", "; more;", i::WITH_SCOPE, i::CLASSIC_MODE },
{ " with ({}) ", "{\n"
" block;\n"
" }", "\n"
- " more;", i::WITH_SCOPE },
- { " with ({}) ", "statement;", " more;", i::WITH_SCOPE },
+ " more;", i::WITH_SCOPE, i::CLASSIC_MODE },
+ { " with ({}) ", "statement;", " more;", i::WITH_SCOPE, i::CLASSIC_MODE },
{ " with ({}) ", "statement", "\n"
- " more;", i::WITH_SCOPE },
+ " more;", i::WITH_SCOPE, i::CLASSIC_MODE },
{ " with ({})\n"
" ", "statement;", "\n"
- " more;", i::WITH_SCOPE },
- { " try {} catch ", "(e) { block; }", " more;", i::CATCH_SCOPE },
- { " try {} catch ", "(e) { block; }", "; more;", i::CATCH_SCOPE },
+ " more;", i::WITH_SCOPE, i::CLASSIC_MODE },
+ { " try {} catch ", "(e) { block; }", " more;",
+ i::CATCH_SCOPE, i::CLASSIC_MODE },
+ { " try {} catch ", "(e) { block; }", "; more;",
+ i::CATCH_SCOPE, i::CLASSIC_MODE },
{ " try {} catch ", "(e) {\n"
" block;\n"
" }", "\n"
- " more;", i::CATCH_SCOPE },
+ " more;", i::CATCH_SCOPE, i::CLASSIC_MODE },
{ " try {} catch ", "(e) { block; }", " finally { block; } more;",
- i::CATCH_SCOPE },
+ i::CATCH_SCOPE, i::CLASSIC_MODE },
{ " start;\n"
- " ", "{ let block; }", " more;", i::BLOCK_SCOPE },
+ " ", "{ let block; }", " more;", i::BLOCK_SCOPE, i::EXTENDED_MODE },
{ " start;\n"
- " ", "{ let block; }", "; more;", i::BLOCK_SCOPE },
+ " ", "{ let block; }", "; more;", i::BLOCK_SCOPE, i::EXTENDED_MODE },
{ " start;\n"
" ", "{\n"
" let block;\n"
" }", "\n"
- " more;", i::BLOCK_SCOPE },
+ " more;", i::BLOCK_SCOPE, i::EXTENDED_MODE },
{ " start;\n"
" function fun", "(a,b) { infunction; }", " more;",
- i::FUNCTION_SCOPE },
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
{ " start;\n"
" function fun", "(a,b) {\n"
" infunction;\n"
" }", "\n"
- " more;", i::FUNCTION_SCOPE },
+ " more;", i::FUNCTION_SCOPE, i::CLASSIC_MODE },
{ " (function fun", "(a,b) { infunction; }", ")();",
- i::FUNCTION_SCOPE },
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
{ " for ", "(let x = 1 ; x < 10; ++ x) { block; }", " more;",
- i::BLOCK_SCOPE },
+ i::BLOCK_SCOPE, i::EXTENDED_MODE },
{ " for ", "(let x = 1 ; x < 10; ++ x) { block; }", "; more;",
- i::BLOCK_SCOPE },
+ i::BLOCK_SCOPE, i::EXTENDED_MODE },
{ " for ", "(let x = 1 ; x < 10; ++ x) {\n"
" block;\n"
" }", "\n"
- " more;", i::BLOCK_SCOPE },
+ " more;", i::BLOCK_SCOPE, i::EXTENDED_MODE },
{ " for ", "(let x = 1 ; x < 10; ++ x) statement;", " more;",
- i::BLOCK_SCOPE },
+ i::BLOCK_SCOPE, i::EXTENDED_MODE },
{ " for ", "(let x = 1 ; x < 10; ++ x) statement", "\n"
- " more;", i::BLOCK_SCOPE },
+ " more;", i::BLOCK_SCOPE, i::EXTENDED_MODE },
{ " for ", "(let x = 1 ; x < 10; ++ x)\n"
" statement;", "\n"
- " more;", i::BLOCK_SCOPE },
- { " for ", "(let x in {}) { block; }", " more;", i::BLOCK_SCOPE },
- { " for ", "(let x in {}) { block; }", "; more;", i::BLOCK_SCOPE },
+ " more;", i::BLOCK_SCOPE, i::EXTENDED_MODE },
+ { " for ", "(let x in {}) { block; }", " more;",
+ i::BLOCK_SCOPE, i::EXTENDED_MODE },
+ { " for ", "(let x in {}) { block; }", "; more;",
+ i::BLOCK_SCOPE, i::EXTENDED_MODE },
{ " for ", "(let x in {}) {\n"
" block;\n"
" }", "\n"
- " more;", i::BLOCK_SCOPE },
- { " for ", "(let x in {}) statement;", " more;", i::BLOCK_SCOPE },
+ " more;", i::BLOCK_SCOPE, i::EXTENDED_MODE },
+ { " for ", "(let x in {}) statement;", " more;",
+ i::BLOCK_SCOPE, i::EXTENDED_MODE },
{ " for ", "(let x in {}) statement", "\n"
- " more;", i::BLOCK_SCOPE },
+ " more;", i::BLOCK_SCOPE, i::EXTENDED_MODE },
{ " for ", "(let x in {})\n"
" statement;", "\n"
- " more;", i::BLOCK_SCOPE },
- { NULL, NULL, NULL, i::EVAL_SCOPE }
+ " more;", i::BLOCK_SCOPE, i::EXTENDED_MODE },
+ // Check that 6-byte and 4-byte encodings of UTF-8 strings do not throw
+ // the preparser off in terms of byte offsets.
+ // 6 byte encoding.
+ { " 'foo\355\240\201\355\260\211';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ // 4 byte encoding.
+ { " 'foo\360\220\220\212';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ // 3 byte encoding of \u0fff.
+ { " 'foo\340\277\277';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ // Broken 6 byte encoding with missing last byte.
+ { " 'foo\355\240\201\355\211';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ // Broken 3 byte encoding of \u0fff with missing last byte.
+ { " 'foo\340\277';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ // Broken 3 byte encoding of \u0fff with missing 2 last bytes.
+ { " 'foo\340';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ // Broken 3 byte encoding of \u00ff should be a 2 byte encoding.
+ { " 'foo\340\203\277';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ // Broken 3 byte encoding of \u007f should be a 2 byte encoding.
+ { " 'foo\340\201\277';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ // Unpaired lead surrogate.
+ { " 'foo\355\240\201';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ // Unpaired lead surrogate where following code point is a 3 byte sequence.
+ { " 'foo\355\240\201\340\277\277';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ // Unpaired lead surrogate where following code point is a 4 byte encoding
+ // of a trail surrogate.
+ { " 'foo\355\240\201\360\215\260\211';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ // Unpaired trail surrogate.
+ { " 'foo\355\260\211';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ // 2 byte encoding of \u00ff.
+ { " 'foo\303\277';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ // Broken 2 byte encoding of \u00ff with missing last byte.
+ { " 'foo\303';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ // Broken 2 byte encoding of \u007f should be a 1 byte encoding.
+ { " 'foo\301\277';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ // Illegal 5 byte encoding.
+ { " 'foo\370\277\277\277\277';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ // Illegal 6 byte encoding.
+ { " 'foo\374\277\277\277\277\277';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ // Illegal 0xfe byte
+ { " 'foo\376\277\277\277\277\277\277';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ // Illegal 0xff byte
+ { " 'foo\377\277\277\277\277\277\277\277';\n"
+ " (function fun", "(a,b) { infunction; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ { " 'foo';\n"
+ " (function fun", "(a,b) { 'bar\355\240\201\355\260\213'; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ { " 'foo';\n"
+ " (function fun", "(a,b) { 'bar\360\220\220\214'; }", ")();",
+ i::FUNCTION_SCOPE, i::CLASSIC_MODE },
+ { NULL, NULL, NULL, i::EVAL_SCOPE, i::CLASSIC_MODE }
};
v8::HandleScope handles;
@@ -837,28 +994,34 @@ TEST(ScopePositions) {
int marker;
i::Isolate::Current()->stack_guard()->SetStackLimit(
reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
+ i::FLAG_harmony_scoping = true;
for (int i = 0; source_data[i].outer_prefix; i++) {
- int kPrefixLen = i::StrLength(source_data[i].outer_prefix);
- int kInnerLen = i::StrLength(source_data[i].inner_source);
- int kSuffixLen = i::StrLength(source_data[i].outer_suffix);
+ int kPrefixLen = Utf8LengthHelper(source_data[i].outer_prefix);
+ int kInnerLen = Utf8LengthHelper(source_data[i].inner_source);
+ int kSuffixLen = Utf8LengthHelper(source_data[i].outer_suffix);
+ int kPrefixByteLen = i::StrLength(source_data[i].outer_prefix);
+ int kInnerByteLen = i::StrLength(source_data[i].inner_source);
+ int kSuffixByteLen = i::StrLength(source_data[i].outer_suffix);
int kProgramSize = kPrefixLen + kInnerLen + kSuffixLen;
- i::Vector<char> program = i::Vector<char>::New(kProgramSize + 1);
- int length = i::OS::SNPrintF(program, "%s%s%s",
- source_data[i].outer_prefix,
- source_data[i].inner_source,
- source_data[i].outer_suffix);
- CHECK(length == kProgramSize);
+ int kProgramByteSize = kPrefixByteLen + kInnerByteLen + kSuffixByteLen;
+ i::Vector<char> program = i::Vector<char>::New(kProgramByteSize + 1);
+ i::OS::SNPrintF(program, "%s%s%s",
+ source_data[i].outer_prefix,
+ source_data[i].inner_source,
+ source_data[i].outer_suffix);
// Parse program source.
i::Handle<i::String> source(
- FACTORY->NewStringFromAscii(i::CStrVector(program.start())));
+ FACTORY->NewStringFromUtf8(i::CStrVector(program.start())));
+ CHECK_EQ(source->length(), kProgramSize);
i::Handle<i::Script> script = FACTORY->NewScript(source);
- i::Parser parser(script, false, NULL, NULL);
- parser.SetHarmonyScoping(true);
- i::FunctionLiteral* function =
- parser.ParseProgram(source, true, i::kNonStrictMode);
- ASSERT(function != NULL);
+ i::Parser parser(script, i::kAllowLazy | i::EXTENDED_MODE, NULL, NULL);
+ i::CompilationInfo info(script);
+ info.MarkAsGlobal();
+ info.SetLanguageMode(source_data[i].language_mode);
+ i::FunctionLiteral* function = parser.ParseProgram(&info);
+ CHECK(function != NULL);
// Check scope types and positions.
i::Scope* scope = function->scope();
@@ -875,3 +1038,201 @@ TEST(ScopePositions) {
CHECK_EQ(inner_scope->end_position(), kPrefixLen + kInnerLen);
}
}
+
+
+void TestParserSync(i::Handle<i::String> source, int flags) {
+ uintptr_t stack_limit = i::Isolate::Current()->stack_guard()->real_climit();
+ bool harmony_scoping = ((i::kLanguageModeMask & flags) == i::EXTENDED_MODE);
+
+ // Preparse the data.
+ i::CompleteParserRecorder log;
+ i::Scanner scanner(i::Isolate::Current()->unicode_cache());
+ i::GenericStringUtf16CharacterStream stream(source, 0, source->length());
+ scanner.SetHarmonyScoping(harmony_scoping);
+ scanner.Initialize(&stream);
+ v8::preparser::PreParser::PreParseResult result =
+ v8::preparser::PreParser::PreParseProgram(
+ &scanner, &log, flags, stack_limit);
+ CHECK_EQ(v8::preparser::PreParser::kPreParseSuccess, result);
+ i::ScriptDataImpl data(log.ExtractData());
+
+ // Parse the data
+ i::Handle<i::Script> script = FACTORY->NewScript(source);
+ bool save_harmony_scoping = i::FLAG_harmony_scoping;
+ i::FLAG_harmony_scoping = harmony_scoping;
+ i::Parser parser(script, flags, NULL, NULL);
+ i::CompilationInfo info(script);
+ info.MarkAsGlobal();
+ i::FunctionLiteral* function = parser.ParseProgram(&info);
+ i::FLAG_harmony_scoping = save_harmony_scoping;
+
+ i::String* type_string = NULL;
+ if (function == NULL) {
+ // Extract exception from the parser.
+ i::Handle<i::String> type_symbol = FACTORY->LookupAsciiSymbol("type");
+ CHECK(i::Isolate::Current()->has_pending_exception());
+ i::MaybeObject* maybe_object = i::Isolate::Current()->pending_exception();
+ i::JSObject* exception = NULL;
+ CHECK(maybe_object->To(&exception));
+
+ // Get the type string.
+ maybe_object = exception->GetProperty(*type_symbol);
+ CHECK(maybe_object->To(&type_string));
+ }
+
+ // Check that preparsing fails iff parsing fails.
+ if (data.has_error() && function != NULL) {
+ i::OS::Print(
+ "Preparser failed on:\n"
+ "\t%s\n"
+ "with error:\n"
+ "\t%s\n"
+ "However, the parser succeeded",
+ *source->ToCString(), data.BuildMessage());
+ CHECK(false);
+ } else if (!data.has_error() && function == NULL) {
+ i::OS::Print(
+ "Parser failed on:\n"
+ "\t%s\n"
+ "with error:\n"
+ "\t%s\n"
+ "However, the preparser succeeded",
+ *source->ToCString(), *type_string->ToCString());
+ CHECK(false);
+ }
+
+ // Check that preparser and parser produce the same error.
+ if (function == NULL) {
+ if (!type_string->IsEqualTo(i::CStrVector(data.BuildMessage()))) {
+ i::OS::Print(
+ "Expected parser and preparser to produce the same error on:\n"
+ "\t%s\n"
+ "However, found the following error messages\n"
+ "\tparser: %s\n"
+ "\tpreparser: %s\n",
+ *source->ToCString(), *type_string->ToCString(), data.BuildMessage());
+ CHECK(false);
+ }
+ }
+}
+
+
+void TestParserSyncWithFlags(i::Handle<i::String> source) {
+ static const int kFlagsCount = 6;
+ const int flags[kFlagsCount] = {
+ i::kNoParsingFlags | i::CLASSIC_MODE,
+ i::kNoParsingFlags | i::STRICT_MODE,
+ i::kNoParsingFlags | i::EXTENDED_MODE,
+ i::kAllowLazy | i::CLASSIC_MODE,
+ i::kAllowLazy | i::STRICT_MODE,
+ i::kAllowLazy | i::EXTENDED_MODE
+ };
+
+ for (int k = 0; k < kFlagsCount; ++k) {
+ TestParserSync(source, flags[k]);
+ }
+}
+
+
+TEST(ParserSync) {
+ const char* context_data[][2] = {
+ { "", "" },
+ { "{", "}" },
+ { "if (true) ", " else {}" },
+ { "if (true) {} else ", "" },
+ { "if (true) ", "" },
+ { "do ", " while (false)" },
+ { "while (false) ", "" },
+ { "for (;;) ", "" },
+ { "with ({})", "" },
+ { "switch (12) { case 12: ", "}" },
+ { "switch (12) { default: ", "}" },
+ { "label2: ", "" },
+ { NULL, NULL }
+ };
+
+ const char* statement_data[] = {
+ "{}",
+ "var x",
+ "var x = 1",
+ "const x",
+ "const x = 1",
+ ";",
+ "12",
+ "if (false) {} else ;",
+ "if (false) {} else {}",
+ "if (false) {} else 12",
+ "if (false) ;"
+ "if (false) {}",
+ "if (false) 12",
+ "do {} while (false)",
+ "for (;;) ;",
+ "for (;;) {}",
+ "for (;;) 12",
+ "continue",
+ "continue label",
+ "continue\nlabel",
+ "break",
+ "break label",
+ "break\nlabel",
+ "return",
+ "return 12",
+ "return\n12",
+ "with ({}) ;",
+ "with ({}) {}",
+ "with ({}) 12",
+ "switch ({}) { default: }"
+ "label3: "
+ "throw",
+ "throw 12",
+ "throw\n12",
+ "try {} catch(e) {}",
+ "try {} finally {}",
+ "try {} catch(e) {} finally {}",
+ "debugger",
+ NULL
+ };
+
+ const char* termination_data[] = {
+ "",
+ ";",
+ "\n",
+ ";\n",
+ "\n;",
+ NULL
+ };
+
+ v8::HandleScope handles;
+ v8::Persistent<v8::Context> context = v8::Context::New();
+ v8::Context::Scope context_scope(context);
+
+ int marker;
+ i::Isolate::Current()->stack_guard()->SetStackLimit(
+ reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
+
+ for (int i = 0; context_data[i][0] != NULL; ++i) {
+ for (int j = 0; statement_data[j] != NULL; ++j) {
+ for (int k = 0; termination_data[k] != NULL; ++k) {
+ int kPrefixLen = i::StrLength(context_data[i][0]);
+ int kStatementLen = i::StrLength(statement_data[j]);
+ int kTerminationLen = i::StrLength(termination_data[k]);
+ int kSuffixLen = i::StrLength(context_data[i][1]);
+ int kProgramSize = kPrefixLen + kStatementLen + kTerminationLen
+ + kSuffixLen + i::StrLength("label: for (;;) { }");
+
+ // Plug the source code pieces together.
+ i::Vector<char> program = i::Vector<char>::New(kProgramSize + 1);
+ int length = i::OS::SNPrintF(program,
+ "label: for (;;) { %s%s%s%s }",
+ context_data[i][0],
+ statement_data[j],
+ termination_data[k],
+ context_data[i][1]);
+ CHECK(length == kProgramSize);
+ i::Handle<i::String> source =
+ FACTORY->NewStringFromAscii(i::CStrVector(program.start()));
+ TestParserSyncWithFlags(source);
+ }
+ }
+ }
+}
diff --git a/src/3rdparty/v8/test/cctest/test-platform-linux.cc b/src/3rdparty/v8/test/cctest/test-platform-linux.cc
index 756b947..2a8d497 100644
--- a/src/3rdparty/v8/test/cctest/test-platform-linux.cc
+++ b/src/3rdparty/v8/test/cctest/test-platform-linux.cc
@@ -67,7 +67,7 @@ TEST(BusyLock) {
TEST(VirtualMemory) {
- OS::Setup();
+ OS::SetUp();
VirtualMemory* vm = new VirtualMemory(1 * MB);
CHECK(vm->IsReserved());
void* block_addr = vm->address();
diff --git a/src/3rdparty/v8/test/cctest/test-platform-win32.cc b/src/3rdparty/v8/test/cctest/test-platform-win32.cc
index 9bd0014..36b30aa 100644
--- a/src/3rdparty/v8/test/cctest/test-platform-win32.cc
+++ b/src/3rdparty/v8/test/cctest/test-platform-win32.cc
@@ -13,7 +13,7 @@ using namespace ::v8::internal;
TEST(VirtualMemory) {
- OS::Setup();
+ OS::SetUp();
VirtualMemory* vm = new VirtualMemory(1 * MB);
CHECK(vm->IsReserved());
void* block_addr = vm->address();
diff --git a/src/3rdparty/v8/test/cctest/test-random.cc b/src/3rdparty/v8/test/cctest/test-random.cc
new file mode 100644
index 0000000..a1f4931
--- /dev/null
+++ b/src/3rdparty/v8/test/cctest/test-random.cc
@@ -0,0 +1,109 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+#include "v8.h"
+
+#include "cctest.h"
+#include "compiler.h"
+#include "execution.h"
+#include "isolate.h"
+
+
+using namespace v8::internal;
+
+static v8::Persistent<v8::Context> env;
+
+
+void SetSeeds(Handle<ByteArray> seeds, uint32_t state0, uint32_t state1) {
+ for (int i = 0; i < 4; i++) {
+ seeds->set(i, static_cast<byte>(state0 >> (i * kBitsPerByte)));
+ seeds->set(i + 4, static_cast<byte>(state1 >> (i * kBitsPerByte)));
+ }
+}
+
+
+void TestSeeds(Handle<JSFunction> fun,
+ Handle<Context> context,
+ uint32_t state0,
+ uint32_t state1) {
+ bool has_pending_exception;
+ Handle<JSObject> global(context->global());
+ Handle<ByteArray> seeds(context->random_seed());
+
+ SetSeeds(seeds, state0, state1);
+ Handle<Object> value =
+ Execution::Call(fun, global, 0, NULL, &has_pending_exception);
+ CHECK(value->IsHeapNumber());
+ CHECK(fun->IsOptimized());
+ double crankshaft_value = HeapNumber::cast(*value)->value();
+
+ SetSeeds(seeds, state0, state1);
+ V8::FillHeapNumberWithRandom(*value, *context);
+ double runtime_value = HeapNumber::cast(*value)->value();
+ CHECK_EQ(runtime_value, crankshaft_value);
+}
+
+
+TEST(CrankshaftRandom) {
+ if (env.IsEmpty()) env = v8::Context::New();
+ // Skip test if crankshaft is disabled.
+ if (!V8::UseCrankshaft()) return;
+ v8::HandleScope scope;
+ env->Enter();
+
+ Handle<Context> context(Isolate::Current()->context());
+ Handle<JSObject> global(context->global());
+ Handle<ByteArray> seeds(context->random_seed());
+ bool has_pending_exception;
+
+ CompileRun("function f() { return Math.random(); }");
+
+ Object* symbol = FACTORY->LookupAsciiSymbol("f")->ToObjectChecked();
+ MaybeObject* fun_object =
+ context->global()->GetProperty(String::cast(symbol));
+ Handle<JSFunction> fun(JSFunction::cast(fun_object->ToObjectChecked()));
+
+ // Optimize function.
+ Execution::Call(fun, global, 0, NULL, &has_pending_exception);
+ Execution::Call(fun, global, 0, NULL, &has_pending_exception);
+ if (!fun->IsOptimized()) fun->MarkForLazyRecompilation();
+
+ // Test with some random values.
+ TestSeeds(fun, context, 0xC0C0AFFE, 0x31415926);
+ TestSeeds(fun, context, 0x01020304, 0xFFFFFFFF);
+ TestSeeds(fun, context, 0x00000001, 0x00000000);
+
+ // Test that we bail out to runtime when seeds are uninitialized (zeros).
+ SetSeeds(seeds, 0, 0);
+ Handle<Object> value =
+ Execution::Call(fun, global, 0, NULL, &has_pending_exception);
+ CHECK(value->IsHeapNumber());
+ CHECK(fun->IsOptimized());
+ double crankshaft_value = HeapNumber::cast(*value)->value();
+ CHECK_NE(0.0, crankshaft_value);
+}
diff --git a/src/3rdparty/v8/test/cctest/test-regexp.cc b/src/3rdparty/v8/test/cctest/test-regexp.cc
index 3070e16..54898a0 100644
--- a/src/3rdparty/v8/test/cctest/test-regexp.cc
+++ b/src/3rdparty/v8/test/cctest/test-regexp.cc
@@ -449,6 +449,7 @@ static bool IsWhiteSpace(uc16 c) {
case 0xA0:
case 0x2028:
case 0x2029:
+ case 0xFEFF:
return true;
default:
return unibrow::Space::Is(c);
@@ -503,7 +504,10 @@ static RegExpNode* Compile(const char* input, bool multiline, bool is_ascii) {
return NULL;
Handle<String> pattern = isolate->factory()->
NewStringFromUtf8(CStrVector(input));
- RegExpEngine::Compile(&compile_data, false, multiline, pattern, is_ascii);
+ Handle<String> sample_subject =
+ isolate->factory()->NewStringFromUtf8(CStrVector(""));
+ RegExpEngine::Compile(
+ &compile_data, false, multiline, pattern, sample_subject, is_ascii);
return compile_data.node;
}
diff --git a/src/3rdparty/v8/test/cctest/test-serialize.cc b/src/3rdparty/v8/test/cctest/test-serialize.cc
index b5c1a09..e426e7b 100644
--- a/src/3rdparty/v8/test/cctest/test-serialize.cc
+++ b/src/3rdparty/v8/test/cctest/test-serialize.cc
@@ -558,7 +558,8 @@ DEPENDENT_TEST(ContextDeserialization, ContextSerialization) {
TEST(LinearAllocation) {
v8::V8::Initialize();
int new_space_max = 512 * KB;
- int paged_space_max = Page::kMaxHeapObjectSize;
+ int paged_space_max = Page::kMaxNonCodeHeapObjectSize;
+ int code_space_max = HEAP->code_space()->AreaSize();
for (int size = 1000; size < 5 * MB; size += size >> 1) {
size &= ~8; // Round.
@@ -568,7 +569,7 @@ TEST(LinearAllocation) {
new_space_size,
paged_space_size, // Old pointer space.
paged_space_size, // Old data space.
- HEAP->code_space()->RoundSizeDownToObjectAlignment(paged_space_size),
+ HEAP->code_space()->RoundSizeDownToObjectAlignment(code_space_max),
HEAP->map_space()->RoundSizeDownToObjectAlignment(paged_space_size),
HEAP->cell_space()->RoundSizeDownToObjectAlignment(paged_space_size),
size); // Large object space.
@@ -604,7 +605,7 @@ TEST(LinearAllocation) {
int old_page_fullness = i % Page::kPageSize;
int page_fullness = (i + kSmallFixedArraySize) % Page::kPageSize;
if (page_fullness < old_page_fullness ||
- page_fullness > Page::kObjectAreaSize) {
+ page_fullness > HEAP->old_pointer_space()->AreaSize()) {
i = RoundUp(i, Page::kPageSize);
pointer_last = NULL;
}
@@ -624,7 +625,7 @@ TEST(LinearAllocation) {
int old_page_fullness = i % Page::kPageSize;
int page_fullness = (i + kSmallStringSize) % Page::kPageSize;
if (page_fullness < old_page_fullness ||
- page_fullness > Page::kObjectAreaSize) {
+ page_fullness > HEAP->old_data_space()->AreaSize()) {
i = RoundUp(i, Page::kPageSize);
data_last = NULL;
}
@@ -642,7 +643,7 @@ TEST(LinearAllocation) {
int old_page_fullness = i % Page::kPageSize;
int page_fullness = (i + kMapSize) % Page::kPageSize;
if (page_fullness < old_page_fullness ||
- page_fullness > Page::kObjectAreaSize) {
+ page_fullness > HEAP->map_space()->AreaSize()) {
i = RoundUp(i, Page::kPageSize);
map_last = NULL;
}
@@ -653,7 +654,7 @@ TEST(LinearAllocation) {
map_last = obj;
}
- if (size > Page::kObjectAreaSize) {
+ if (size > Page::kMaxNonCodeHeapObjectSize) {
// Support for reserving space in large object space is not there yet,
// but using an always-allocate scope is fine for now.
AlwaysAllocateScope always;
diff --git a/src/3rdparty/v8/test/cctest/test-sockets.cc b/src/3rdparty/v8/test/cctest/test-sockets.cc
index 4af55db..ad73540 100644
--- a/src/3rdparty/v8/test/cctest/test-sockets.cc
+++ b/src/3rdparty/v8/test/cctest/test-sockets.cc
@@ -129,7 +129,7 @@ TEST(Socket) {
bool ok;
// Initialize socket support.
- ok = Socket::Setup();
+ ok = Socket::SetUp();
CHECK(ok);
// Send and receive some data.
diff --git a/src/3rdparty/v8/test/cctest/test-spaces.cc b/src/3rdparty/v8/test/cctest/test-spaces.cc
index ee60086..0e95704 100644
--- a/src/3rdparty/v8/test/cctest/test-spaces.cc
+++ b/src/3rdparty/v8/test/cctest/test-spaces.cc
@@ -125,14 +125,14 @@ class TestMemoryAllocatorScope {
TEST(MemoryAllocator) {
- OS::Setup();
+ OS::SetUp();
Isolate* isolate = Isolate::Current();
isolate->InitializeLoggingAndCounters();
Heap* heap = isolate->heap();
CHECK(isolate->heap()->ConfigureHeapDefault());
MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
- CHECK(memory_allocator->Setup(heap->MaxReserved(),
+ CHECK(memory_allocator->SetUp(heap->MaxReserved(),
heap->MaxExecutableSize()));
int total_pages = 0;
@@ -140,8 +140,8 @@ TEST(MemoryAllocator) {
heap->MaxReserved(),
OLD_POINTER_SPACE,
NOT_EXECUTABLE);
- Page* first_page =
- memory_allocator->AllocatePage(&faked_space, NOT_EXECUTABLE);
+ Page* first_page = memory_allocator->AllocatePage(
+ faked_space.AreaSize(), &faked_space, NOT_EXECUTABLE);
first_page->InsertAfter(faked_space.anchor()->prev_page());
CHECK(first_page->is_valid());
@@ -153,8 +153,8 @@ TEST(MemoryAllocator) {
}
// Again, we should get n or n - 1 pages.
- Page* other =
- memory_allocator->AllocatePage(&faked_space, NOT_EXECUTABLE);
+ Page* other = memory_allocator->AllocatePage(
+ faked_space.AreaSize(), &faked_space, NOT_EXECUTABLE);
CHECK(other->is_valid());
total_pages++;
other->InsertAfter(first_page);
@@ -175,25 +175,26 @@ TEST(MemoryAllocator) {
TEST(NewSpace) {
- OS::Setup();
+ OS::SetUp();
Isolate* isolate = Isolate::Current();
isolate->InitializeLoggingAndCounters();
Heap* heap = isolate->heap();
CHECK(heap->ConfigureHeapDefault());
MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
- CHECK(memory_allocator->Setup(heap->MaxReserved(),
+ CHECK(memory_allocator->SetUp(heap->MaxReserved(),
heap->MaxExecutableSize()));
TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
NewSpace new_space(heap);
- CHECK(new_space.Setup(HEAP->ReservedSemiSpaceSize(),
+ CHECK(new_space.SetUp(HEAP->ReservedSemiSpaceSize(),
HEAP->ReservedSemiSpaceSize()));
- CHECK(new_space.HasBeenSetup());
+ CHECK(new_space.HasBeenSetUp());
- while (new_space.Available() >= Page::kMaxHeapObjectSize) {
+ while (new_space.Available() >= Page::kMaxNonCodeHeapObjectSize) {
Object* obj =
- new_space.AllocateRaw(Page::kMaxHeapObjectSize)->ToObjectUnchecked();
+ new_space.AllocateRaw(Page::kMaxNonCodeHeapObjectSize)->
+ ToObjectUnchecked();
CHECK(new_space.Contains(HeapObject::cast(obj)));
}
@@ -204,13 +205,13 @@ TEST(NewSpace) {
TEST(OldSpace) {
- OS::Setup();
+ OS::SetUp();
Isolate* isolate = Isolate::Current();
isolate->InitializeLoggingAndCounters();
Heap* heap = isolate->heap();
CHECK(heap->ConfigureHeapDefault());
MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
- CHECK(memory_allocator->Setup(heap->MaxReserved(),
+ CHECK(memory_allocator->SetUp(heap->MaxReserved(),
heap->MaxExecutableSize()));
TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
@@ -220,10 +221,10 @@ TEST(OldSpace) {
NOT_EXECUTABLE);
CHECK(s != NULL);
- CHECK(s->Setup());
+ CHECK(s->SetUp());
while (s->Available() > 0) {
- s->AllocateRaw(Page::kMaxHeapObjectSize)->ToObjectUnchecked();
+ s->AllocateRaw(Page::kMaxNonCodeHeapObjectSize)->ToObjectUnchecked();
}
s->TearDown();
diff --git a/src/3rdparty/v8/test/cctest/test-strings.cc b/src/3rdparty/v8/test/cctest/test-strings.cc
index 93f7588..e2a179f 100644
--- a/src/3rdparty/v8/test/cctest/test-strings.cc
+++ b/src/3rdparty/v8/test/cctest/test-strings.cc
@@ -355,7 +355,7 @@ TEST(ExternalShortStringAdd) {
// Make sure we cover all always-flat lengths and at least one above.
static const int kMaxLength = 20;
- CHECK_GT(kMaxLength, i::String::kMinNonFlatLength);
+ CHECK_GT(kMaxLength, i::ConsString::kMinLength);
// Allocate two JavaScript arrays for holding short strings.
v8::Handle<v8::Array> ascii_external_strings =
@@ -587,3 +587,36 @@ TEST(SliceFromSlice) {
CHECK(SlicedString::cast(*string)->parent()->IsSeqString());
CHECK_EQ("cdefghijklmnopqrstuvwx", *(string->ToCString()));
}
+
+
+TEST(AsciiArrayJoin) {
+ // Set heap limits.
+ static const int K = 1024;
+ v8::ResourceConstraints constraints;
+ constraints.set_max_young_space_size(256 * K);
+ constraints.set_max_old_space_size(4 * K * K);
+ v8::SetResourceConstraints(&constraints);
+
+ // String s is made of 2^17 = 131072 'c' characters and a is an array
+ // starting with 'bad', followed by 2^14 times the string s. That means the
+ // total length of the concatenated strings is 2^31 + 3. So on 32bit systems
+ // summing the lengths of the strings (as Smis) overflows and wraps.
+ static const char* join_causing_out_of_memory =
+ "var two_14 = Math.pow(2, 14);"
+ "var two_17 = Math.pow(2, 17);"
+ "var s = Array(two_17 + 1).join('c');"
+ "var a = ['bad'];"
+ "for (var i = 1; i <= two_14; i++) a.push(s);"
+      "a.join('');";
+
+ v8::HandleScope scope;
+ LocalContext context;
+ v8::V8::IgnoreOutOfMemoryException();
+ v8::Local<v8::Script> script =
+ v8::Script::Compile(v8::String::New(join_causing_out_of_memory));
+ v8::Local<v8::Value> result = script->Run();
+
+ // Check for out of memory state.
+ CHECK(result.IsEmpty());
+ CHECK(context->HasOutOfMemoryException());
+}
diff --git a/src/3rdparty/v8/test/cctest/test-thread-termination.cc b/src/3rdparty/v8/test/cctest/test-thread-termination.cc
index 1aa57e3..cebabaa 100644
--- a/src/3rdparty/v8/test/cctest/test-thread-termination.cc
+++ b/src/3rdparty/v8/test/cctest/test-thread-termination.cc
@@ -255,6 +255,10 @@ TEST(TerminateMultipleV8ThreadsDefaultIsolate) {
threads[i]->Join();
delete threads[i];
}
+ {
+ v8::Locker locker;
+ v8::Locker::StopPreemption();
+ }
delete semaphore;
semaphore = NULL;
diff --git a/src/3rdparty/v8/test/cctest/test-utils.cc b/src/3rdparty/v8/test/cctest/test-utils.cc
index e4f70df..df8ff72 100644
--- a/src/3rdparty/v8/test/cctest/test-utils.cc
+++ b/src/3rdparty/v8/test/cctest/test-utils.cc
@@ -105,7 +105,7 @@ void TestMemCopy(Vector<byte> src,
TEST(MemCopy) {
v8::V8::Initialize();
- OS::Setup();
+ OS::SetUp();
const int N = OS::kMinComplexMemCopy + 128;
Vector<byte> buffer1 = Vector<byte>::New(N);
Vector<byte> buffer2 = Vector<byte>::New(N);
diff --git a/src/3rdparty/v8/test/es5conform/es5conform.status b/src/3rdparty/v8/test/es5conform/es5conform.status
index bf3ee8b..12ebf90 100644
--- a/src/3rdparty/v8/test/es5conform/es5conform.status
+++ b/src/3rdparty/v8/test/es5conform/es5conform.status
@@ -314,8 +314,3 @@ chapter15/15.3/15.3.2/15.3.2.1/15.3.2.1-11-6-s: FAIL
# Array.prototype.reduce - null passed as thisValue to strict callbackfn
# Invalid test case: http://es5conform.codeplex.com/workitem/29085
chapter15/15.4/15.4.4/15.4.4.21/15.4.4.21-9-c-ii-4-s: FAIL
-
-[ $arch == mips ]
-
-# Skip all tests on MIPS.
-*: SKIP
diff --git a/src/3rdparty/v8/test/message/message.status b/src/3rdparty/v8/test/message/message.status
index 70354ce..fc2896b 100644
--- a/src/3rdparty/v8/test/message/message.status
+++ b/src/3rdparty/v8/test/message/message.status
@@ -29,10 +29,3 @@ prefix message
# All tests in the bug directory are expected to fail.
bugs: FAIL
-
-
-##############################################################################
-[ $arch == mips ]
-
-# Skip all tests on MIPS.
-*: SKIP
diff --git a/src/3rdparty/v8/test/mjsunit/array-construct-transition.js b/src/3rdparty/v8/test/mjsunit/array-construct-transition.js
new file mode 100644
index 0000000..577e321
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/array-construct-transition.js
@@ -0,0 +1,39 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --smi-only-arrays
+
+support_smi_only_arrays = %HasFastSmiOnlyElements(new Array(1,2,3,4,5,6,7,8));
+
+if (support_smi_only_arrays) {
+ var a = new Array(0, 1, 2);
+ assertTrue(%HasFastSmiOnlyElements(a));
+ var b = new Array(0.5, 1.2, 2.3);
+ assertTrue(%HasFastDoubleElements(b));
+ var c = new Array(0.5, 1.2, new Object());
+ assertTrue(%HasFastElements(c));
+}
diff --git a/src/3rdparty/v8/test/mjsunit/array-join.js b/src/3rdparty/v8/test/mjsunit/array-join.js
index 5c837a5..c08c182 100644
--- a/src/3rdparty/v8/test/mjsunit/array-join.js
+++ b/src/3rdparty/v8/test/mjsunit/array-join.js
@@ -75,10 +75,10 @@ if (Array.prototype.toString != oldToString) {
Array.prototype.toString = oldToString;
}
-var a = new Array(123123123);
-assertEquals(123123122, String(a).length);
-assertEquals(123123122, a.join(",").length);
-assertEquals(246246244, a.join("oo").length);
+var a = new Array(123123);
+assertEquals(123122, String(a).length);
+assertEquals(123122, a.join(",").length);
+assertEquals(246244, a.join("oo").length);
a = new Array(Math.pow(2,32) - 1); // Max length.
assertEquals("", a.join(""));
@@ -90,4 +90,4 @@ a = new Array(100001);
for (var i = 0; i < a.length; i++) a[i] = undefined;
a[5] = "ab";
a[90000] = "cd";
-assertEquals("abcd", a.join("")); // Must not throw.
\ No newline at end of file
+assertEquals("abcd", a.join("")); // Must not throw.
diff --git a/src/3rdparty/v8/test/mjsunit/array-literal-transitions.js b/src/3rdparty/v8/test/mjsunit/array-literal-transitions.js
index 321340c..f657525 100644
--- a/src/3rdparty/v8/test/mjsunit/array-literal-transitions.js
+++ b/src/3rdparty/v8/test/mjsunit/array-literal-transitions.js
@@ -33,7 +33,13 @@
// in this test case. Depending on whether smi-only arrays are actually
// enabled, this test takes the appropriate code path to check smi-only arrays.
-support_smi_only_arrays = %HasFastSmiOnlyElements(new Array());
+support_smi_only_arrays = %HasFastSmiOnlyElements([1,2,3,4,5,6,7,8,9,10]);
+
+if (support_smi_only_arrays) {
+ print("Tests include smi-only arrays.");
+} else {
+ print("Tests do NOT include smi-only arrays.");
+}
// IC and Crankshaft support for smi-only elements in dynamic array literals.
function get(foo) { return foo; } // Used to generate dynamic values.
@@ -122,4 +128,83 @@ if (support_smi_only_arrays) {
}
%OptimizeFunctionOnNextCall(test_large_literal);
test_large_literal();
+
+ function deopt_array(use_literal) {
+ if (use_literal) {
+ return [.5, 3, 4];
+ } else {
+ return new Array();
+ }
+ }
+
+ deopt_array(false);
+ deopt_array(false);
+ deopt_array(false);
+ %OptimizeFunctionOnNextCall(deopt_array);
+ var array = deopt_array(false);
+ assertTrue(2 != %GetOptimizationStatus(deopt_array));
+ deopt_array(true);
+ assertTrue(2 != %GetOptimizationStatus(deopt_array));
+ array = deopt_array(false);
+ assertTrue(2 != %GetOptimizationStatus(deopt_array));
+
+ // Check that unexpected changes in the objects stored into the boilerplate
+ // also force a deopt.
+ function deopt_array_literal_all_smis(a) {
+ return [0, 1, a];
+ }
+
+ deopt_array_literal_all_smis(2);
+ deopt_array_literal_all_smis(3);
+ deopt_array_literal_all_smis(4);
+ array = deopt_array_literal_all_smis(4);
+ assertEquals(0, array[0]);
+ assertEquals(1, array[1]);
+ assertEquals(4, array[2]);
+ %OptimizeFunctionOnNextCall(deopt_array_literal_all_smis);
+ array = deopt_array_literal_all_smis(5);
+ array = deopt_array_literal_all_smis(6);
+ assertTrue(2 != %GetOptimizationStatus(deopt_array_literal_all_smis));
+ assertEquals(0, array[0]);
+ assertEquals(1, array[1]);
+ assertEquals(6, array[2]);
+
+ array = deopt_array_literal_all_smis(.5);
+ assertTrue(1 != %GetOptimizationStatus(deopt_array_literal_all_smis));
+ assertEquals(0, array[0]);
+ assertEquals(1, array[1]);
+ assertEquals(.5, array[2]);
+
+ function deopt_array_literal_all_doubles(a) {
+ return [0.5, 1, a];
+ }
+
+ deopt_array_literal_all_doubles(.5);
+ deopt_array_literal_all_doubles(.5);
+ deopt_array_literal_all_doubles(.5);
+ array = deopt_array_literal_all_doubles(0.5);
+ assertEquals(0.5, array[0]);
+ assertEquals(1, array[1]);
+ assertEquals(0.5, array[2]);
+ %OptimizeFunctionOnNextCall(deopt_array_literal_all_doubles);
+ array = deopt_array_literal_all_doubles(5);
+ array = deopt_array_literal_all_doubles(6);
+ assertTrue(2 != %GetOptimizationStatus(deopt_array_literal_all_doubles));
+ assertEquals(0.5, array[0]);
+ assertEquals(1, array[1]);
+ assertEquals(6, array[2]);
+
+ var foo = new Object();
+ array = deopt_array_literal_all_doubles(foo);
+ assertTrue(1 != %GetOptimizationStatus(deopt_array_literal_all_doubles));
+ assertEquals(0.5, array[0]);
+ assertEquals(1, array[1]);
+ assertEquals(foo, array[2]);
}
+
+(function literals_after_osr() {
+ var color = [0];
+ // Trigger OSR.
+ while (%GetOptimizationStatus(literals_after_osr) == 2) {}
+ return [color[0]];
+})();
diff --git a/src/3rdparty/v8/test/mjsunit/array-store-and-grow.js b/src/3rdparty/v8/test/mjsunit/array-store-and-grow.js
new file mode 100644
index 0000000..131d4eb
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/array-store-and-grow.js
@@ -0,0 +1,183 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Verifies that the KeyedStoreIC correctly handles out-of-bounds stores
+// to an array that grow it by a single element. Test functions are
+// called twice to make sure that the IC is used, first call is handled
+// by the runtime in the miss stub.
+
+function array_store_1(a,b,c) {
+ return (a[b] = c);
+}
+
+// Check handling of the empty array.
+var a = [];
+array_store_1(a, 0, 1);
+a = [];
+array_store_1(a, 0, 1);
+assertEquals(1, a[0]);
+assertEquals(1, array_store_1([], 0, 1));
+
+a = [];
+for (x=0;x<100000;++x) {
+ assertEquals(x, array_store_1(a, x, x));
+}
+
+for (x=0;x<100000;++x) {
+ assertEquals(x, array_store_1([], 0, x));
+}
+
+function array_store_2(a,b,c) {
+ return (a[b] = c);
+}
+
+a = [];
+array_store_2(a, 0, 0.5);
+a = [];
+array_store_2(a, 0, 0.5);
+assertEquals(0.5, a[0]);
+assertEquals(0.5, array_store_2([], 0, 0.5));
+
+function array_store_3(a,b,c) {
+ return (a[b] = c);
+}
+
+x = new Object();
+a = [];
+array_store_3(a, 0, x);
+a = [];
+array_store_3(a, 0, x);
+assertEquals(x, a[0]);
+assertEquals(x, array_store_3([], 0, x));
+
+// Check the handling of COW arrays
+function makeCOW() {
+ return [1];
+}
+
+function array_store_4(a,b,c) {
+ return (a[b] = c);
+}
+
+a = makeCOW();
+array_store_4(a, 1, 1);
+a = makeCOW();
+array_store_4(a, 1, 1);
+assertEquals(1, a[1]);
+assertEquals(1, array_store_4([], 1, 1));
+
+function array_store_5(a,b,c) {
+ return (a[b] = c);
+}
+
+a = makeCOW();
+array_store_5(a, 1, 0.5);
+a = makeCOW();
+array_store_5(a, 1, 0.5);
+assertEquals(0.5, a[1]);
+assertEquals(0.5, array_store_5([], 1, 0.5));
+
+function array_store_6(a,b,c) {
+ return (a[b] = c);
+}
+
+a = makeCOW();
+array_store_6(a, 1, x);
+a = makeCOW();
+array_store_6(a, 1, x);
+assertEquals(x, a[1]);
+assertEquals(x, array_store_6([], 1, x));
+
+// Check the handling of mutable arrays.
+a = new Array(1,2,3);
+array_store_4(a, 3, 1);
+a = new Array(1,2,3);
+array_store_4(a, 3, 1);
+assertEquals(1, a[3]);
+assertEquals(1, array_store_4([], 3, 1));
+
+function array_store_5(a,b,c) {
+ return (a[b] = c);
+}
+
+a = new Array(1,2,3);
+array_store_5(a, 3, 0.5);
+a = new Array(1,2,3);
+array_store_5(a, 3, 0.5);
+assertEquals(0.5, a[3]);
+assertEquals(0.5, array_store_5([], 3, 0.5));
+
+function array_store_6(a,b,c) {
+ return (a[b] = c);
+}
+
+a = new Array(1,2,3);
+array_store_6(a, 3, x);
+a = new Array(1,2,3);
+array_store_6(a, 3, x);
+assertEquals(x, a[3]);
+assertEquals(x, array_store_6([], 3, x));
+
+function array_store_7(a,b,c) {
+ return (a[b] = c);
+}
+
+// Check the handling of mutable arrays of doubles
+var a = new Array(0.5, 1.5);
+array_store_7(a, 2, .5);
+a = new Array(0.5, 1.5);
+array_store_7(a, 2, .5);
+assertEquals(0.5, a[2]);
+a = new Array(0.5, 1.5);
+assertEquals(0.5, array_store_7(a, 2, 0.5));
+
+for (x=0;x<100000;++x) {
+ a = new Array(0.5, 1.5);
+ assertEquals(x, array_store_7(a, 2, x));
+}
+
+function array_store_8(a,b,c) {
+ return (a[b] = c);
+}
+
+var a = new Array(0.5, 1.5);
+array_store_8(a, 2, .5);
+a = new Array(0.5, 1.5);
+array_store_8(a, 10, .5);
+assertEquals(0.5, a[10]);
+
+// Grow the empty array with a double store.
+function array_store_9(a,b,c) {
+ return (a[b] = c);
+}
+
+var a = [];
+array_store_9(a, 0, 0.5);
+a = [];
+array_store_1(a, 0, 0.5);
+assertEquals(0.5, a[0]);
+assertEquals(0.5, array_store_1([], 0, 0.5));
diff --git a/src/3rdparty/v8/test/mjsunit/bugs/bug-618.js b/src/3rdparty/v8/test/mjsunit/bugs/bug-618.js
index ae84326..0513f87 100644
--- a/src/3rdparty/v8/test/mjsunit/bugs/bug-618.js
+++ b/src/3rdparty/v8/test/mjsunit/bugs/bug-618.js
@@ -42,4 +42,4 @@ function C() {
assertEquals(23, new C().x);
C.prototype.__defineSetter__('x', function(value) { this.y = 23; });
-assertEquals(void 0, new C().x));
+assertEquals(void 0, new C().x);
diff --git a/src/3rdparty/v8/test/mjsunit/builtins.js b/src/3rdparty/v8/test/mjsunit/builtins.js
index f2ad544..e43b589 100644
--- a/src/3rdparty/v8/test/mjsunit/builtins.js
+++ b/src/3rdparty/v8/test/mjsunit/builtins.js
@@ -27,8 +27,7 @@
// Flags: --expose-natives-as=builtins
-// Checks that all function properties of the builtin object are neither
-// writable nor configurable. Also, theose functions that are actually
+// Checks that all function properties of the builtin object that are actually
// constructors (recognized by having properties on their .prototype object),
// have only unconfigurable properties on the prototype, and the methods
// are also non-writable.
@@ -75,8 +74,6 @@ for (var i = 0; i < names.length; i++) {
assertTrue(desc.hasOwnProperty("value"));
var value = desc.value;
if (isFunction(value)) {
- assertFalse(desc.writable, name);
- assertFalse(desc.configurable, name);
checkConstructor(value, name);
}
}
diff --git a/src/3rdparty/v8/test/mjsunit/comparison-ops-and-undefined.js b/src/3rdparty/v8/test/mjsunit/comparison-ops-and-undefined.js
new file mode 100644
index 0000000..06db076
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/comparison-ops-and-undefined.js
@@ -0,0 +1,128 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function test_helper_for_ics(func, b1, b2, b3, b4) {
+ assertEquals(b1, func(.5, .5));
+ assertEquals(b2, func(.5, undefined));
+ assertEquals(b3, func(undefined, .5));
+ assertEquals(b4, func(undefined, undefined));
+}
+
+function test_helper_for_crankshaft(func, b1, b2, b3, b4) {
+ assertEquals(b1, func(.5, .5));
+ %OptimizeFunctionOnNextCall(func);
+ assertEquals(b1, func(.5, .5));
+ assertEquals(b2, func(.5, undefined));
+ assertEquals(b3, func(undefined, .5));
+ assertEquals(b4, func(undefined, undefined));
+}
+
+function less_1(a, b) {
+ return a < b;
+}
+
+test_helper_for_ics(less_1, false, false, false, false);
+
+function less_2(a, b) {
+ return a < b;
+}
+
+test_helper_for_crankshaft(less_1, false, false, false, false);
+
+function greater_1(a, b) {
+ return a > b;
+}
+
+test_helper_for_ics(greater_1, false, false, false, false);
+
+function greater_2(a, b) {
+ return a > b;
+}
+
+test_helper_for_crankshaft(greater_1, false, false, false, false);
+
+function less_equal_1(a, b) {
+ return a <= b;
+}
+
+test_helper_for_ics(less_equal_1, true, false, false, false);
+
+function less_equal_2(a, b) {
+ return a <= b;
+}
+
+test_helper_for_crankshaft(less_equal_1, true, false, false, false);
+
+function greater_equal_1(a, b) {
+ return a >= b;
+}
+
+test_helper_for_ics(greater_equal_1, true, false, false, false);
+
+function greater_equal_2(a, b) {
+ return a >= b;
+}
+
+test_helper_for_crankshaft(greater_equal_1, true, false, false, false);
+
+function equal_1(a, b) {
+ return a == b;
+}
+
+test_helper_for_ics(equal_1, true, false, false, true);
+
+function equal_2(a, b) {
+ return a == b;
+}
+
+test_helper_for_crankshaft(equal_2, true, false, false, true);
+
+function strict_equal_1(a, b) {
+ return a === b;
+}
+
+test_helper_for_ics(strict_equal_1, true, false, false, true);
+
+function strict_equal_2(a, b) {
+ return a === b;
+}
+
+test_helper_for_crankshaft(strict_equal_2, true, false, false, true);
+
+function not_equal_1(a, b) {
+ return a != b;
+}
+
+test_helper_for_ics(not_equal_1, false, true, true, false);
+
+function not_equal_2(a, b) {
+ return a != b;
+}
+
+test_helper_for_crankshaft(not_equal_2, false, true, true, false);
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/alloc-object-huge.js b/src/3rdparty/v8/test/mjsunit/compiler/alloc-object-huge.js
new file mode 100644
index 0000000..d6d9f1b
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compiler/alloc-object-huge.js
@@ -0,0 +1,308 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --inline-construct --nolimit-inlining
+
+// Test that huge constructors (more than 256 this assignments) are
+// handled correctly.
+
+// Test huge constructor when being inlined into hydrogen.
+function test() {
+ return new huge();
+}
+test();
+test();
+%OptimizeFunctionOnNextCall(test);
+var o = test();
+assertEquals(1, o.foo1);
+assertEquals(257, o.foo257);
+
+// Test huge constructor with specialized constructor stub.
+var o = new huge();
+assertEquals(1, o.foo1);
+assertEquals(257, o.foo257);
+
+// The huge constructor, nothing interesting beyond this point.
+function huge() {
+ this.foo1 = 1;
+ this.foo2 = 2;
+ this.foo3 = 3;
+ this.foo4 = 4;
+ this.foo5 = 5;
+ this.foo6 = 6;
+ this.foo7 = 7;
+ this.foo8 = 8;
+ this.foo9 = 9;
+ this.foo10 = 10;
+ this.foo11 = 11;
+ this.foo12 = 12;
+ this.foo13 = 13;
+ this.foo14 = 14;
+ this.foo15 = 15;
+ this.foo16 = 16;
+ this.foo17 = 17;
+ this.foo18 = 18;
+ this.foo19 = 19;
+ this.foo20 = 20;
+ this.foo21 = 21;
+ this.foo22 = 22;
+ this.foo23 = 23;
+ this.foo24 = 24;
+ this.foo25 = 25;
+ this.foo26 = 26;
+ this.foo27 = 27;
+ this.foo28 = 28;
+ this.foo29 = 29;
+ this.foo30 = 30;
+ this.foo31 = 31;
+ this.foo32 = 32;
+ this.foo33 = 33;
+ this.foo34 = 34;
+ this.foo35 = 35;
+ this.foo36 = 36;
+ this.foo37 = 37;
+ this.foo38 = 38;
+ this.foo39 = 39;
+ this.foo40 = 40;
+ this.foo41 = 41;
+ this.foo42 = 42;
+ this.foo43 = 43;
+ this.foo44 = 44;
+ this.foo45 = 45;
+ this.foo46 = 46;
+ this.foo47 = 47;
+ this.foo48 = 48;
+ this.foo49 = 49;
+ this.foo50 = 50;
+ this.foo51 = 51;
+ this.foo52 = 52;
+ this.foo53 = 53;
+ this.foo54 = 54;
+ this.foo55 = 55;
+ this.foo56 = 56;
+ this.foo57 = 57;
+ this.foo58 = 58;
+ this.foo59 = 59;
+ this.foo60 = 60;
+ this.foo61 = 61;
+ this.foo62 = 62;
+ this.foo63 = 63;
+ this.foo64 = 64;
+ this.foo65 = 65;
+ this.foo66 = 66;
+ this.foo67 = 67;
+ this.foo68 = 68;
+ this.foo69 = 69;
+ this.foo70 = 70;
+ this.foo71 = 71;
+ this.foo72 = 72;
+ this.foo73 = 73;
+ this.foo74 = 74;
+ this.foo75 = 75;
+ this.foo76 = 76;
+ this.foo77 = 77;
+ this.foo78 = 78;
+ this.foo79 = 79;
+ this.foo80 = 80;
+ this.foo81 = 81;
+ this.foo82 = 82;
+ this.foo83 = 83;
+ this.foo84 = 84;
+ this.foo85 = 85;
+ this.foo86 = 86;
+ this.foo87 = 87;
+ this.foo88 = 88;
+ this.foo89 = 89;
+ this.foo90 = 90;
+ this.foo91 = 91;
+ this.foo92 = 92;
+ this.foo93 = 93;
+ this.foo94 = 94;
+ this.foo95 = 95;
+ this.foo96 = 96;
+ this.foo97 = 97;
+ this.foo98 = 98;
+ this.foo99 = 99;
+ this.foo100 = 100;
+ this.foo101 = 101;
+ this.foo102 = 102;
+ this.foo103 = 103;
+ this.foo104 = 104;
+ this.foo105 = 105;
+ this.foo106 = 106;
+ this.foo107 = 107;
+ this.foo108 = 108;
+ this.foo109 = 109;
+ this.foo110 = 110;
+ this.foo111 = 111;
+ this.foo112 = 112;
+ this.foo113 = 113;
+ this.foo114 = 114;
+ this.foo115 = 115;
+ this.foo116 = 116;
+ this.foo117 = 117;
+ this.foo118 = 118;
+ this.foo119 = 119;
+ this.foo120 = 120;
+ this.foo121 = 121;
+ this.foo122 = 122;
+ this.foo123 = 123;
+ this.foo124 = 124;
+ this.foo125 = 125;
+ this.foo126 = 126;
+ this.foo127 = 127;
+ this.foo128 = 128;
+ this.foo129 = 129;
+ this.foo130 = 130;
+ this.foo131 = 131;
+ this.foo132 = 132;
+ this.foo133 = 133;
+ this.foo134 = 134;
+ this.foo135 = 135;
+ this.foo136 = 136;
+ this.foo137 = 137;
+ this.foo138 = 138;
+ this.foo139 = 139;
+ this.foo140 = 140;
+ this.foo141 = 141;
+ this.foo142 = 142;
+ this.foo143 = 143;
+ this.foo144 = 144;
+ this.foo145 = 145;
+ this.foo146 = 146;
+ this.foo147 = 147;
+ this.foo148 = 148;
+ this.foo149 = 149;
+ this.foo150 = 150;
+ this.foo151 = 151;
+ this.foo152 = 152;
+ this.foo153 = 153;
+ this.foo154 = 154;
+ this.foo155 = 155;
+ this.foo156 = 156;
+ this.foo157 = 157;
+ this.foo158 = 158;
+ this.foo159 = 159;
+ this.foo160 = 160;
+ this.foo161 = 161;
+ this.foo162 = 162;
+ this.foo163 = 163;
+ this.foo164 = 164;
+ this.foo165 = 165;
+ this.foo166 = 166;
+ this.foo167 = 167;
+ this.foo168 = 168;
+ this.foo169 = 169;
+ this.foo170 = 170;
+ this.foo171 = 171;
+ this.foo172 = 172;
+ this.foo173 = 173;
+ this.foo174 = 174;
+ this.foo175 = 175;
+ this.foo176 = 176;
+ this.foo177 = 177;
+ this.foo178 = 178;
+ this.foo179 = 179;
+ this.foo180 = 180;
+ this.foo181 = 181;
+ this.foo182 = 182;
+ this.foo183 = 183;
+ this.foo184 = 184;
+ this.foo185 = 185;
+ this.foo186 = 186;
+ this.foo187 = 187;
+ this.foo188 = 188;
+ this.foo189 = 189;
+ this.foo190 = 190;
+ this.foo191 = 191;
+ this.foo192 = 192;
+ this.foo193 = 193;
+ this.foo194 = 194;
+ this.foo195 = 195;
+ this.foo196 = 196;
+ this.foo197 = 197;
+ this.foo198 = 198;
+ this.foo199 = 199;
+ this.foo200 = 200;
+ this.foo201 = 201;
+ this.foo202 = 202;
+ this.foo203 = 203;
+ this.foo204 = 204;
+ this.foo205 = 205;
+ this.foo206 = 206;
+ this.foo207 = 207;
+ this.foo208 = 208;
+ this.foo209 = 209;
+ this.foo210 = 210;
+ this.foo211 = 211;
+ this.foo212 = 212;
+ this.foo213 = 213;
+ this.foo214 = 214;
+ this.foo215 = 215;
+ this.foo216 = 216;
+ this.foo217 = 217;
+ this.foo218 = 218;
+ this.foo219 = 219;
+ this.foo220 = 220;
+ this.foo221 = 221;
+ this.foo222 = 222;
+ this.foo223 = 223;
+ this.foo224 = 224;
+ this.foo225 = 225;
+ this.foo226 = 226;
+ this.foo227 = 227;
+ this.foo228 = 228;
+ this.foo229 = 229;
+ this.foo230 = 230;
+ this.foo231 = 231;
+ this.foo232 = 232;
+ this.foo233 = 233;
+ this.foo234 = 234;
+ this.foo235 = 235;
+ this.foo236 = 236;
+ this.foo237 = 237;
+ this.foo238 = 238;
+ this.foo239 = 239;
+ this.foo240 = 240;
+ this.foo241 = 241;
+ this.foo242 = 242;
+ this.foo243 = 243;
+ this.foo244 = 244;
+ this.foo245 = 245;
+ this.foo246 = 246;
+ this.foo247 = 247;
+ this.foo248 = 248;
+ this.foo249 = 249;
+ this.foo250 = 250;
+ this.foo251 = 251;
+ this.foo252 = 252;
+ this.foo253 = 253;
+ this.foo254 = 254;
+ this.foo255 = 255;
+ this.foo256 = 256;
+ this.foo257 = 257;
+}
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/alloc-object.js b/src/3rdparty/v8/test/mjsunit/compiler/alloc-object.js
new file mode 100644
index 0000000..1d44efb
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compiler/alloc-object.js
@@ -0,0 +1,90 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --expose-gc --inline-construct
+
+// Test that inlined object allocation works for different layouts of
+// objects (e.g. in object properties, slack tracking in progress or
+// changing of function prototypes)
+
+function test_helper(construct, a, b) {
+ return new construct(a, b);
+}
+
+function test(construct) {
+ %DeoptimizeFunction(test);
+ test_helper(construct, 0, 0);
+ test_helper(construct, 0, 0);
+ %OptimizeFunctionOnNextCall(test_helper);
+ // Test adding a new property after allocation was inlined.
+ var o = test_helper(construct, 1, 2);
+ o.z = 3;
+ assertEquals(1, o.x);
+ assertEquals(2, o.y);
+ assertEquals(3, o.z);
+ // Test changing the prototype after allocation was inlined.
+ construct.prototype = { z:6 };
+ var o = test_helper(construct, 4, 5);
+ assertEquals(4, o.x);
+ assertEquals(5, o.y);
+ assertEquals(6, o.z);
+ %DeoptimizeFunction(test_helper);
+ gc(); // Makes V8 forget about type information for test_helper.
+}
+
+function finalize_slack_tracking(construct) {
+ // Value chosen based on kGenerousAllocationCount = 8.
+ for (var i = 0; i < 8; i++) {
+ new construct(0, 0);
+ }
+}
+
+
+// Both properties are pre-allocated in object properties.
+function ConstructInObjectPreAllocated(a, b) {
+ this.x = a;
+ this.y = b;
+}
+finalize_slack_tracking(ConstructInObjectPreAllocated);
+test(ConstructInObjectPreAllocated);
+
+
+// Both properties are unused in object properties.
+function ConstructInObjectUnused(a, b) {
+ this.x = a < 0 ? 0 : a;
+ this.y = b > 0 ? b : 0;
+}
+finalize_slack_tracking(ConstructInObjectUnused);
+test(ConstructInObjectUnused);
+
+
+// Test inlined allocation while slack tracking is still in progress.
+function ConstructWhileSlackTracking(a, b) {
+ this.x = a;
+ this.y = b;
+}
+test(ConstructWhileSlackTracking);
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/inline-arguments.js b/src/3rdparty/v8/test/mjsunit/compiler/inline-arguments.js
index 532fc26..b6adf7f 100644
--- a/src/3rdparty/v8/test/mjsunit/compiler/inline-arguments.js
+++ b/src/3rdparty/v8/test/mjsunit/compiler/inline-arguments.js
@@ -27,11 +27,89 @@
// Flags: --allow-natives-syntax
-// Test inlining functions that use arguments.
-function f() { return g(1, 2, 3); }
+function A() {
+}
-function g(x, y, z) { return %_ArgumentsLength(); }
+A.prototype.X = function (a, b, c) {
+ assertTrue(this instanceof A);
+ assertEquals(1, a);
+ assertEquals(2, b);
+ assertEquals(3, c);
+};
-for (var i = 0; i < 5; ++i) f();
-%OptimizeFunctionOnNextCall(f);
-assertEquals(3, f());
+A.prototype.Y = function () {
+ this.X.apply(this, arguments);
+};
+
+A.prototype.Z = function () {
+ this.Y(1,2,3);
+};
+
+var a = new A();
+a.Z(4,5,6);
+a.Z(4,5,6);
+%OptimizeFunctionOnNextCall(a.Z);
+a.Z(4,5,6);
+A.prototype.X.apply = function (receiver, args) {
+ return Function.prototype.apply.call(this, receiver, args);
+};
+a.Z(4,5,6);
+
+
+// Ensure that HArgumentsObject is inserted in a correct place
+// and dominates all uses.
+function F1() { }
+function F2() { F1.apply(this, arguments); }
+function F3(x, y) {
+ if (x) {
+ F2(y);
+ }
+}
+
+function F31() {
+ return F1.apply(this, arguments);
+}
+
+function F4() {
+ F3(true, false);
+ return F31(1);
+}
+
+F4(1);
+F4(1);
+F4(1);
+%OptimizeFunctionOnNextCall(F4);
+F4(1);
+
+
+// Test correct adaptation of arguments.
+// Strict mode prevents arguments object from shadowing parameters.
+(function () {
+ "use strict";
+
+ function G2() {
+ assertArrayEquals([1,2], arguments);
+ }
+
+ function G4() {
+ assertArrayEquals([1,2,3,4], arguments);
+ }
+
+ function adapt2to4(a, b, c, d) {
+ G2.apply(this, arguments);
+ }
+
+ function adapt4to2(a, b) {
+ G4.apply(this, arguments);
+ }
+
+ function test_adaptation() {
+ adapt2to4(1, 2);
+ adapt4to2(1, 2, 3, 4);
+ }
+
+ test_adaptation();
+ test_adaptation();
+ %OptimizeFunctionOnNextCall(test_adaptation);
+ test_adaptation();
+})();
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/inline-arity-mismatch.js b/src/3rdparty/v8/test/mjsunit/compiler/inline-arity-mismatch.js
new file mode 100644
index 0000000..4a61fa3
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compiler/inline-arity-mismatch.js
@@ -0,0 +1,62 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Test inlining at call sites with mismatched arity.
+
+function f(a) {
+ return a.x;
+}
+
+function g(a, b) {
+ return a.x;
+}
+
+function h1(a, b) {
+ return f(a, a) * g(b);
+}
+
+function h2(a, b) {
+ return f(a, a) * g(b);
+}
+
+
+var o = {x: 2};
+
+assertEquals(4, h1(o, o));
+assertEquals(4, h1(o, o));
+assertEquals(4, h2(o, o));
+assertEquals(4, h2(o, o));
+%OptimizeFunctionOnNextCall(h1);
+%OptimizeFunctionOnNextCall(h2);
+assertEquals(4, h1(o, o));
+assertEquals(4, h2(o, o));
+
+var u = {y:0, x:1};
+assertEquals(2, h1(u, o));
+assertEquals(2, h2(o, u));
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/inline-construct.js b/src/3rdparty/v8/test/mjsunit/compiler/inline-construct.js
new file mode 100644
index 0000000..af9e69c
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compiler/inline-construct.js
@@ -0,0 +1,152 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --expose-gc --inline-construct
+
+// Test inlining of constructor calls.
+
+function TestInlinedConstructor(closure) {
+ var result;
+ var counter = { value:0 };
+ result = closure(11, 12, counter);
+ assertEquals(23, result);
+ assertEquals(1, counter.value);
+ result = closure(23, 19, counter);
+ assertEquals(42, result);
+ assertEquals(2, counter.value);
+ %OptimizeFunctionOnNextCall(closure);
+ result = closure(1, 42, counter)
+ assertEquals(43, result);
+ assertEquals(3, counter.value);
+ result = closure("foo", "bar", counter)
+ assertEquals("foobar", result)
+ assertEquals(4, counter.value);
+}
+
+function TestInAllContexts(constructor) {
+ function value_context(a, b, counter) {
+ var obj = new constructor(a, b, counter);
+ return obj.x;
+ }
+ function test_context(a, b, counter) {
+ if (!new constructor(a, b, counter)) {
+ assertUnreachable("should not happen");
+ }
+ return a + b;
+ }
+ function effect_context(a, b, counter) {
+ new constructor(a, b, counter);
+ return a + b;
+ }
+ TestInlinedConstructor(value_context);
+ TestInlinedConstructor(test_context);
+ TestInlinedConstructor(effect_context);
+ %DeoptimizeFunction(value_context);
+ %DeoptimizeFunction(test_context);
+ %DeoptimizeFunction(effect_context);
+ gc(); // Makes V8 forget about type information for *_context.
+}
+
+
+// Test constructor returning nothing in all contexts.
+function c1(a, b, counter) {
+ this.x = a + b;
+ counter.value++;
+}
+TestInAllContexts(c1);
+
+
+// Test constructor returning an object in all contexts.
+function c2(a, b, counter) {
+ var obj = new Object();
+ obj.x = a + b;
+ counter.value++;
+ return obj;
+}
+TestInAllContexts(c2);
+
+
+// Test constructor returning a primitive value in all contexts.
+function c3(a, b, counter) {
+ this.x = a + b;
+ counter.value++;
+ return "not an object";
+}
+TestInAllContexts(c3);
+
+
+// Test constructor called with too many arguments.
+function c_too_many(a, b) {
+ this.x = a + b;
+}
+function f_too_many(a, b, c) {
+ var obj = new c_too_many(a, b, c);
+ return obj.x;
+}
+assertEquals(23, f_too_many(11, 12, 1));
+assertEquals(42, f_too_many(23, 19, 1));
+%OptimizeFunctionOnNextCall(f_too_many);
+assertEquals(43, f_too_many(1, 42, 1));
+assertEquals("foobar", f_too_many("foo", "bar", "baz"))
+
+
+// Test constructor called with too few arguments.
+function c_too_few(a, b) {
+ assertSame(undefined, b);
+ this.x = a + 1;
+}
+function f_too_few(a) {
+ var obj = new c_too_few(a);
+ return obj.x;
+}
+assertEquals(12, f_too_few(11));
+assertEquals(24, f_too_few(23));
+%OptimizeFunctionOnNextCall(f_too_few);
+assertEquals(2, f_too_few(1));
+assertEquals("foo1", f_too_few("foo"))
+
+
+// Test constructor that cannot be inlined.
+function c_unsupported_syntax(a, b, counter) {
+ try {
+ this.x = a + b;
+ counter.value++;
+ } catch(e) {
+ throw new Error();
+ }
+}
+TestInAllContexts(c_unsupported_syntax);
+
+
+// Regression test: Inlined constructors called as functions do not get their
+// implicit receiver object set to undefined, even in strict mode.
+function c_strict(a, b, counter) {
+ "use strict";
+ this.x = a + b;
+ counter.value++;
+}
+TestInAllContexts(c_strict);
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/inline-literals.js b/src/3rdparty/v8/test/mjsunit/compiler/inline-literals.js
new file mode 100644
index 0000000..f78abe8
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compiler/inline-literals.js
@@ -0,0 +1,50 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Test that we can inline functions containing materialized literals.
+
+function o2(b, c) {
+ return { 'b':b, 'c':c, 'y':b + c };
+}
+
+function o1(a, b, c) {
+ return { 'a':a, 'x':o2(b, c) };
+}
+
+function TestObjectLiteral(a, b, c) {
+ var expected = { 'a':a, 'x':{ 'b':b, 'c':c, 'y':b + c } };
+ var result = o1(a, b, c);
+ assertEquals(expected, result, "TestObjectLiteral");
+}
+
+TestObjectLiteral(1, 2, 3);
+TestObjectLiteral(1, 2, 3);
+%OptimizeFunctionOnNextCall(TestObjectLiteral);
+TestObjectLiteral(1, 2, 3);
+TestObjectLiteral('a', 'b', 'c');
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/lazy-const-lookup.js b/src/3rdparty/v8/test/mjsunit/compiler/lazy-const-lookup.js
new file mode 100644
index 0000000..b4f15a1
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compiler/lazy-const-lookup.js
@@ -0,0 +1,41 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function outer() {
+ const x = 1;
+ function inner() {
+ return x;
+ }
+ inner();
+ %OptimizeFunctionOnNextCall(inner);
+ inner();
+}
+
+outer();
+
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/literals-optimized.js b/src/3rdparty/v8/test/mjsunit/compiler/literals-optimized.js
new file mode 100644
index 0000000..049e21a
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compiler/literals-optimized.js
@@ -0,0 +1,121 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Test optimized versions of array and object literals.
+
+function TestOptimizedLiteral(create, verify) {
+ verify(create(1, 2, 3), 1, 2, 3);
+ verify(create(3, 5, 7), 3, 5, 7);
+ %OptimizeFunctionOnNextCall(create);
+ verify(create(11, 23, 42), 11, 23, 42);
+}
+
+
+// Test shallow array literal.
+function create_arr_shallow(a, b, c) {
+ return [0, a, 0, b, 0, c];
+}
+function verify_arr_shallow(array, a, b, c) {
+ assertSame(6, array.length);
+ assertSame(0, array[0]);
+ assertSame(a, array[1]);
+ assertSame(0, array[2]);
+ assertSame(b, array[3]);
+ assertSame(0, array[4]);
+ assertSame(c, array[5]);
+}
+TestOptimizedLiteral(create_arr_shallow, verify_arr_shallow);
+
+
+// Test nested array literal.
+function create_arr_nested(a, b, c) {
+ return [[0, a], [b, c], [1, 2], 3];
+}
+function verify_arr_nested(array, a, b, c) {
+ assertSame(4, array.length);
+ assertSame(2, array[0].length);
+ assertSame(0, array[0][0]);
+ assertSame(a, array[0][1]);
+ assertSame(2, array[1].length);
+ assertSame(b, array[1][0]);
+ assertSame(c, array[1][1]);
+ assertSame(2, array[2].length);
+ assertSame(1, array[2][0]);
+ assertSame(2, array[2][1]);
+ assertSame(3, array[3]);
+}
+TestOptimizedLiteral(create_arr_nested, verify_arr_nested);
+
+
+// Test shallow object literal.
+function create_obj_shallow(a, b, c) {
+ return { x:a, y:b, z:c, v:'foo', 9:'bar' };
+}
+function verify_obj_shallow(object, a, b, c) {
+ assertSame(a, object.x);
+ assertSame(b, object.y);
+ assertSame(c, object.z);
+ assertSame('foo', object.v);
+ assertSame('bar', object[9]);
+}
+TestOptimizedLiteral(create_obj_shallow, verify_obj_shallow);
+
+
+// Test nested object literal.
+function create_obj_nested(a, b, c) {
+ return { x:{ v:a, w:b }, y:{ v:1, w:2 }, z:c, v:'foo', 9:'bar' };
+}
+function verify_obj_nested(object, a, b, c) {
+ assertSame(a, object.x.v);
+ assertSame(b, object.x.w);
+ assertSame(1, object.y.v);
+ assertSame(2, object.y.w);
+ assertSame(c, object.z);
+ assertSame('foo', object.v);
+ assertSame('bar', object[9]);
+}
+TestOptimizedLiteral(create_obj_nested, verify_obj_nested);
+
+
+// Test mixed array and object literal.
+function create_mixed_nested(a, b, c) {
+ return { x:[1, 2], y:[a, b], z:c, v:{ v:'foo' }, 9:'bar' };
+}
+function verify_mixed_nested(object, a, b, c) {
+ assertSame(2, object.x.length);
+ assertSame(1, object.x[0]);
+ assertSame(2, object.x[1]);
+ assertSame(2, object.y.length);
+ assertSame(a, object.y[0]);
+ assertSame(b, object.y[1]);
+ assertSame(c, object.z);
+ assertSame('foo', object.v.v);
+ assertSame('bar', object[9]);
+}
+TestOptimizedLiteral(create_mixed_nested, verify_mixed_nested);
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/math-floor-global.js b/src/3rdparty/v8/test/mjsunit/compiler/math-floor-global.js
new file mode 100644
index 0000000..9ec183f
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compiler/math-floor-global.js
@@ -0,0 +1,161 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --max-new-space-size=256 --allow-natives-syntax
+
+// Test inlining of Math.floor when assigned to a global.
+var flo = Math.floor;
+var test_id = 0;
+
+function testFloor(expect, input) {
+ var test = new Function('n',
+ '"' + (test_id++) + '";return flo(n)');
+ assertEquals(expect, test(input));
+ assertEquals(expect, test(input));
+ assertEquals(expect, test(input));
+ %OptimizeFunctionOnNextCall(test);
+ assertEquals(expect, test(input));
+}
+
+function zero() {
+ var x = 0.5;
+ return (function() { return x - 0.5; })();
+}
+
+function test() {
+ testFloor(0, 0);
+ testFloor(0, zero());
+ testFloor(-0, -0);
+ testFloor(Infinity, Infinity);
+ testFloor(-Infinity, -Infinity);
+ testFloor(NaN, NaN);
+
+ // Ensure that a negative zero coming from Math.floor is properly handled
+ // by other operations.
+ function ifloor(x) {
+ return 1 / Math.floor(x);
+ }
+ assertEquals(-Infinity, ifloor(-0));
+ assertEquals(-Infinity, ifloor(-0));
+ assertEquals(-Infinity, ifloor(-0));
+ %OptimizeFunctionOnNextCall(ifloor);
+ assertEquals(-Infinity, ifloor(-0));
+
+ testFloor(0, 0.1);
+ testFloor(0, 0.49999999999999994);
+ testFloor(0, 0.5);
+ testFloor(0, 0.7);
+ testFloor(-1, -0.1);
+ testFloor(-1, -0.49999999999999994);
+ testFloor(-1, -0.5);
+ testFloor(-1, -0.7);
+ testFloor(1, 1);
+ testFloor(1, 1.1);
+ testFloor(1, 1.5);
+ testFloor(1, 1.7);
+ testFloor(-1, -1);
+ testFloor(-2, -1.1);
+ testFloor(-2, -1.5);
+ testFloor(-2, -1.7);
+
+ testFloor(0, Number.MIN_VALUE);
+ testFloor(-1, -Number.MIN_VALUE);
+ testFloor(Number.MAX_VALUE, Number.MAX_VALUE);
+ testFloor(-Number.MAX_VALUE, -Number.MAX_VALUE);
+ testFloor(Infinity, Infinity);
+ testFloor(-Infinity, -Infinity);
+
+ // 2^30 is a smi boundary.
+ var two_30 = 1 << 30;
+
+ testFloor(two_30, two_30);
+ testFloor(two_30, two_30 + 0.1);
+ testFloor(two_30, two_30 + 0.5);
+ testFloor(two_30, two_30 + 0.7);
+
+ testFloor(two_30 - 1, two_30 - 1);
+ testFloor(two_30 - 1, two_30 - 1 + 0.1);
+ testFloor(two_30 - 1, two_30 - 1 + 0.5);
+ testFloor(two_30 - 1, two_30 - 1 + 0.7);
+
+ testFloor(-two_30, -two_30);
+ testFloor(-two_30, -two_30 + 0.1);
+ testFloor(-two_30, -two_30 + 0.5);
+ testFloor(-two_30, -two_30 + 0.7);
+
+ testFloor(-two_30 + 1, -two_30 + 1);
+ testFloor(-two_30 + 1, -two_30 + 1 + 0.1);
+ testFloor(-two_30 + 1, -two_30 + 1 + 0.5);
+ testFloor(-two_30 + 1, -two_30 + 1 + 0.7);
+
+ // 2^52 is a precision boundary.
+ var two_52 = (1 << 30) * (1 << 22);
+
+ testFloor(two_52, two_52);
+ testFloor(two_52, two_52 + 0.1);
+ assertEquals(two_52, two_52 + 0.5);
+ testFloor(two_52, two_52 + 0.5);
+ assertEquals(two_52 + 1, two_52 + 0.7);
+ testFloor(two_52 + 1, two_52 + 0.7);
+
+ testFloor(two_52 - 1, two_52 - 1);
+ testFloor(two_52 - 1, two_52 - 1 + 0.1);
+ testFloor(two_52 - 1, two_52 - 1 + 0.5);
+ testFloor(two_52 - 1, two_52 - 1 + 0.7);
+
+ testFloor(-two_52, -two_52);
+ testFloor(-two_52, -two_52 + 0.1);
+ testFloor(-two_52, -two_52 + 0.5);
+ testFloor(-two_52, -two_52 + 0.7);
+
+ testFloor(-two_52 + 1, -two_52 + 1);
+ testFloor(-two_52 + 1, -two_52 + 1 + 0.1);
+ testFloor(-two_52 + 1, -two_52 + 1 + 0.5);
+ testFloor(-two_52 + 1, -two_52 + 1 + 0.7);
+}
+
+
+// Test in a loop to cover the custom IC and GC-related issues.
+for (var i = 0; i < 50; i++) {
+ test();
+}
+
+
+// Regression test for a bug where a negative zero coming from Math.floor
+// was not properly handled by other operations.
+function floorsum(i, n) {
+ var ret = Math.floor(n);
+ while (--i > 0) {
+ ret += Math.floor(n);
+ }
+ return ret;
+}
+assertEquals(-0, floorsum(1, -0));
+%OptimizeFunctionOnNextCall(floorsum);
+// The optimized function will deopt. Run it with enough iterations to try
+// to optimize via OSR (triggering the bug).
+assertEquals(-0, floorsum(100000, -0));
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/math-floor-local.js b/src/3rdparty/v8/test/mjsunit/compiler/math-floor-local.js
new file mode 100644
index 0000000..e44b15c
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compiler/math-floor-local.js
@@ -0,0 +1,161 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --max-new-space-size=256 --allow-natives-syntax
+
+// Test inlining of Math.floor when assigned to a local.
+var test_id = 0;
+
+function testFloor(expect, input) {
+ var test = new Function('n',
+ '"' + (test_id++) +
+ '";var f = Math.floor; return f(n)');
+ assertEquals(expect, test(input));
+ assertEquals(expect, test(input));
+ assertEquals(expect, test(input));
+ %OptimizeFunctionOnNextCall(test);
+ assertEquals(expect, test(input));
+}
+
+function zero() {
+ var x = 0.5;
+ return (function() { return x - 0.5; })();
+}
+
+function test() {
+ testFloor(0, 0);
+ testFloor(0, zero());
+ testFloor(-0, -0);
+ testFloor(Infinity, Infinity);
+ testFloor(-Infinity, -Infinity);
+ testFloor(NaN, NaN);
+
+ // Ensure that a negative zero coming from Math.floor is properly handled
+ // by other operations.
+ function ifloor(x) {
+ return 1 / Math.floor(x);
+ }
+ assertEquals(-Infinity, ifloor(-0));
+ assertEquals(-Infinity, ifloor(-0));
+ assertEquals(-Infinity, ifloor(-0));
+ %OptimizeFunctionOnNextCall(ifloor);
+ assertEquals(-Infinity, ifloor(-0));
+
+ testFloor(0, 0.1);
+ testFloor(0, 0.49999999999999994);
+ testFloor(0, 0.5);
+ testFloor(0, 0.7);
+ testFloor(-1, -0.1);
+ testFloor(-1, -0.49999999999999994);
+ testFloor(-1, -0.5);
+ testFloor(-1, -0.7);
+ testFloor(1, 1);
+ testFloor(1, 1.1);
+ testFloor(1, 1.5);
+ testFloor(1, 1.7);
+ testFloor(-1, -1);
+ testFloor(-2, -1.1);
+ testFloor(-2, -1.5);
+ testFloor(-2, -1.7);
+
+ testFloor(0, Number.MIN_VALUE);
+ testFloor(-1, -Number.MIN_VALUE);
+ testFloor(Number.MAX_VALUE, Number.MAX_VALUE);
+ testFloor(-Number.MAX_VALUE, -Number.MAX_VALUE);
+ testFloor(Infinity, Infinity);
+ testFloor(-Infinity, -Infinity);
+
+ // 2^30 is a smi boundary.
+ var two_30 = 1 << 30;
+
+ testFloor(two_30, two_30);
+ testFloor(two_30, two_30 + 0.1);
+ testFloor(two_30, two_30 + 0.5);
+ testFloor(two_30, two_30 + 0.7);
+
+ testFloor(two_30 - 1, two_30 - 1);
+ testFloor(two_30 - 1, two_30 - 1 + 0.1);
+ testFloor(two_30 - 1, two_30 - 1 + 0.5);
+ testFloor(two_30 - 1, two_30 - 1 + 0.7);
+
+ testFloor(-two_30, -two_30);
+ testFloor(-two_30, -two_30 + 0.1);
+ testFloor(-two_30, -two_30 + 0.5);
+ testFloor(-two_30, -two_30 + 0.7);
+
+ testFloor(-two_30 + 1, -two_30 + 1);
+ testFloor(-two_30 + 1, -two_30 + 1 + 0.1);
+ testFloor(-two_30 + 1, -two_30 + 1 + 0.5);
+ testFloor(-two_30 + 1, -two_30 + 1 + 0.7);
+
+ // 2^52 is a precision boundary.
+ var two_52 = (1 << 30) * (1 << 22);
+
+ testFloor(two_52, two_52);
+ testFloor(two_52, two_52 + 0.1);
+ assertEquals(two_52, two_52 + 0.5);
+ testFloor(two_52, two_52 + 0.5);
+ assertEquals(two_52 + 1, two_52 + 0.7);
+ testFloor(two_52 + 1, two_52 + 0.7);
+
+ testFloor(two_52 - 1, two_52 - 1);
+ testFloor(two_52 - 1, two_52 - 1 + 0.1);
+ testFloor(two_52 - 1, two_52 - 1 + 0.5);
+ testFloor(two_52 - 1, two_52 - 1 + 0.7);
+
+ testFloor(-two_52, -two_52);
+ testFloor(-two_52, -two_52 + 0.1);
+ testFloor(-two_52, -two_52 + 0.5);
+ testFloor(-two_52, -two_52 + 0.7);
+
+ testFloor(-two_52 + 1, -two_52 + 1);
+ testFloor(-two_52 + 1, -two_52 + 1 + 0.1);
+ testFloor(-two_52 + 1, -two_52 + 1 + 0.5);
+ testFloor(-two_52 + 1, -two_52 + 1 + 0.7);
+}
+
+
+// Test in a loop to cover the custom IC and GC-related issues.
+for (var i = 0; i < 50; i++) {
+ test();
+}
+
+
+// Regression test for a bug where a negative zero coming from Math.floor
+// was not properly handled by other operations.
+function floorsum(i, n) {
+ var ret = Math.floor(n);
+ while (--i > 0) {
+ ret += Math.floor(n);
+ }
+ return ret;
+}
+assertEquals(-0, floorsum(1, -0));
+%OptimizeFunctionOnNextCall(floorsum);
+// The optimized function will deopt. Run it with enough iterations to try
+// to optimize via OSR (triggering the bug).
+assertEquals(-0, floorsum(100000, -0));
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/optimized-for-in.js b/src/3rdparty/v8/test/mjsunit/compiler/optimized-for-in.js
new file mode 100644
index 0000000..cb8c66d
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compiler/optimized-for-in.js
@@ -0,0 +1,300 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --optimize-for-in --allow-natives-syntax
+
+// Test for-in support in Crankshaft. For simplicity this tests assumes certain
+// fixed iteration order for properties and will have to be adjusted if V8
+// stops following insertion order.
+
+
+function a(t) {
+ var result = [];
+ for (var i in t) {
+ result.push(i + t[i]);
+ }
+ return result.join('');
+}
+
+// Check that we correctly deoptimize on map check.
+function b(t) {
+ var result = [];
+ for (var i in t) {
+ result.push(i + t[i]);
+ delete t[i];
+ }
+ return result.join('');
+}
+
+// Check that we correctly deoptimize during preparation step.
+function c(t) {
+ var result = [];
+ for (var i in t) {
+ result.push(i + t[i]);
+ }
+ return result.join('');
+}
+
+// Check that we deoptimize to the place after side effect in the right state.
+function d(t) {
+ var result = [];
+ var o;
+ for (var i in (o = t())) {
+ result.push(i + o[i]);
+ }
+ return result.join('');
+}
+
+// Check that we correctly deoptimize on map check inserted for fused load.
+function e(t) {
+ var result = [];
+ for (var i in t) {
+ delete t[i];
+ t[i] = i;
+ result.push(i + t[i]);
+ }
+ return result.join('');
+}
+
+// Nested for-in loops.
+function f(t) {
+ var result = [];
+ for (var i in t) {
+ for (var j in t) {
+ result.push(i + j + t[i] + t[j]);
+ }
+ }
+ return result.join('');
+}
+
+// Deoptimization from the inner for-in loop.
+function g(t) {
+ var result = [];
+ for (var i in t) {
+ for (var j in t) {
+ result.push(i + j + t[i] + t[j]);
+ var v = t[i];
+ delete t[i];
+ t[i] = v;
+ }
+ }
+ return result.join('');
+}
+
+
+// Break from the inner for-in loop.
+function h(t, deopt) {
+ var result = [];
+ for (var i in t) {
+ for (var j in t) {
+ result.push(i + j + t[i] + t[j]);
+ break;
+ }
+ }
+ deopt.deopt;
+ return result.join('');
+}
+
+// Continue in the inner loop.
+function j(t, deopt) {
+ var result = [];
+ for (var i in t) {
+ for (var j in t) {
+ result.push(i + j + t[i] + t[j]);
+ continue;
+ }
+ }
+ deopt.deopt;
+ return result.join('');
+}
+
+// Continue of the outer loop.
+function k(t, deopt) {
+ var result = [];
+ outer: for (var i in t) {
+ for (var j in t) {
+ result.push(i + j + t[i] + t[j]);
+ continue outer;
+ }
+ }
+ deopt.deopt;
+ return result.join('');
+}
+
+// Break of the outer loop.
+function l(t, deopt) {
+ var result = [];
+ outer: for (var i in t) {
+ for (var j in t) {
+ result.push(i + j + t[i] + t[j]);
+ break outer;
+ }
+ }
+ deopt.deopt;
+ return result.join('');
+}
+
+// Test deoptimization from inlined frame (currently it is not inlined).
+function m0(t, deopt) {
+ for (var i in t) {
+ for (var j in t) {
+ deopt.deopt;
+ return i + j + t[i] + t[j];
+ }
+ }
+}
+
+function m(t, deopt) {
+ return m0(t, deopt);
+}
+
+
+function tryFunction(s, mkT, f) {
+ var d = {deopt: false};
+ assertEquals(s, f(mkT(), d));
+ assertEquals(s, f(mkT(), d));
+ assertEquals(s, f(mkT(), d));
+ %OptimizeFunctionOnNextCall(f);
+ assertEquals(s, f(mkT(), d));
+ assertEquals(s, f(mkT(), {}));
+}
+
+var s = "a1b2c3d4";
+function mkTable() { return { a: "1", b: "2", c: "3", d: "4" }; }
+
+
+tryFunction(s, mkTable, a);
+tryFunction(s, mkTable, b);
+tryFunction("0a1b2c3d", function () { return "abcd"; }, c);
+tryFunction("0a1b2c3d", function () {
+ var cnt = false;
+ return function () {
+ cnt = true;
+ return "abcd";
+ }
+}, d);
+tryFunction("aabbccdd", mkTable, e);
+
+function mkSmallTable() { return { a: "1", b: "2" }; }
+
+tryFunction("aa11ab12ba21bb22", mkSmallTable, f);
+tryFunction("aa11ab12bb22ba21", mkSmallTable, g);
+tryFunction("aa11ba21", mkSmallTable, h);
+tryFunction("aa11ab12ba21bb22", mkSmallTable, j);
+tryFunction("aa11ba21", mkSmallTable, h);
+tryFunction("aa11ba21", mkSmallTable, k);
+tryFunction("aa11", mkSmallTable, l);
+tryFunction("aa11", mkSmallTable, m);
+
+// Test handling of null.
+tryFunction("", function () {
+ return function () { return null; }
+}, function (t) {
+ for (var i in t()) { return i; }
+ return "";
+});
+
+// Test smis.
+tryFunction("", function () {
+ return function () { return 11; }
+}, function (t) {
+ for (var i in t()) { return i; }
+ return "";
+});
+
+// Test LoadFieldByIndex for out of object properties.
+function O() { this.a = 1; }
+for (var i = 0; i < 10; i++) new O();
+tryFunction("a1b2c3d4e5f6", function () {
+ var o = new O();
+ o.b = 2;
+ o.c = 3;
+ o.d = 4;
+ o.e = 5;
+ o.f = 6;
+ return o;
+}, function (t) {
+ var r = [];
+ for (var i in t) r.push(i + t[i]);
+ return r.join('');
+});
+
+// Test OSR inside for-in.
+function osr_inner(t, limit) {
+ var r = 1;
+ for (var x in t) {
+ if (t.hasOwnProperty(x)) {
+ for (var i = 0; i < t[x].length; i++) {
+ r += t[x][i];
+ if (i === limit) {
+ %OptimizeFunctionOnNextCall(osr_inner, "osr");
+ }
+ }
+ r += x;
+ }
+ }
+ return r;
+}
+
+function osr_outer(t, osr_after) {
+ var r = 1;
+ for (var x in t) {
+ for (var i = 0; i < t[x].length; i++) {
+ r += t[x][i];
+ }
+ if (x === osr_after) {
+ %OptimizeFunctionOnNextCall(osr_outer, "osr");
+ }
+ r += x;
+ }
+ return r;
+}
+
+function osr_outer_and_deopt(t, osr_after) {
+ var r = 1;
+ for (var x in t) {
+ r += x;
+ if (x == osr_after) {
+ %OptimizeFunctionOnNextCall(osr_outer_and_deopt, "osr");
+ }
+ }
+ return r;
+}
+
+function test_osr() {
+ with ({}) {} // Disable optimizations of this function.
+ var arr = new Array(20);
+ for (var i = 0; i < arr.length; i++) {
+ arr[i] = i + 1;
+ }
+ arr.push(":"); // Force deopt at the end of the loop.
+ assertEquals("211:x1234567891011121314151617181920:y", osr_inner({x: arr, y: arr}, (arr.length / 2) | 0));
+ assertEquals("7x456y", osr_outer({x: [1,2,3], y: [4,5,6]}, "x"));
+ assertEquals("101234567", osr_outer_and_deopt([1,2,3,4,5,6,7,8], "5"));
+}
+
+test_osr();
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/regress-106351.js b/src/3rdparty/v8/test/mjsunit/compiler/regress-106351.js
new file mode 100644
index 0000000..2a67a05
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compiler/regress-106351.js
@@ -0,0 +1,38 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Test Math.round with the input reused in the same expression.
+function test(x) {
+ var v = Math.round(x) - x;
+ assertEquals(0.5, v);
+}
+
+for (var i = 0; i < 5; ++i) test(0.5);
+%OptimizeFunctionOnNextCall(test);
+test(0.5);
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/regress-deopt-call-as-function.js b/src/3rdparty/v8/test/mjsunit/compiler/regress-deopt-call-as-function.js
index d82c690..c408096 100644
--- a/src/3rdparty/v8/test/mjsunit/compiler/regress-deopt-call-as-function.js
+++ b/src/3rdparty/v8/test/mjsunit/compiler/regress-deopt-call-as-function.js
@@ -32,7 +32,7 @@ function bar(a, b) {try { return a; } finally { } }
function test_context() {
function foo(x) { return 42; }
var s, t;
- for (var i = 0x7ff00000; i < 0x80000000; i++) {
+ for (var i = 0x7fff0000; i < 0x80000000; i++) {
bar(t = foo(i) ? bar(42 + i - i) : bar(0), s = i + t);
}
return s;
@@ -43,7 +43,7 @@ assertEquals(0x7fffffff + 42, test_context());
function value_context() {
function foo(x) { return 42; }
var s, t;
- for (var i = 0x7ff00000; i < 0x80000000; i++) {
+ for (var i = 0x7fff0000; i < 0x80000000; i++) {
bar(t = foo(i), s = i + t);
}
return s;
@@ -54,7 +54,7 @@ assertEquals(0x7fffffff + 42, value_context());
function effect_context() {
function foo(x) { return 42; }
var s, t;
- for (var i = 0x7ff00000; i < 0x80000000; i++) {
+ for (var i = 0x7fff0000; i < 0x80000000; i++) {
bar(foo(i), s = i + 42);
}
return s;
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/regress-funarguments.js b/src/3rdparty/v8/test/mjsunit/compiler/regress-funarguments.js
index cea40bc..c913bd9 100644
--- a/src/3rdparty/v8/test/mjsunit/compiler/regress-funarguments.js
+++ b/src/3rdparty/v8/test/mjsunit/compiler/regress-funarguments.js
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -25,6 +25,8 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// Flags: --allow-natives-syntax
+
// Test function.arguments.
function A() {}
@@ -60,13 +62,16 @@ function hej(x) {
return o.g(x, "z");
}
-function stress() {
- for (var i=0; i<5000000; i++) o.g(i, "g");
- for (var j=0; j<5000000; j++) hej(j);
+function opt() {
+ for (var k=0; k<2; k++) {
+ for (var i=0; i<5; i++) o.g(i, "g");
+ for (var j=0; j<5; j++) hej(j);
+ }
+ %OptimizeFunctionOnNextCall(o.g);
+ %OptimizeFunctionOnNextCall(hej);
}
-stress();
-
+opt();
assertArrayEquals([0, "g"], o.g(0, "g"));
assertArrayEquals([1, "f"], o.g(1, "g"));
assertArrayEquals([0, "h"], hej(0));
@@ -74,8 +79,7 @@ assertArrayEquals([1, "f"], hej(1));
o = new B();
-stress();
-
+opt();
assertArrayEquals([0, "f"], o.g(0, "g"));
assertArrayEquals([1, "g"], o.g(1, "g"));
assertArrayEquals([0, "f"], hej(0));
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/regress-funcaller.js b/src/3rdparty/v8/test/mjsunit/compiler/regress-funcaller.js
index 88db147..5c2a597 100644
--- a/src/3rdparty/v8/test/mjsunit/compiler/regress-funcaller.js
+++ b/src/3rdparty/v8/test/mjsunit/compiler/regress-funcaller.js
@@ -25,6 +25,8 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// Flags: --allow-natives-syntax
+
// Test function.caller.
function A() {}
@@ -40,9 +42,10 @@ A.prototype.g = gee;
var o = new A();
-for (var i=0; i<5000000; i++) {
+for (var i=0; i<5; i++) {
o.g(i);
}
+%OptimizeFunctionOnNextCall(o.g);
assertEquals(gee, o.g(0));
assertEquals(null, o.g(1));
@@ -53,9 +56,10 @@ function hej(x) {
return o.g(x);
}
-for (var j=0; j<5000000; j++) {
+for (var j=0; j<5; j++) {
hej(j);
}
+%OptimizeFunctionOnNextCall(hej);
assertEquals(gee, hej(0));
assertEquals(hej, hej(1));
@@ -66,8 +70,9 @@ function from_eval(x) {
return o.g(x);
}
-for (var j=0; j<5000000; j++) {
+for (var j=0; j<5; j++) {
from_eval(j);
}
+%OptimizeFunctionOnNextCall(from_eval);
assertEquals(gee, from_eval(0));
assertEquals(from_eval, from_eval(1));
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/regress-lazy-deopt.js b/src/3rdparty/v8/test/mjsunit/compiler/regress-lazy-deopt.js
new file mode 100644
index 0000000..d1c3d01
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compiler/regress-lazy-deopt.js
@@ -0,0 +1,48 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Test lazy deoptimization after CallFunctionStub.
+
+function foo() { return 1; }
+
+function f(x, y) {
+ var a = [0];
+ if (x == 0) {
+ %DeoptimizeFunction(f);
+ return 1;
+ }
+ a[0] = %_CallFunction(null, x - 1, f);
+ return x >> a[0];
+}
+
+f(42);
+f(42);
+assertEquals(42, f(42));
+%OptimizeFunctionOnNextCall(f);
+assertEquals(42, f(42));
diff --git a/src/3rdparty/v8/test/mjsunit/compiler/regress-toint32.js b/src/3rdparty/v8/test/mjsunit/compiler/regress-toint32.js
new file mode 100644
index 0000000..54c2f76
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/compiler/regress-toint32.js
@@ -0,0 +1,45 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --noenable-sse3
+
+var a = new Int32Array(1);
+var G = 0x80000000;
+
+function f(x) {
+ var v = x;
+ v = v + 1;
+ a[0] = v;
+ v = v - 1;
+ return v;
+}
+
+assertEquals(G, f(G));
+assertEquals(G, f(G));
+%OptimizeFunctionOnNextCall(f);
+assertEquals(G, f(G));
+
diff --git a/src/3rdparty/v8/test/mjsunit/count-based-osr.js b/src/3rdparty/v8/test/mjsunit/count-based-osr.js
new file mode 100644
index 0000000..125c4e2
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/count-based-osr.js
@@ -0,0 +1,38 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --count-based-interrupts --interrupt-budget=10 --weighted-back-edges --allow-natives-syntax
+
+// Test that OSR works properly when using count-based interrupting/profiling.
+
+function osr_this() {
+ var a = 1;
+ // Trigger OSR.
+ while (%GetOptimizationStatus(osr_this) == 2) {}
+ return a;
+}
+assertEquals(1, osr_this());
diff --git a/src/3rdparty/v8/test/mjsunit/d8-os.js b/src/3rdparty/v8/test/mjsunit/d8-os.js
index 5640326..239938c 100644
--- a/src/3rdparty/v8/test/mjsunit/d8-os.js
+++ b/src/3rdparty/v8/test/mjsunit/d8-os.js
@@ -54,6 +54,8 @@ function str_error(str) {
if (this.os && os.system) {
+ // Ensure that we have a valid working directory.
+ os.chdir("/tmp");
try {
// Delete the dir if it is lying around from last time.
os.system("ls", [TEST_DIR]);
@@ -61,52 +63,53 @@ if (this.os && os.system) {
} catch (e) {
}
os.mkdirp(TEST_DIR);
- os.chdir(TEST_DIR);
try {
// Check the chdir worked.
os.system('ls', [TEST_DIR]);
// Simple create dir.
- os.mkdirp("dir");
+ os.mkdirp(TEST_DIR + "/dir");
// Create dir in dir.
- os.mkdirp("dir/foo");
+ os.mkdirp(TEST_DIR + "/dir/foo");
// Check that they are there.
- os.system('ls', ['dir/foo']);
+ os.system('ls', [TEST_DIR + '/dir/foo']);
// Check that we can detect when something is not there.
- assertThrows("os.system('ls', ['dir/bar']);", "dir not there");
+ assertThrows("os.system('ls', [TEST_DIR + '/dir/bar']);", "dir not there");
// Check that mkdirp makes intermediate directories.
- os.mkdirp("dir2/foo");
- os.system("ls", ["dir2/foo"]);
+ os.mkdirp(TEST_DIR + "/dir2/foo");
+ os.system("ls", [TEST_DIR + "/dir2/foo"]);
// Check that mkdirp doesn't mind if the dir is already there.
- os.mkdirp("dir2/foo");
- os.mkdirp("dir2/foo/");
+ os.mkdirp(TEST_DIR + "/dir2/foo");
+ os.mkdirp(TEST_DIR + "/dir2/foo/");
// Check that mkdirp can cope with trailing /
- os.mkdirp("dir3/");
- os.system("ls", ["dir3"]);
+ os.mkdirp(TEST_DIR + "/dir3/");
+ os.system("ls", [TEST_DIR + "/dir3"]);
// Check that we get an error if the name is taken by a file.
- os.system("sh", ["-c", "echo foo > file1"]);
- os.system("ls", ["file1"]);
- assertThrows("os.mkdirp('file1');", "mkdir over file1");
- assertThrows("os.mkdirp('file1/foo');", "mkdir over file2");
- assertThrows("os.mkdirp('file1/');", "mkdir over file3");
- assertThrows("os.mkdirp('file1/foo/');", "mkdir over file4");
+ os.system("sh", ["-c", "echo foo > " + TEST_DIR + "/file1"]);
+ os.system("ls", [TEST_DIR + "/file1"]);
+ assertThrows("os.mkdirp(TEST_DIR + '/file1');", "mkdir over file1");
+ assertThrows("os.mkdirp(TEST_DIR + '/file1/foo');", "mkdir over file2");
+ assertThrows("os.mkdirp(TEST_DIR + '/file1/');", "mkdir over file3");
+ assertThrows("os.mkdirp(TEST_DIR + '/file1/foo/');", "mkdir over file4");
// Create a dir we cannot read.
- os.mkdirp("dir4", 0);
+ os.mkdirp(TEST_DIR + "/dir4", 0);
// This test fails if you are root since root can read any dir.
- assertThrows("os.chdir('dir4');", "chdir dir4 I");
- os.rmdir("dir4");
- assertThrows("os.chdir('dir4');", "chdir dir4 II");
- // Set umask.
+ assertThrows("os.chdir(TEST_DIR + '/dir4');", "chdir dir4 I");
+ os.rmdir(TEST_DIR + "/dir4");
+ assertThrows("os.chdir(TEST_DIR + '/dir4');", "chdir dir4 II");
+
+ // Set umask. This changes the umask for the whole process and is
+ // the reason why the test cannot be run multi-threaded.
var old_umask = os.umask(0777);
// Create a dir we cannot read.
- os.mkdirp("dir5");
+ os.mkdirp(TEST_DIR + "/dir5");
// This test fails if you are root since root can read any dir.
- assertThrows("os.chdir('dir5');", "cd dir5 I");
- os.rmdir("dir5");
- assertThrows("os.chdir('dir5');", "chdir dir5 II");
+ assertThrows("os.chdir(TEST_DIR + '/dir5');", "cd dir5 I");
+ os.rmdir(TEST_DIR + "/dir5");
+ assertThrows("os.chdir(TEST_DIR + '/dir5');", "chdir dir5 II");
os.umask(old_umask);
- os.mkdirp("hest/fisk/../fisk/ged");
- os.system("ls", ["hest/fisk/ged"]);
+ os.mkdirp(TEST_DIR + "/hest/fisk/../fisk/ged");
+ os.system("ls", [TEST_DIR + "/hest/fisk/ged"]);
os.setenv("FOO", "bar");
var environment = os.system("printenv");
@@ -143,42 +146,43 @@ if (this.os && os.system) {
assertEquals("baz\n", os.system("echo", ["baz"]));
//}
}
+
+ // Too few args.
+ arg_error("os.umask();");
+ arg_error("os.system();");
+ arg_error("os.mkdirp();");
+ arg_error("os.chdir();");
+ arg_error("os.setenv();");
+ arg_error("os.rmdir();");
+
+ // Too many args.
+ arg_error("os.setenv('FOO=bar');");
+ arg_error("os.umask(0, 0);");
+ arg_error("os.system('ls', [], -1, -1, -1);");
+ arg_error("os.mkdirp('foo', 0, 0)");
+ arg_error("os.chdir('foo', 'bar')");
+ arg_error("os.rmdir('foo', 'bar');");
+
+ // Wrong kind of args.
+ arg_error("os.umask([]);");
+ arg_error("os.system('ls', 'foo');");
+ arg_error("os.system('ls', 123);");
+ arg_error("os.system('ls', [], 'foo');");
+ arg_error("os.system('ls', [], -1, 'foo');");
+ arg_error("os.mkdirp('foo', 'bar');");
+
+ // Test broken toString().
+ str_error("os.system(e);");
+ str_error("os.system('ls', [e]);");
+ str_error("os.system('ls', ['.', e]);");
+ str_error("os.system('ls', [e, '.']);");
+ str_error("os.mkdirp(e);");
+ str_error("os.setenv(e, 'goo');");
+ str_error("os.setenv('goo', e);");
+ str_error("os.chdir(e);");
+ str_error("os.rmdir(e);");
+
} finally {
os.system("rm", ["-r", TEST_DIR]);
}
-
- // Too few args.
- arg_error("os.umask();");
- arg_error("os.system();");
- arg_error("os.mkdirp();");
- arg_error("os.chdir();");
- arg_error("os.setenv();");
- arg_error("os.rmdir();");
-
- // Too many args.
- arg_error("os.setenv('FOO=bar');");
- arg_error("os.umask(0, 0);");
- arg_error("os.system('ls', [], -1, -1, -1);");
- arg_error("os.mkdirp('foo', 0, 0)");
- arg_error("os.chdir('foo', 'bar')");
- arg_error("os.rmdir('foo', 'bar');");
-
- // Wrong kind of args.
- arg_error("os.umask([]);");
- arg_error("os.system('ls', 'foo');");
- arg_error("os.system('ls', 123);");
- arg_error("os.system('ls', [], 'foo');");
- arg_error("os.system('ls', [], -1, 'foo');");
- arg_error("os.mkdirp('foo', 'bar');");
-
- // Test broken toString().
- str_error("os.system(e);");
- str_error("os.system('ls', [e]);");
- str_error("os.system('ls', ['.', e]);");
- str_error("os.system('ls', [e, '.']);");
- str_error("os.mkdirp(e);");
- str_error("os.setenv(e, 'goo');");
- str_error("os.setenv('goo', e);");
- str_error("os.chdir(e);");
- str_error("os.rmdir(e);");
}
diff --git a/src/3rdparty/v8/test/mjsunit/date.js b/src/3rdparty/v8/test/mjsunit/date.js
index fa43cbb..3e153ab 100644
--- a/src/3rdparty/v8/test/mjsunit/date.js
+++ b/src/3rdparty/v8/test/mjsunit/date.js
@@ -187,6 +187,12 @@ d = new Date(1969, 12, 1, Infinity);
assertTrue(isNaN(d.getTime()));
d = new Date(1969, 12, 1, -Infinity);
assertTrue(isNaN(d.getTime()));
+d = new Date(1969, 12, 1, 0);
+d.setTime(Math.pow(2, 64));
+assertTrue(isNaN(d.getTime()));
+d = new Date(1969, 12, 1, 0);
+d.setTime(Math.pow(-2, 64));
+assertTrue(isNaN(d.getTime()));
// Test creation with obscure date values.
diff --git a/src/3rdparty/v8/test/mjsunit/debug-break-inline.js b/src/3rdparty/v8/test/mjsunit/debug-break-inline.js
new file mode 100644
index 0000000..4418fa8
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/debug-break-inline.js
@@ -0,0 +1,100 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug --allow-natives-syntax
+
+// This test tests that deoptimization due to debug breaks works for
+// inlined functions where the full-code is generated before the
+// debugger is attached.
+//
+//See http://code.google.com/p/chromium/issues/detail?id=105375
+
+// Get the Debug object exposed from the debug context global object.
+Debug = debug.Debug;
+
+var count = 0;
+var break_count = 0;
+
+// Debug event listener which sets a breakpoint first time it is hit
+// and otherwise counts break points hit and checks that the expected
+// state is reached.
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.Break) {
+ break_count++;
+ if (break_count == 1) {
+ Debug.setBreakPoint(g, 3);
+
+ for (var i = 0; i < exec_state.frameCount(); i++) {
+ var frame = exec_state.frame(i);
+ // When function f is optimized (1 means YES, see runtime.cc) we
+ // expect an optimized frame for f and g.
+ if (%GetOptimizationStatus(f) == 1) {
+ if (i == 1) {
+ assertTrue(frame.isOptimizedFrame());
+ assertTrue(frame.isInlinedFrame());
+ assertEquals(4 - i, frame.inlinedFrameIndex());
+ } else if (i == 2) {
+ assertTrue(frame.isOptimizedFrame());
+ assertFalse(frame.isInlinedFrame());
+ } else {
+ assertFalse(frame.isOptimizedFrame());
+ assertFalse(frame.isInlinedFrame());
+ }
+ }
+ }
+ }
+ }
+}
+
+function f() {
+ g();
+}
+
+function g() {
+ count++;
+ h();
+ var b = 1; // Break point is set here.
+}
+
+function h() {
+ debugger;
+}
+
+f();f();f();
+%OptimizeFunctionOnNextCall(f);
+f();
+
+// Add the debug event listener.
+Debug.setListener(listener);
+
+f();
+
+assertEquals(5, count);
+assertEquals(2, break_count);
+
+// Get rid of the debug event listener.
+Debug.setListener(null);
diff --git a/src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js b/src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js
index 9ed1dbb..cf25c0c 100644
--- a/src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js
+++ b/src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized-double.js
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --expose-debug-as debug --allow-natives-syntax
+// Flags: --expose-debug-as debug --expose-gc --allow-natives-syntax --inline-construct
// Get the Debug object exposed from the debug context global object.
Debug = debug.Debug
@@ -34,6 +34,27 @@ var exception = false;
var testingConstructCall = false;
+var input = [
+ {a: 1, b: 2},
+ {a: 3, b: 4},
+ {a: 5, b: 6},
+ {a: 7, b: 8},
+ {a: 9, b: 10}
+];
+
+var expected = [
+ { locals: {a0: 1.01, b0: 2.02}, args: { names: ["i", "x0", "y0"], values: [0, 3.03, 4.04] } },
+ { locals: {a1: 3.03, b1: 4.04}, args: { names: ["i", "x1", "y1"], values: [1, 5.05, 6.06] } },
+ { locals: {a2: 5.05, b2: 6.06}, args: { names: ["i"], values: [2] } },
+ { locals: {a3: 7.07, b3: 8.08}, args: { names: ["i", "x3", "y3", "z3"],
+ values: [3, 9.09, 10.10, undefined] }
+ },
+ { locals: {a4: 9.09, b4: 10.10}, args: { names: ["i", "x4", "y4"], values: [4, 11.11, 12.12] } }
+];
+
+function arraySum(arr) {
+ return arr.reduce(function (a, b) { return a + b; }, 0);
+}
function listener(event, exec_state, event_data, data) {
try {
@@ -44,40 +65,63 @@ function listener(event, exec_state, event_data, data) {
for (var i = 0; i < exec_state.frameCount(); i++) {
var frame = exec_state.frame(i);
if (i < exec_state.frameCount() - 1) {
- var expected_a = i * 2 + 1 + (i * 2 + 1) / 100;
- var expected_b = i * 2 + 2 + (i * 2 + 2) / 100;
- var expected_x = (i + 1) * 2 + 1 + ((i + 1) * 2 + 1) / 100;
- var expected_y = (i + 1) * 2 + 2 + ((i + 1) * 2 + 2) / 100;
-
- // All frames except the bottom one has normal variables a and b.
- assertEquals('a', frame.localName(0));
- assertEquals('b', frame.localName(1));
- assertEquals(expected_a, frame.localValue(0).value());
- assertEquals(expected_b, frame.localValue(1).value());
-
- // All frames except the bottom one has arguments variables x and y.
- assertEquals('x', frame.argumentName(0));
- assertEquals('y', frame.argumentName(1));
- assertEquals(expected_x, frame.argumentValue(0).value());
- assertEquals(expected_y, frame.argumentValue(1).value());
+ var expected_args = expected[i].args;
+ var expected_locals = expected[i].locals;
+
+ // All frames except the bottom one have expected locals.
+ var locals = {};
+ for (var j = 0; j < frame.localCount(); j++) {
+ locals[frame.localName(j)] = frame.localValue(j).value();
+ }
+ assertPropertiesEqual(expected_locals, locals);
+
+ // All frames except the bottom one have expected arguments.
+ for (var j = 0; j < expected_args.names.length; j++) {
+ assertEquals(expected_args.names[j], frame.argumentName(j));
+ assertEquals(expected_args.values[j], frame.argumentValue(j).value());
+ }
// All frames except the bottom one have two scopes.
assertEquals(2, frame.scopeCount());
assertEquals(debug.ScopeType.Local, frame.scope(0).scopeType());
assertEquals(debug.ScopeType.Global, frame.scope(1).scopeType());
- assertEquals(expected_a, frame.scope(0).scopeObject().value()['a']);
- assertEquals(expected_b, frame.scope(0).scopeObject().value()['b']);
- assertEquals(expected_x, frame.scope(0).scopeObject().value()['x']);
- assertEquals(expected_y, frame.scope(0).scopeObject().value()['y']);
+
+ Object.keys(expected_locals).forEach(function (name) {
+ assertEquals(expected_locals[name], frame.scope(0).scopeObject().value()[name]);
+ });
+
+ for (var j = 0; j < expected_args.names.length; j++) {
+ var arg_name = expected_args.names[j];
+ var arg_value = expected_args.values[j];
+ assertEquals(arg_value, frame.scope(0).scopeObject().value()[arg_name]);
+ }
// Evaluate in the inlined frame.
- assertEquals(expected_a, frame.evaluate('a').value());
- assertEquals(expected_x, frame.evaluate('x').value());
- assertEquals(expected_x, frame.evaluate('arguments[0]').value());
- assertEquals(expected_a + expected_b + expected_x + expected_y,
- frame.evaluate('a + b + x + y').value());
- assertEquals(expected_x + expected_y,
- frame.evaluate('arguments[0] + arguments[1]').value());
+ Object.keys(expected_locals).forEach(function (name) {
+ assertEquals(expected_locals[name], frame.evaluate(name).value());
+ });
+
+ for (var j = 0; j < expected_args.names.length; j++) {
+ var arg_name = expected_args.names[j];
+ var arg_value = expected_args.values[j];
+ assertEquals(arg_value, frame.evaluate(arg_name).value());
+ assertEquals(arg_value, frame.evaluate('arguments['+j+']').value());
+ }
+
+ var expected_args_sum = arraySum(expected_args.values);
+ var expected_locals_sum =
+ arraySum(Object.keys(expected_locals).
+ map(function (k) { return expected_locals[k]; }));
+
+ assertEquals(expected_locals_sum + expected_args_sum,
+ frame.evaluate(Object.keys(expected_locals).join('+') + ' + ' +
+ expected_args.names.join('+')).value());
+
+ var arguments_sum = expected_args.names.map(function(_, idx) {
+ return "arguments[" + idx + "]";
+ }).join('+');
+ assertEquals(expected_args_sum,
+ frame.evaluate(arguments_sum).value());
} else {
// The bottom frame only have the global scope.
assertEquals(1, frame.scopeCount());
@@ -96,7 +140,13 @@ function listener(event, exec_state, event_data, data) {
}
// Check for construct call.
- assertEquals(testingConstructCall && i == 4, frame.isConstructCall());
+ if (i == 4) {
+ assertEquals(testingConstructCall, frame.isConstructCall());
+ } else if (i == 2) {
+ assertTrue(frame.isConstructCall());
+ } else {
+ assertFalse(frame.isConstructCall());
+ }
// When function f is optimized (1 means YES, see runtime.cc) we
// expect an optimized frame for f with g1, g2 and g3 inlined.
@@ -119,65 +169,70 @@ function listener(event, exec_state, event_data, data) {
listenerComplete = true;
}
} catch (e) {
- exception = e
+ exception = e.toString() + e.stack;
};
};
-f();f();f();
+for (var i = 0; i < 4; i++) f(input.length - 1, 11.11, 12.12);
%OptimizeFunctionOnNextCall(f);
-f();
+f(input.length - 1, 11.11, 12.12);
// Add the debug event listener.
Debug.setListener(listener);
-function h(x, y) {
- var a = 1;
- var b = 2;
- a = a + a / 100;
- b = b + b / 100;
+function h(i, x0, y0) {
+ var a0 = input[i].a;
+ var b0 = input[i].b;
+ a0 = a0 + a0 / 100;
+ b0 = b0 + b0 / 100;
debugger; // Breakpoint.
};
-function g3(x, y) {
- var a = 3;
- var b = 4;
- a = a + a / 100;
- b = b + b / 100;
- h(a, b);
- return a+b;
+function g3(i, x1, y1) {
+ var a1 = input[i].a;
+ var b1 = input[i].b;
+ a1 = a1 + a1 / 100;
+ b1 = b1 + b1 / 100;
+ h(i - 1, a1, b1);
+ return a1+b1;
};
-function g2(x, y) {
- var a = 5;
- var b = 6;
- a = a + a / 100;
- b = b + b / 100;
- g3(a, b);
+function g2(i) {
+ var a2 = input[i].a;
+ var b2 = input[i].b;
+ a2 = a2 + a2 / 100;
+ b2 = b2 + b2 / 100;
+ g3(i - 1, a2, b2);
};
-function g1(x, y) {
- var a = 7;
- var b = 8;
- a = a + a / 100;
- b = b + b / 100;
- g2(a, b);
+function g1(i, x3, y3, z3) {
+ var a3 = input[i].a;
+ var b3 = input[i].b;
+ a3 = a3 + a3 / 100;
+ b3 = b3 + b3 / 100;
+ new g2(i - 1, a3, b3);
};
-function f(x, y) {
- var a = 9;
- var b = 10;
- a = a + a / 100;
- b = b + b / 100;
- g1(a, b);
+function f(i, x4, y4) {
+ var a4 = input[i].a;
+ var b4 = input[i].b;
+ a4 = a4 + a4 / 100;
+ b4 = b4 + b4 / 100;
+ g1(i - 1, a4, b4);
};
// Test calling f normally and as a constructor.
-f(11.11, 12.12);
+f(input.length - 1, 11.11, 12.12);
+f(input.length - 1, 11.11, 12.12, "");
testingConstructCall = true;
-new f(11.11, 12.12);
+new f(input.length - 1, 11.11, 12.12);
+new f(input.length - 1, 11.11, 12.12, "");
-// Make sure that the debug event listener vas invoked.
+// Make sure that the debug event listener was invoked.
assertFalse(exception, "exception in listener " + exception)
assertTrue(listenerComplete);
+//Throw away type information for next run.
+gc();
+
Debug.setListener(null);
diff --git a/src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized.js b/src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized.js
index 683c139..c88a683 100644
--- a/src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized.js
+++ b/src/3rdparty/v8/test/mjsunit/debug-evaluate-locals-optimized.js
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --expose-debug-as debug --allow-natives-syntax
+// Flags: --expose-debug-as debug --expose-gc --allow-natives-syntax --inline-construct
// Get the Debug object exposed from the debug context global object.
Debug = debug.Debug
@@ -34,6 +34,17 @@ var exception = false;
var testingConstructCall = false;
+var expected = [
+ { locals: {a0: 1, b0: 2}, args: { names: ["i", "x0", "y0"], values: [0, 3, 4] } },
+ { locals: {a1: 3, b1: 4}, args: { names: ["i", "x1", "y1"], values: [1, 5, 6] } },
+ { locals: {a2: 5, b2: 6}, args: { names: ["i"], values: [2] } },
+ { locals: {a3: 7, b3: 8}, args: { names: ["i", "x3", "y3", "z3"], values: [3, 9, 10, undefined] } },
+ { locals: {a4: 9, b4: 10}, args: { names: ["i", "x4", "y4"], values: [4, 11, 12] } }
+];
+
+function arraySum(arr) {
+ return arr.reduce(function (a, b) { return a + b; }, 0);
+}
function listener(event, exec_state, event_data, data) {
try {
@@ -44,40 +55,63 @@ function listener(event, exec_state, event_data, data) {
for (var i = 0; i < exec_state.frameCount(); i++) {
var frame = exec_state.frame(i);
if (i < exec_state.frameCount() - 1) {
- var expected_a = i * 2 + 1;
- var expected_b = i * 2 + 2;
- var expected_x = (i + 1) * 2 + 1;
- var expected_y = (i + 1) * 2 + 2;
-
- // All frames except the bottom one has normal variables a and b.
- assertEquals('a', frame.localName(0));
- assertEquals('b', frame.localName(1));
- assertEquals(expected_a, frame.localValue(0).value());
- assertEquals(expected_b, frame.localValue(1).value());
-
- // All frames except the bottom one has arguments variables x and y.
- assertEquals('x', frame.argumentName(0));
- assertEquals('y', frame.argumentName(1));
- assertEquals(expected_x, frame.argumentValue(0).value());
- assertEquals(expected_y, frame.argumentValue(1).value());
+ var expected_args = expected[i].args;
+ var expected_locals = expected[i].locals;
+
+ // All frames except the bottom one have expected locals.
+ var locals = {};
+ for (var j = 0; j < frame.localCount(); j++) {
+ locals[frame.localName(j)] = frame.localValue(j).value();
+ }
+ assertPropertiesEqual(expected_locals, locals);
+
+ // All frames except the bottom one have expected arguments.
+ for (var j = 0; j < expected_args.names.length; j++) {
+ assertEquals(expected_args.names[j], frame.argumentName(j));
+ assertEquals(expected_args.values[j], frame.argumentValue(j).value());
+ }
// All frames except the bottom one have two scopes.
assertEquals(2, frame.scopeCount());
assertEquals(debug.ScopeType.Local, frame.scope(0).scopeType());
assertEquals(debug.ScopeType.Global, frame.scope(1).scopeType());
- assertEquals(expected_a, frame.scope(0).scopeObject().value()['a']);
- assertEquals(expected_b, frame.scope(0).scopeObject().value()['b']);
- assertEquals(expected_x, frame.scope(0).scopeObject().value()['x']);
- assertEquals(expected_y, frame.scope(0).scopeObject().value()['y']);
+
+ Object.keys(expected_locals).forEach(function (name) {
+ assertEquals(expected_locals[name], frame.scope(0).scopeObject().value()[name]);
+ });
+
+ for (var j = 0; j < expected_args.names.length; j++) {
+ var arg_name = expected_args.names[j];
+ var arg_value = expected_args.values[j];
+ assertEquals(arg_value, frame.scope(0).scopeObject().value()[arg_name]);
+ }
// Evaluate in the inlined frame.
- assertEquals(expected_a, frame.evaluate('a').value());
- assertEquals(expected_x, frame.evaluate('x').value());
- assertEquals(expected_x, frame.evaluate('arguments[0]').value());
- assertEquals(expected_a + expected_b + expected_x + expected_y,
- frame.evaluate('a + b + x + y').value());
- assertEquals(expected_x + expected_y,
- frame.evaluate('arguments[0] + arguments[1]').value());
+ Object.keys(expected_locals).forEach(function (name) {
+ assertEquals(expected_locals[name], frame.evaluate(name).value());
+ });
+
+ for (var j = 0; j < expected_args.names.length; j++) {
+ var arg_name = expected_args.names[j];
+ var arg_value = expected_args.values[j];
+ assertEquals(arg_value, frame.evaluate(arg_name).value());
+ assertEquals(arg_value, frame.evaluate('arguments['+j+']').value());
+ }
+
+ var expected_args_sum = arraySum(expected_args.values);
+ var expected_locals_sum =
+ arraySum(Object.keys(expected_locals).
+ map(function (k) { return expected_locals[k]; }));
+
+ assertEquals(expected_locals_sum + expected_args_sum,
+ frame.evaluate(Object.keys(expected_locals).join('+') + ' + ' +
+ expected_args.names.join('+')).value());
+
+ var arguments_sum = expected_args.names.map(function(_, idx) {
+ return "arguments[" + idx + "]";
+ }).join('+');
+ assertEquals(expected_args_sum,
+ frame.evaluate(arguments_sum).value());
} else {
// The bottom frame only have the global scope.
assertEquals(1, frame.scopeCount());
@@ -96,7 +130,13 @@ function listener(event, exec_state, event_data, data) {
}
// Check for construct call.
- assertEquals(testingConstructCall && i == 4, frame.isConstructCall());
+ if (i == 4) {
+ assertEquals(testingConstructCall, frame.isConstructCall());
+ } else if (i == 2) {
+ assertTrue(frame.isConstructCall());
+ } else {
+ assertFalse(frame.isConstructCall());
+ }
// When function f is optimized (1 means YES, see runtime.cc) we
// expect an optimized frame for f with g1, g2 and g3 inlined.
@@ -119,54 +159,59 @@ function listener(event, exec_state, event_data, data) {
listenerComplete = true;
}
} catch (e) {
- exception = e
+ exception = e.toString() + e.stack;
};
};
-f();f();f();
+for (var i = 0; i < 4; i++) f(expected.length - 1, 11, 12);
%OptimizeFunctionOnNextCall(f);
-f();
+f(expected.length - 1, 11, 12);
// Add the debug event listener.
Debug.setListener(listener);
-function h(x, y) {
- var a = 1;
- var b = 2;
+function h(i, x0, y0) {
+ var a0 = expected[i].locals.a0;
+ var b0 = expected[i].locals.b0;
debugger; // Breakpoint.
-};
-
-function g3(x, y) {
- var a = 3;
- var b = 4;
- h(a, b);
-};
-
-function g2(x, y) {
- var a = 5;
- var b = 6;
- g3(a, b);
-};
-
-function g1(x, y) {
- var a = 7;
- var b = 8;
- g2(a, b);
-};
-
-function f(x, y) {
- var a = 9;
- var b = 10;
- g1(a, b);
-};
+}
+
+function g3(i, x1, y1) {
+ var a1 = expected[i].locals.a1;
+ var b1 = expected[i].locals.b1;
+ h(i - 1, a1, b1);
+}
+
+function g2(i) {
+ var a2 = expected[i].locals.a2;
+ var b2 = expected[i].locals.b2;
+ g3(i - 1, a2, b2);
+}
+
+function g1(i, x3, y3, z3) {
+ var a3 = expected[i].locals.a3;
+ var b3 = expected[i].locals.b3;
+ new g2(i - 1, a3, b3);
+}
+
+function f(i, x4, y4) {
+ var a4 = expected[i].locals.a4;
+ var b4 = expected[i].locals.b4;
+ g1(i - 1, a4, b4);
+}
// Test calling f normally and as a constructor.
-f(11, 12);
+f(expected.length - 1, 11, 12);
+f(expected.length - 1, 11, 12, 0);
testingConstructCall = true;
-new f(11, 12);
+new f(expected.length - 1, 11, 12);
+new f(expected.length - 1, 11, 12, 0);
-// Make sure that the debug event listener vas invoked.
+// Make sure that the debug event listener was invoked.
assertFalse(exception, "exception in listener " + exception)
assertTrue(listenerComplete);
+// Throw away type information for next run.
+gc();
+
Debug.setListener(null);
diff --git a/src/3rdparty/v8/test/mjsunit/debug-scopes.js b/src/3rdparty/v8/test/mjsunit/debug-scopes.js
index 0788a55..942bd2b 100644
--- a/src/3rdparty/v8/test/mjsunit/debug-scopes.js
+++ b/src/3rdparty/v8/test/mjsunit/debug-scopes.js
@@ -29,9 +29,8 @@
// The functions used for testing backtraces. They are at the top to make the
// testing of source line/column easier.
-
// Get the Debug object exposed from the debug context global object.
-Debug = debug.Debug;
+var Debug = debug.Debug;
var test_name;
var listener_delegate;
diff --git a/src/3rdparty/v8/test/mjsunit/debug-set-script-source.js b/src/3rdparty/v8/test/mjsunit/debug-set-script-source.js
new file mode 100644
index 0000000..34ae848
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/debug-set-script-source.js
@@ -0,0 +1,64 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+// Get the Debug object exposed from the debug context global object.
+Debug = debug.Debug
+
+var script_number = 0;
+var script_names = [];
+var exception = null;
+
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.BeforeCompile) {
+ event_data.script().setSource(event_data.script().source() +
+ " //@ sourceURL=proper_location_" + (++script_number));
+ } else if (event == Debug.DebugEvent.AfterCompile) {
+ try {
+ event_data.script().setSource("a=1 //@ sourceURL=wrong_location");
+ } catch(e) {
+ exception = e;
+ }
+ script_names.push(event_data.script().name());
+ }
+};
+
+
+// Add the debug event listener.
+Debug.setListener(listener);
+
+// Compile different sources.
+eval('a=1');
+eval('(function(){})');
+
+assertEquals(2, script_names.length);
+assertEquals("proper_location_1", script_names[0]);
+assertEquals("proper_location_2", script_names[1]);
+
+assertEquals("illegal access", exception);
+
+Debug.setListener(null);
diff --git a/src/3rdparty/v8/test/mjsunit/debug-setbreakpoint.js b/src/3rdparty/v8/test/mjsunit/debug-setbreakpoint.js
index 03ba28e..90dfcd1 100644
--- a/src/3rdparty/v8/test/mjsunit/debug-setbreakpoint.js
+++ b/src/3rdparty/v8/test/mjsunit/debug-setbreakpoint.js
@@ -49,17 +49,14 @@ function safeEval(code) {
}
}
-function testArguments(dcp, arguments, success, is_script, is_script_reg_exp) {
+function testArguments(dcp, arguments, success, is_script) {
var request = '{' + base_request + ',"arguments":' + arguments + '}'
var json_response = dcp.processDebugJSONRequest(request);
var response = safeEval(json_response);
if (success) {
assertTrue(response.success, request + ' -> ' + json_response);
if (is_script) {
- if (is_script_reg_exp)
- assertEquals('scriptRegExp', response.body.type, request + ' -> ' + json_response);
- else
- assertEquals('scriptName', response.body.type, request + ' -> ' + json_response);
+ assertEquals('scriptName', response.body.type, request + ' -> ' + json_response);
} else {
assertEquals('scriptId', response.body.type, request + ' -> ' + json_response);
}
@@ -111,11 +108,6 @@ function listener(event, exec_state, event_data, data) {
testArguments(dcp, '{"type":"script","target":"test","line":1}', true, true);
testArguments(dcp, '{"type":"script","target":"test","column":1}', true, true);
- testArguments(dcp, '{"type":"scriptRegExp","target":"test"}', true, true, true);
- testArguments(dcp, '{"type":"scriptRegExp","target":"test"}', true, true, true);
- testArguments(dcp, '{"type":"scriptRegExp","target":"test","line":1}', true, true, true);
- testArguments(dcp, '{"type":"scriptRegExp","target":"test","column":1}', true, true, true);
-
testArguments(dcp, '{"type":"scriptId","target":' + f_script_id + ',"line":' + f_line + '}', true, false);
testArguments(dcp, '{"type":"scriptId","target":' + g_script_id + ',"line":' + g_line + '}', true, false);
testArguments(dcp, '{"type":"scriptId","target":' + h_script_id + ',"line":' + h_line + '}', true, false);
diff --git a/src/3rdparty/v8/test/mjsunit/debug-step-3.js b/src/3rdparty/v8/test/mjsunit/debug-step-3.js
index ad03667..9cac0f5 100644
--- a/src/3rdparty/v8/test/mjsunit/debug-step-3.js
+++ b/src/3rdparty/v8/test/mjsunit/debug-step-3.js
@@ -82,8 +82,7 @@ bp = Debug.setBreakPoint(f, 3);
// Step through the function ensuring that the var statements are hit as well.
prepare_step_test();
f();
-// TODO(1782): Fix issue to bring back this assert.
-//assertEquals(4, step_count);
+assertEquals(4, step_count);
// Clear the breakpoint and check that no stepping happens.
Debug.clearBreakPoint(bp);
diff --git a/src/3rdparty/v8/test/mjsunit/debug-stepin-accessor.js b/src/3rdparty/v8/test/mjsunit/debug-stepin-accessor.js
index 2c9c8c3..70acd5e 100644
--- a/src/3rdparty/v8/test/mjsunit/debug-stepin-accessor.js
+++ b/src/3rdparty/v8/test/mjsunit/debug-stepin-accessor.js
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -112,8 +112,8 @@ function testGetter1_2() {
function testGetter1_3() {
expected_function_name = 'getter1';
expected_source_line_text = ' return this.name; // getter 1';
- debugger;
for (var i = 1; i < 2; i++) {
+ debugger;
var x = c['getter' + i];
}
}
diff --git a/src/3rdparty/v8/test/mjsunit/debug-stepin-function-call.js b/src/3rdparty/v8/test/mjsunit/debug-stepin-function-call.js
index 385fcb2..3b5240c 100644
--- a/src/3rdparty/v8/test/mjsunit/debug-stepin-function-call.js
+++ b/src/3rdparty/v8/test/mjsunit/debug-stepin-function-call.js
@@ -135,8 +135,15 @@ function apply4() {
var yetAnotherLocal = 10;
}
+// Test step into bound function.
+function bind1() {
+ var bound = g.bind(null, 3);
+ debugger;
+ bound();
+}
+
var testFunctions =
- [call1, call2, call3, call4, apply1, apply2, apply3, apply4];
+ [call1, call2, call3, call4, apply1, apply2, apply3, apply4, bind1];
for (var i = 0; i < testFunctions.length; i++) {
state = 0;
@@ -145,5 +152,13 @@ for (var i = 0; i < testFunctions.length; i++) {
assertEquals(3, state);
}
+// Test global bound function.
+state = 0;
+var globalBound = g.bind(null, 3);
+debugger;
+globalBound();
+assertNull(exception);
+assertEquals(3, state);
+
// Get rid of the debug event listener.
Debug.setListener(null); \ No newline at end of file
diff --git a/src/3rdparty/v8/test/mjsunit/debug-stepout-scope.js b/src/3rdparty/v8/test/mjsunit/debug-stepout-scope.js
new file mode 100644
index 0000000..9c040da
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/debug-stepout-scope.js
@@ -0,0 +1,423 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug --expose-natives-as=builtins
+
+// Check that the ScopeIterator can properly recreate the scope at
+// every point when stepping through functions.
+
+var Debug = debug.Debug;
+
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.Break) {
+ // Access scope details.
+ var scope_count = exec_state.frame().scopeCount();
+ for (var i = 0; i < scope_count; i++) {
+ var scope = exec_state.frame().scope(i);
+ // assertTrue(scope.isScope());
+ scope.scopeType();
+ scope.scopeObject();
+ }
+
+ // Do steps until we reach the global scope again.
+ if (true) {
+ exec_state.prepareStep(Debug.StepAction.StepInMin, 1);
+ }
+ }
+}
+
+Debug.setListener(listener);
+
+
+function test1() {
+ debugger;
+ with ({x:1}) {
+ x = 2;
+ }
+}
+test1();
+
+
+function test2() {
+ if (true) {
+ with ({}) {
+ debugger;
+ }
+ } else {
+ with ({}) {
+ return 10;
+ }
+ }
+}
+test2();
+
+
+function test3() {
+ if (true) {
+ debugger;
+ } else {
+ with ({}) {
+ return 10;
+ }
+ }
+}
+test3();
+
+
+function test4() {
+ debugger;
+ with ({x:1}) x = 1
+}
+test4();
+
+
+function test5() {
+ debugger;
+ var dummy = 1;
+ with ({}) {
+ with ({}) {
+ dummy = 2;
+ }
+ }
+ dummy = 3;
+}
+test5();
+
+
+function test6() {
+ debugger;
+ try {
+ throw 'stuff';
+ } catch (e) {
+ e = 1;
+ }
+}
+test6();
+
+
+function test7() {
+ debugger;
+ function foo() {}
+}
+test7();
+
+
+function test8() {
+ debugger;
+ (function foo() {})();
+}
+test8();
+
+
+var q = 42;
+var prefixes = [ "debugger; ",
+ "if (false) { try { throw 0; } catch(x) { return x; } }; debugger; " ];
+var bodies = [ "1",
+ "1 ",
+ "1;",
+ "1; ",
+ "q",
+ "q ",
+ "q;",
+ "q; ",
+ "try { throw 'stuff' } catch (e) { e = 1; }",
+ "try { throw 'stuff' } catch (e) { e = 1; } ",
+ "try { throw 'stuff' } catch (e) { e = 1; };",
+ "try { throw 'stuff' } catch (e) { e = 1; }; " ];
+var with_bodies = [ "with ({}) {}",
+ "with ({x:1}) x",
+ "with ({x:1}) x = 1",
+ "with ({x:1}) x ",
+ "with ({x:1}) x = 1 ",
+ "with ({x:1}) x;",
+ "with ({x:1}) x = 1;",
+ "with ({x:1}) x; ",
+ "with ({x:1}) x = 1; " ];
+
+
+function test9() {
+ debugger;
+ for (var i = 0; i < prefixes.length; ++i) {
+ var pre = prefixes[i];
+ for (var j = 0; j < bodies.length; ++j) {
+ var body = bodies[j];
+ eval(pre + body);
+ eval("'use strict'; " + pre + body);
+ }
+ for (var j = 0; j < with_bodies.length; ++j) {
+ var body = with_bodies[j];
+ eval(pre + body);
+ }
+ }
+}
+test9();
+
+
+function test10() {
+ debugger;
+ with ({}) {
+ return 10;
+ }
+}
+test10();
+
+
+function test11() {
+ debugger;
+ try {
+ throw 'stuff';
+ } catch (e) {
+ return 10;
+ }
+}
+test11();
+
+
+// Test global eval and function constructor.
+for (var i = 0; i < prefixes.length; ++i) {
+ var pre = prefixes[i];
+ for (var j = 0; j < bodies.length; ++j) {
+ var body = bodies[j];
+ eval(pre + body);
+ eval("'use strict'; " + pre + body);
+ Function(pre + body)();
+ }
+ for (var j = 0; j < with_bodies.length; ++j) {
+ var body = with_bodies[j];
+ eval(pre + body);
+ Function(pre + body)();
+ }
+}
+
+
+try {
+ with({}) {
+ debugger;
+ eval("{}$%:^");
+ }
+} catch(e) {
+ nop();
+}
+
+// Return from function constructed with Function constructor.
+var anon = 12;
+for (var i = 0; i < prefixes.length; ++i) {
+ var pre = prefixes[i];
+ Function(pre + "return 42")();
+ Function(pre + "return 42 ")();
+ Function(pre + "return 42;")();
+ Function(pre + "return 42; ")();
+ Function(pre + "return anon")();
+ Function(pre + "return anon ")();
+ Function(pre + "return anon;")();
+ Function(pre + "return anon; ")();
+}
+
+
+function nop() {}
+
+
+function stress() {
+ debugger;
+
+ L: with ({x:12}) {
+ break L;
+ }
+
+
+ with ({x: 'outer'}) {
+ label: {
+ with ({x: 'inner'}) {
+ break label;
+ }
+ }
+ }
+
+
+ with ({x: 'outer'}) {
+ label: {
+ with ({x: 'inner'}) {
+ break label;
+ }
+ }
+ nop();
+ }
+
+
+ with ({x: 'outer'}) {
+ label: {
+ with ({x: 'middle'}) {
+ with ({x: 'inner'}) {
+ break label;
+ }
+ }
+ }
+ }
+
+
+ with ({x: 'outer'}) {
+ label: {
+ with ({x: 'middle'}) {
+ with ({x: 'inner'}) {
+ break label;
+ }
+ }
+ }
+ nop();
+ }
+
+
+ with ({x: 'outer'}) {
+ for (var i = 0; i < 3; ++i) {
+ with ({x: 'inner' + i}) {
+ continue;
+ }
+ }
+ }
+
+
+ with ({x: 'outer'}) {
+ label: for (var i = 0; i < 3; ++i) {
+ with ({x: 'middle' + i}) {
+ for (var j = 0; j < 3; ++j) {
+ with ({x: 'inner' + j}) {
+ continue label;
+ }
+ }
+ }
+ }
+ }
+
+
+ with ({x: 'outer'}) {
+ try {
+ with ({x: 'inner'}) {
+ throw 0;
+ }
+ } catch (e) {
+ }
+ }
+
+
+ with ({x: 'outer'}) {
+ try {
+ with ({x: 'inner'}) {
+ throw 0;
+ }
+ } catch (e) {
+ nop();
+ }
+ }
+
+
+ with ({x: 'outer'}) {
+ try {
+ with ({x: 'middle'}) {
+ with ({x: 'inner'}) {
+ throw 0;
+ }
+ }
+ } catch (e) {
+ }
+ }
+
+
+ try {
+ with ({x: 'outer'}) {
+ try {
+ with ({x: 'inner'}) {
+ throw 0;
+ }
+ } finally {
+ }
+ }
+ } catch (e) {
+ }
+
+
+ try {
+ with ({x: 'outer'}) {
+ try {
+ with ({x: 'inner'}) {
+ throw 0;
+ }
+ } finally {
+ nop();
+ }
+ }
+ } catch (e) {
+ }
+
+
+ function stress1() {
+ with ({x:12}) {
+ return x;
+ }
+ }
+ stress1();
+
+
+ function stress2() {
+ with ({x: 'outer'}) {
+ with ({x: 'inner'}) {
+ return x;
+ }
+ }
+ }
+ stress2();
+
+ function stress3() {
+ try {
+ with ({x: 'inner'}) {
+ throw 0;
+ }
+ } catch (e) {
+ return e;
+ }
+ }
+ stress3();
+
+
+ function stress4() {
+ try {
+ with ({x: 'inner'}) {
+ throw 0;
+ }
+ } catch (e) {
+ with ({x: 'inner'}) {
+ return e;
+ }
+ }
+ }
+ stress4();
+
+}
+stress();
+
+
+// With block as the last(!) statement in global code.
+with ({}) { debugger; } \ No newline at end of file
diff --git a/src/3rdparty/v8/test/mjsunit/elements-kind.js b/src/3rdparty/v8/test/mjsunit/elements-kind.js
index cfd47c7..4aa79de 100644
--- a/src/3rdparty/v8/test/mjsunit/elements-kind.js
+++ b/src/3rdparty/v8/test/mjsunit/elements-kind.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -34,7 +34,7 @@
// in this test case. Depending on whether smi-only arrays are actually
// enabled, this test takes the appropriate code path to check smi-only arrays.
-support_smi_only_arrays = %HasFastSmiOnlyElements([]);
+support_smi_only_arrays = %HasFastSmiOnlyElements(new Array(1,2,3,4,5,6,7,8));
if (support_smi_only_arrays) {
print("Tests include smi-only arrays.");
@@ -108,11 +108,13 @@ me.dance = 0xD15C0;
me.drink = 0xC0C0A;
assertKind(elements_kind.fast, me);
-var too = [1,2,3];
-assertKind(elements_kind.fast_smi_only, too);
-too.dance = 0xD15C0;
-too.drink = 0xC0C0A;
-assertKind(elements_kind.fast_smi_only, too);
+if (support_smi_only_arrays) {
+ var too = [1,2,3];
+ assertKind(elements_kind.fast_smi_only, too);
+ too.dance = 0xD15C0;
+ too.drink = 0xC0C0A;
+ assertKind(elements_kind.fast_smi_only, too);
+}
// Make sure the element kind transitions from smionly when a non-smi is stored.
var you = new Array();
@@ -145,6 +147,7 @@ assertKind(elements_kind.external_pixel, new PixelArray(512));
// Crankshaft support for smi-only array elements.
function monomorphic(array) {
+ assertKind(elements_kind.fast_smi_only, array);
for (var i = 0; i < 3; i++) {
array[i] = i + 10;
}
@@ -154,7 +157,8 @@ function monomorphic(array) {
assertEquals(i + 10, a);
}
}
-var smi_only = [1, 2, 3];
+var smi_only = new Array(1, 2, 3);
+assertKind(elements_kind.fast_smi_only, smi_only);
for (var i = 0; i < 3; i++) monomorphic(smi_only);
%OptimizeFunctionOnNextCall(monomorphic);
monomorphic(smi_only);
@@ -232,15 +236,17 @@ if (support_smi_only_arrays) {
function get(foo) { return foo; } // Used to generate dynamic values.
function crankshaft_test() {
- var a = [get(1), get(2), get(3)];
- assertKind(elements_kind.fast_smi_only, a);
+ if (support_smi_only_arrays) {
+ var a1 = [get(1), get(2), get(3)];
+ assertKind(elements_kind.fast_smi_only, a1);
+ }
+ var a2 = new Array(get(1), get(2), get(3));
+ assertKind(elements_kind.fast_smi_only, a2);
var b = [get(1), get(2), get("three")];
assertKind(elements_kind.fast, b);
var c = [get(1), get(2), get(3.5)];
if (support_smi_only_arrays) {
assertKind(elements_kind.fast_double, c);
- } else {
- assertKind(elements_kind.fast, c);
}
}
for (var i = 0; i < 3; i++) {
@@ -305,5 +311,36 @@ if (support_smi_only_arrays) {
assertTrue(%HaveSameMap(e, f));
}
+// Test if Array.concat() works correctly with DOUBLE elements.
+if (support_smi_only_arrays) {
+ var a = [1, 2];
+ assertKind(elements_kind.fast_smi_only, a);
+ var b = [4.5, 5.5];
+ assertKind(elements_kind.fast_double, b);
+ var c = a.concat(b);
+ assertEquals([1, 2, 4.5, 5.5], c);
+ // TODO(1810): Change implementation so that we get DOUBLE elements here?
+ assertKind(elements_kind.fast, c);
+}
+
+// Test that Array.push() correctly handles SMI elements.
+if (support_smi_only_arrays) {
+ var a = [1, 2];
+ assertKind(elements_kind.fast_smi_only, a);
+ a.push(3, 4, 5);
+ assertKind(elements_kind.fast_smi_only, a);
+ assertEquals([1, 2, 3, 4, 5], a);
+}
+
+// Test that Array.splice() and Array.slice() return correct ElementsKinds.
+if (support_smi_only_arrays) {
+ var a = ["foo", "bar"];
+ assertKind(elements_kind.fast, a);
+ var b = a.splice(0, 1);
+ assertKind(elements_kind.fast, b);
+ var c = a.slice(0, 1);
+ assertKind(elements_kind.fast, c);
+}
+
// Throw away type information in the ICs for next stress run.
gc();
diff --git a/src/3rdparty/v8/test/mjsunit/elements-transition-hoisting.js b/src/3rdparty/v8/test/mjsunit/elements-transition-hoisting.js
new file mode 100644
index 0000000..5e78f10
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/elements-transition-hoisting.js
@@ -0,0 +1,211 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --smi-only-arrays --expose-gc
+
+// Ensure that ElementsKind transitions in various situations are hoisted (or
+// not hoisted) correctly, don't change the semantics programs and don't trigger
+// deopt through hoisting in important situations.
+
+support_smi_only_arrays = %HasFastSmiOnlyElements(new Array(1,2,3,4,5,6));
+
+if (support_smi_only_arrays) {
+ print("Tests include smi-only arrays.");
+} else {
+ print("Tests do NOT include smi-only arrays.");
+}
+
+// Force existing ICs from previous stress runs to be flushed, otherwise the
+// assumptions in this test about when deoptimizations get triggered are not
+// valid.
+gc();
+
+if (support_smi_only_arrays) {
+ // Make sure that a simple elements array transitions inside a loop before
+ // stores to an array gets hoisted in a way that doesn't generate a deopt in
+ // simple cases.}
+ function testDoubleConversion4(a) {
+ var object = new Object();
+ a[0] = 0;
+ var count = 3;
+ do {
+ a[0] = object;
+ } while (--count > 0);
+ }
+
+ testDoubleConversion4(new Array(5));
+ %OptimizeFunctionOnNextCall(testDoubleConversion4);
+ testDoubleConversion4(new Array(5));
+ testDoubleConversion4(new Array(5));
+ assertTrue(2 != %GetOptimizationStatus(testDoubleConversion4));
+
+ // Make sure that non-element related map checks that are not preceded by
+ // transitions in a loop still get hoisted in a way that doesn't generate a
+ // deopt in simple cases.
+ function testExactMapHoisting(a) {
+ var object = new Object();
+ a.foo = 0;
+ a[0] = 0;
+ a[1] = 1;
+ var count = 3;
+ do {
+ a.foo = object; // This map check should be hoistable
+ a[1] = object;
+ result = a.foo == object && a[1] == object;
+ } while (--count > 0);
+ }
+
+ testExactMapHoisting(new Array(5));
+ %OptimizeFunctionOnNextCall(testExactMapHoisting);
+ testExactMapHoisting(new Array(5));
+ testExactMapHoisting(new Array(5));
+ assertTrue(2 != %GetOptimizationStatus(testExactMapHoisting));
+
+ // Make sure that non-element related map checks do NOT get hoisted if they
+ // depend on an elements transition before them and it's not possible to hoist
+ // that transition.
+ function testExactMapHoisting2(a) {
+ var object = new Object();
+ a.foo = 0;
+ a[0] = 0;
+ a[1] = 1;
+ var count = 3;
+ do {
+ if (a.bar === undefined) {
+ a[1] = 2.5;
+ }
+ a.foo = object; // This map check should NOT be hoistable because it
+ // includes a check for the FAST_ELEMENTS map as well as
+ // the FAST_DOUBLE_ELEMENTS map, which depends on the
+ // double transition above in the if, which cannot be
+ // hoisted.
+ } while (--count > 0);
+ }
+
+ testExactMapHoisting2(new Array(5));
+ %OptimizeFunctionOnNextCall(testExactMapHoisting2);
+ testExactMapHoisting2(new Array(5));
+ testExactMapHoisting2(new Array(5));
+ assertTrue(2 != %GetOptimizationStatus(testExactMapHoisting2));
+
+ // Make sure that non-element related map checks do get hoisted if they use
+ // the transitioned map for the check and all transitions that they depend
+ // upon can hoisted, too.
+ function testExactMapHoisting3(a) {
+ var object = new Object();
+ a.foo = 0;
+ a[0] = 0;
+ a[1] = 1;
+ var count = 3;
+ do {
+ a[1] = 2.5;
+ a.foo = object; // This map check should be hoistable because all elements
+ // transitions in the loop can also be hoisted.
+ } while (--count > 0);
+ }
+
+ var add_transition = new Array(5);
+ add_transition.foo = 0;
+ add_transition[0] = new Object(); // For FAST_ELEMENT transition to be created
+ testExactMapHoisting3(new Array(5));
+ %OptimizeFunctionOnNextCall(testExactMapHoisting3);
+ testExactMapHoisting3(new Array(5));
+ testExactMapHoisting3(new Array(5));
+ assertTrue(2 != %GetOptimizationStatus(testExactMapHoisting3));
+
+ function testDominatingTransitionHoisting1(a) {
+ var object = new Object();
+ a[0] = 0;
+ var count = 3;
+ do {
+ if (a.baz != true) {
+ a[1] = 2.5;
+ }
+ a[0] = object;
+ } while (--count > 3);
+ }
+
+ testDominatingTransitionHoisting1(new Array(5));
+ %OptimizeFunctionOnNextCall(testDominatingTransitionHoisting1);
+ testDominatingTransitionHoisting1(new Array(5));
+ testDominatingTransitionHoisting1(new Array(5));
+ assertTrue(2 != %GetOptimizationStatus(testDominatingTransitionHoisting1));
+
+ function testHoistingWithSideEffect(a) {
+ var object = new Object();
+ a[0] = 0;
+ var count = 3;
+ do {
+ assertTrue(true);
+ a[0] = object;
+ } while (--count > 3);
+ }
+
+ testHoistingWithSideEffect(new Array(5));
+ %OptimizeFunctionOnNextCall(testHoistingWithSideEffect);
+ testHoistingWithSideEffect(new Array(5));
+ testHoistingWithSideEffect(new Array(5));
+ assertTrue(2 != %GetOptimizationStatus(testHoistingWithSideEffect));
+
+ function testStraightLineDupeElinination(a,b,c,d,e,f) {
+ var count = 3;
+ do {
+ assertTrue(true);
+ a[0] = b;
+ a[1] = c;
+ a[2] = d;
+ assertTrue(true);
+ a[3] = e; // TransitionElementsKind should be eliminated despite call.
+ a[4] = f;
+ } while (--count > 3);
+ }
+
+ testStraightLineDupeElinination(new Array(0, 0, 0, 0, 0),0,0,0,0,.5);
+ testStraightLineDupeElinination(new Array(0, 0, 0, 0, 0),0,0,0,.5,0);
+ testStraightLineDupeElinination(new Array(0, 0, 0, 0, 0),0,0,.5,0,0);
+ testStraightLineDupeElinination(new Array(0, 0, 0, 0, 0),0,.5,0,0,0);
+ testStraightLineDupeElinination(new Array(0, 0, 0, 0, 0),.5,0,0,0,0);
+ testStraightLineDupeElinination(new Array(.1,.1,.1,.1,.1),0,0,0,0,.5);
+ testStraightLineDupeElinination(new Array(.1,.1,.1,.1,.1),0,0,0,.5,0);
+ testStraightLineDupeElinination(new Array(.1,.1,.1,.1,.1),0,0,.5,0,0);
+ testStraightLineDupeElinination(new Array(.1,.1,.1,.1,.1),0,.5,0,0,0);
+ testStraightLineDupeElinination(new Array(.1,.1,.1,.1,.1),.5,0,0,0,0);
+ testStraightLineDupeElinination(new Array(5),.5,0,0,0,0);
+ testStraightLineDupeElinination(new Array(5),0,.5,0,0,0);
+ testStraightLineDupeElinination(new Array(5),0,0,.5,0,0);
+ testStraightLineDupeElinination(new Array(5),0,0,0,.5,0);
+ testStraightLineDupeElinination(new Array(5),0,0,0,0,.5);
+ testStraightLineDupeElinination(new Array(5),.5,0,0,0,0);
+ testStraightLineDupeElinination(new Array(5),0,.5,0,0,0);
+ testStraightLineDupeElinination(new Array(5),0,0,.5,0,0);
+ testStraightLineDupeElinination(new Array(5),0,0,0,.5,0);
+ testStraightLineDupeElinination(new Array(5),0,0,0,0,.5);
+ %OptimizeFunctionOnNextCall(testStraightLineDupeElinination);
+ testStraightLineDupeElinination(new Array(5));
+ testStraightLineDupeElinination(new Array(5));
+ assertTrue(2 != %GetOptimizationStatus(testStraightLineDupeElinination));
+}
diff --git a/src/3rdparty/v8/test/mjsunit/elements-transition.js b/src/3rdparty/v8/test/mjsunit/elements-transition.js
index 5f6cc4f..60e051b 100644
--- a/src/3rdparty/v8/test/mjsunit/elements-transition.js
+++ b/src/3rdparty/v8/test/mjsunit/elements-transition.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -27,7 +27,13 @@
// Flags: --allow-natives-syntax --smi-only-arrays
-support_smi_only_arrays = %HasFastSmiOnlyElements([]);
+support_smi_only_arrays = %HasFastSmiOnlyElements(new Array(1,2,3,4,5,6,7,8));
+
+if (support_smi_only_arrays) {
+ print("Tests include smi-only arrays.");
+} else {
+ print("Tests do NOT include smi-only arrays.");
+}
if (support_smi_only_arrays) {
function test(test_double, test_object, set, length) {
@@ -104,4 +110,4 @@ if (support_smi_only_arrays) {
assertEquals(1, b[0]);
} else {
print("Test skipped because smi only arrays are not supported.");
-} \ No newline at end of file
+}
diff --git a/src/3rdparty/v8/test/mjsunit/external-array.js b/src/3rdparty/v8/test/mjsunit/external-array.js
index 81c6cfe..32f78a7 100644
--- a/src/3rdparty/v8/test/mjsunit/external-array.js
+++ b/src/3rdparty/v8/test/mjsunit/external-array.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -43,6 +43,50 @@ f(a);
assertEquals(0, a[0]);
assertEquals(0, a[1]);
+// No-parameter constructor should fail right now.
+function abfunc1() {
+ return new ArrayBuffer();
+}
+assertThrows(abfunc1);
+
+// Test derivation from an ArrayBuffer
+var ab = new ArrayBuffer(12);
+var derived_uint8 = new Uint8Array(ab);
+assertEquals(12, derived_uint8.length);
+var derived_uint32 = new Uint32Array(ab);
+assertEquals(3, derived_uint32.length);
+var derived_uint32_2 = new Uint32Array(ab,4);
+assertEquals(2, derived_uint32_2.length);
+var derived_uint32_3 = new Uint32Array(ab,4,1);
+assertEquals(1, derived_uint32_3.length);
+
+// If a given byteOffset and length references an area beyond the end of the
+// ArrayBuffer an exception is raised.
+function abfunc3() {
+ new Uint32Array(ab,4,3);
+}
+assertThrows(abfunc3);
+function abfunc4() {
+ new Uint32Array(ab,16);
+}
+assertThrows(abfunc4);
+
+// The given byteOffset must be a multiple of the element size of the specific
+// type, otherwise an exception is raised.
+function abfunc5() {
+ new Uint32Array(ab,5);
+}
+assertThrows(abfunc5);
+
+// If length is not explicitly specified, the length of the ArrayBuffer minus
+// the byteOffset must be a multiple of the element size of the specific type,
+// or an exception is raised.
+var ab2 = new ArrayBuffer(13);
+function abfunc6() {
+ new Uint32Array(ab2,4);
+}
+assertThrows(abfunc6);
+
// Test the correct behavior of the |BYTES_PER_ELEMENT| property (which is
// "constant", but not read-only).
a = new Int32Array(2);
@@ -273,3 +317,37 @@ for (var t = 0; t < types.length; t++) {
%DeoptimizeFunction(array_load_set_smi_check2);
gc(); // Makes V8 forget about type information for array_load_set_smi_check.
}
+
+// Check handling of undefined in 32- and 64-bit external float arrays.
+
+function store_float32_undefined(ext_array) {
+ ext_array[0] = undefined;
+}
+
+var float32_array = new Float32Array(1);
+// Make sure runtime does it right
+store_float32_undefined(float32_array);
+assertTrue(isNaN(float32_array[0]));
+// Make sure the ICs do it right
+store_float32_undefined(float32_array);
+assertTrue(isNaN(float32_array[0]));
+// Make sure that Cranskshft does it right.
+%OptimizeFunctionOnNextCall(store_float32_undefined);
+store_float32_undefined(float32_array);
+assertTrue(isNaN(float32_array[0]));
+
+function store_float64_undefined(ext_array) {
+ ext_array[0] = undefined;
+}
+
+var float64_array = new Float64Array(1);
+// Make sure runtime does it right
+store_float64_undefined(float64_array);
+assertTrue(isNaN(float64_array[0]));
+// Make sure the ICs do it right
+store_float64_undefined(float64_array);
+assertTrue(isNaN(float64_array[0]));
+// Make sure that Cranskshft does it right.
+%OptimizeFunctionOnNextCall(store_float64_undefined);
+store_float64_undefined(float64_array);
+assertTrue(isNaN(float64_array[0]));
diff --git a/src/3rdparty/v8/test/mjsunit/function-call.js b/src/3rdparty/v8/test/mjsunit/function-call.js
index 06479ad..26890ed 100644
--- a/src/3rdparty/v8/test/mjsunit/function-call.js
+++ b/src/3rdparty/v8/test/mjsunit/function-call.js
@@ -68,8 +68,7 @@ var should_throw_on_null_and_undefined =
String.prototype.toUpperCase,
String.prototype.toLocaleUpperCase,
String.prototype.trim,
- Number.prototype.toLocaleString,
- Error.prototype.toString];
+ Number.prototype.toLocaleString];
// Non generic natives do not work on any input other than the specific
// type, but since this change will allow call to be invoked with undefined
@@ -134,7 +133,8 @@ var non_generic =
Date.prototype.toJSON,
RegExp.prototype.exec,
RegExp.prototype.test,
- RegExp.prototype.toString];
+ RegExp.prototype.toString,
+ Error.prototype.toString];
// Mapping functions.
diff --git a/src/3rdparty/v8/src/extensions/experimental/i18n-extension.h b/src/3rdparty/v8/test/mjsunit/function-named-self-reference.js
index 5401f25..5b03b09 100644
--- a/src/3rdparty/v8/src/extensions/experimental/i18n-extension.h
+++ b/src/3rdparty/v8/test/mjsunit/function-named-self-reference.js
@@ -25,30 +25,21 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#ifndef V8_EXTENSIONS_EXPERIMENTAL_I18N_EXTENSION_H_
-#define V8_EXTENSIONS_EXPERIMENTAL_I18N_EXTENSION_H_
+// Flags: --allow-natives-syntax
-#include "include/v8.h"
+var fn = function fn(val) {
+ if (val) return val;
-namespace v8 {
-namespace internal {
+ %OptimizeFunctionOnNextCall(fn);
+ function run(val) {
+ var res = fn((val + 1) << 1);
-class I18NExtension : public v8::Extension {
- public:
- I18NExtension();
+ return res;
+ }
- virtual v8::Handle<v8::FunctionTemplate> GetNativeFunction(
- v8::Handle<v8::String> name);
+ return run(0);
+}
- // V8 code prefers Register, while Chrome and WebKit use get kind of methods.
- static void Register();
- static I18NExtension* get();
-
- private:
- static I18NExtension* extension_;
-};
-
-} } // namespace v8::internal
-
-#endif // V8_EXTENSIONS_EXPERIMENTAL_I18N_EXTENSION_H_
+var res = fn();
+assertEquals(res, 2);
diff --git a/src/3rdparty/v8/test/mjsunit/fuzz-natives.js b/src/3rdparty/v8/test/mjsunit/fuzz-natives.js
index ff6677e..2965e74 100644
--- a/src/3rdparty/v8/test/mjsunit/fuzz-natives.js
+++ b/src/3rdparty/v8/test/mjsunit/fuzz-natives.js
@@ -163,6 +163,9 @@ var knownProblems = {
"PromoteScheduledException": true,
"DeleteHandleScopeExtensions": true,
+ // Vararg with minimum number > 0.
+ "Call": true,
+
// Requires integer arguments to be non-negative.
"Apply": true,
@@ -181,8 +184,9 @@ var knownProblems = {
"RegExpConstructResult": true,
"_RegExpConstructResult": true,
- // This function performs some checks compile time (it requires its first
- // argument to be a compile time smi).
+ // This functions perform some checks compile time (they require one of their
+ // arguments to be a compile time smi).
+ "_DateField": true,
"_GetFromCache": true,
// This function expects its first argument to be a non-smi.
diff --git a/src/3rdparty/v8/test/mjsunit/get-own-property-descriptor.js b/src/3rdparty/v8/test/mjsunit/get-own-property-descriptor.js
index abb2420..159c63b 100644
--- a/src/3rdparty/v8/test/mjsunit/get-own-property-descriptor.js
+++ b/src/3rdparty/v8/test/mjsunit/get-own-property-descriptor.js
@@ -73,7 +73,7 @@ assertEquals(descObjectElement.value, 42);
var a = new String('foobar');
for (var i = 0; i < a.length; i++) {
var descStringObject = Object.getOwnPropertyDescriptor(a, i);
- assertFalse(descStringObject.enumerable);
+ assertTrue(descStringObject.enumerable);
assertFalse(descStringObject.configurable);
assertFalse(descStringObject.writable);
assertEquals(descStringObject.value, a.substring(i, i+1));
diff --git a/src/3rdparty/v8/test/mjsunit/getter-in-value-prototype.js b/src/3rdparty/v8/test/mjsunit/getter-in-value-prototype.js
index b55320a..abe2cb1 100644
--- a/src/3rdparty/v8/test/mjsunit/getter-in-value-prototype.js
+++ b/src/3rdparty/v8/test/mjsunit/getter-in-value-prototype.js
@@ -31,5 +31,5 @@
// JSObject.
String.prototype.__defineGetter__('x', function() { return this; });
-assertEquals('asdf', 'asdf'.x);
+assertEquals(Object('asdf'), 'asdf'.x);
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/block-conflicts.js b/src/3rdparty/v8/test/mjsunit/harmony/block-conflicts.js
index e27d6a1..8388504 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/block-conflicts.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/block-conflicts.js
@@ -29,6 +29,9 @@
// Test for conflicting variable bindings.
+// TODO(ES6): properly activate extended mode
+"use strict";
+
function CheckException(e) {
var string = e.toString();
assertTrue(string.indexOf("has already been declared") >= 0 ||
@@ -127,5 +130,5 @@ for (var v = 0; v < varbinds.length; ++v) {
// Test conflicting parameter/var bindings.
for (var v = 0; v < varbinds.length; ++v) {
- TestConflict('(function (x) { ' + varbinds[v] + '; })()');
+ TestNoConflict('(function (x) { ' + varbinds[v] + '; })()');
}
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/block-const-assign.js b/src/3rdparty/v8/test/mjsunit/harmony/block-const-assign.js
new file mode 100644
index 0000000..8297a55
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/harmony/block-const-assign.js
@@ -0,0 +1,131 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-scoping
+
+// Test that we throw early syntax errors in harmony mode
+// when using an immutable binding in an assigment or with
+// prefix/postfix decrement/increment operators.
+// TODO(ES6): properly activate extended mode
+"use strict";
+
+
+// Function local const.
+function constDecl0(use) {
+ return "(function() { const constvar = 1; " + use + "; });";
+}
+
+
+function constDecl1(use) {
+ return "(function() { " + use + "; const constvar = 1; });";
+}
+
+
+// Function local const, assign from eval.
+function constDecl2(use) {
+ use = "eval('(function() { " + use + " })')";
+ return "(function() { const constvar = 1; " + use + "; })();";
+}
+
+
+function constDecl3(use) {
+ use = "eval('(function() { " + use + " })')";
+ return "(function() { " + use + "; const constvar = 1; })();";
+}
+
+
+// Block local const.
+function constDecl4(use) {
+ return "(function() { { const constvar = 1; " + use + "; } });";
+}
+
+
+function constDecl5(use) {
+ return "(function() { { " + use + "; const constvar = 1; } });";
+}
+
+
+// Block local const, assign from eval.
+function constDecl6(use) {
+ use = "eval('(function() {" + use + "})')";
+ return "(function() { { const constvar = 1; " + use + "; } })();";
+}
+
+
+function constDecl7(use) {
+ use = "eval('(function() {" + use + "})')";
+ return "(function() { { " + use + "; const constvar = 1; } })();";
+}
+
+
+// Function expression name.
+function constDecl8(use) {
+ return "(function constvar() { " + use + "; });";
+}
+
+
+// Function expression name, assign from eval.
+function constDecl9(use) {
+ use = "eval('(function(){" + use + "})')";
+ return "(function constvar() { " + use + "; })();";
+}
+
+let decls = [ constDecl0,
+ constDecl1,
+ constDecl2,
+ constDecl3,
+ constDecl4,
+ constDecl5,
+ constDecl6,
+ constDecl7,
+ constDecl8,
+ constDecl9
+ ];
+let uses = [ 'constvar = 1;',
+ 'constvar += 1;',
+ '++constvar;',
+ 'constvar++;'
+ ];
+
+function Test(d,u) {
+ 'use strict';
+ try {
+ print(d(u));
+ eval(d(u));
+ } catch (e) {
+ assertInstanceof(e, SyntaxError);
+ assertTrue(e.toString().indexOf("Assignment to constant variable") >= 0);
+ return;
+ }
+ assertUnreachable();
+}
+
+for (var d = 0; d < decls.length; ++d) {
+ for (var u = 0; u < uses.length; ++u) {
+ Test(decls[d], uses[u]);
+ }
+}
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/block-early-errors.js b/src/3rdparty/v8/test/mjsunit/harmony/block-early-errors.js
new file mode 100644
index 0000000..791f001
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/harmony/block-early-errors.js
@@ -0,0 +1,55 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-scoping
+
+function CheckException(e) {
+ var string = e.toString();
+ assertInstanceof(e, SyntaxError);
+ assertTrue(string.indexOf("Illegal let") >= 0);
+}
+
+function Check(str) {
+ try {
+ eval("(function () { " + str + " })");
+ assertUnreachable();
+ } catch (e) {
+ CheckException(e);
+ }
+ try {
+ eval("(function () { { " + str + " } })");
+ assertUnreachable();
+ } catch (e) {
+ CheckException(e);
+ }
+}
+
+// Check for early syntax errors when using let
+// declarations outside of extended mode.
+Check("let x;");
+Check("let x = 1;");
+Check("let x, y;");
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/block-for.js b/src/3rdparty/v8/test/mjsunit/harmony/block-for.js
index 1f68037..e84f0d2 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/block-for.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/block-for.js
@@ -27,6 +27,9 @@
// Flags: --harmony-scoping
+// TODO(ES6): properly activate extended mode
+"use strict";
+
function props(x) {
var array = [];
for (let p in x) array.push(p);
@@ -90,12 +93,13 @@ assertEquals('ab', result);
// Check that there is exactly one variable without initializer
// in a for-in statement with let variables.
-assertThrows("function foo() { for (let in {}) { } }", SyntaxError);
-assertThrows("function foo() { for (let x = 3 in {}) { } }", SyntaxError);
-assertThrows("function foo() { for (let x, y in {}) { } }", SyntaxError);
-assertThrows("function foo() { for (let x = 3, y in {}) { } }", SyntaxError);
-assertThrows("function foo() { for (let x, y = 4 in {}) { } }", SyntaxError);
-assertThrows("function foo() { for (let x = 3, y = 4 in {}) { } }", SyntaxError);
+// TODO(ES6): properly activate extended mode
+assertThrows("function foo() { 'use strict'; for (let in {}) { } }", SyntaxError);
+assertThrows("function foo() { 'use strict'; for (let x = 3 in {}) { } }", SyntaxError);
+assertThrows("function foo() { 'use strict'; for (let x, y in {}) { } }", SyntaxError);
+assertThrows("function foo() { 'use strict'; for (let x = 3, y in {}) { } }", SyntaxError);
+assertThrows("function foo() { 'use strict'; for (let x, y = 4 in {}) { } }", SyntaxError);
+assertThrows("function foo() { 'use strict'; for (let x = 3, y = 4 in {}) { } }", SyntaxError);
// In a normal for statement the iteration variable is not
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/block-leave.js b/src/3rdparty/v8/test/mjsunit/harmony/block-leave.js
index e6e3cef..a7f6b69 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/block-leave.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/block-leave.js
@@ -27,6 +27,9 @@
// Flags: --harmony-scoping
+// TODO(ES6): properly activate extended mode
+"use strict";
+
// We want to test the context chain shape. In each of the tests cases
// below, the outer with is to force a runtime lookup of the identifier 'x'
// to actually verify that the inner context has been discarded. A static
@@ -64,31 +67,30 @@ try {
} catch (e) {
caught = true;
assertEquals(25, e);
- with ({y:19}) {
- assertEquals(19, y);
+ (function () {
try {
// NOTE: This checks that the block scope containing xx has been
// removed from the context chain.
- xx;
+ eval('xx');
assertTrue(false); // should not reach here
} catch (e2) {
assertTrue(e2 instanceof ReferenceError);
}
- }
+ })();
}
assertTrue(caught);
-with ({x: 'outer'}) {
+(function(x) {
label: {
let x = 'inner';
break label;
}
- assertEquals('outer', x);
-}
+ assertEquals('outer', eval('x'));
+})('outer');
-with ({x: 'outer'}) {
+(function(x) {
label: {
let x = 'middle';
{
@@ -96,20 +98,20 @@ with ({x: 'outer'}) {
break label;
}
}
- assertEquals('outer', x);
-}
+ assertEquals('outer', eval('x'));
+})('outer');
-with ({x: 'outer'}) {
+(function(x) {
for (var i = 0; i < 10; ++i) {
let x = 'inner' + i;
continue;
}
- assertEquals('outer', x);
-}
+ assertEquals('outer', eval('x'));
+})('outer');
-with ({x: 'outer'}) {
+(function(x) {
label: for (var i = 0; i < 10; ++i) {
let x = 'middle' + i;
for (var j = 0; j < 10; ++j) {
@@ -117,21 +119,21 @@ with ({x: 'outer'}) {
continue label;
}
}
- assertEquals('outer', x);
-}
+ assertEquals('outer', eval('x'));
+})('outer');
-with ({x: 'outer'}) {
+(function(x) {
try {
let x = 'inner';
throw 0;
} catch (e) {
- assertEquals('outer', x);
+ assertEquals('outer', eval('x'));
}
-}
+})('outer');
-with ({x: 'outer'}) {
+(function(x) {
try {
let x = 'middle';
{
@@ -139,27 +141,27 @@ with ({x: 'outer'}) {
throw 0;
}
} catch (e) {
- assertEquals('outer', x);
+ assertEquals('outer', eval('x'));
}
-}
+})('outer');
try {
- with ({x: 'outer'}) {
+ (function(x) {
try {
let x = 'inner';
throw 0;
} finally {
- assertEquals('outer', x);
+ assertEquals('outer', eval('x'));
}
- }
+ })('outer');
} catch (e) {
if (e instanceof MjsUnitAssertionError) throw e;
}
try {
- with ({x: 'outer'}) {
+ (function(x) {
try {
let x = 'middle';
{
@@ -167,9 +169,9 @@ try {
throw 0;
}
} finally {
- assertEquals('outer', x);
+ assertEquals('outer', eval('x'));
}
- }
+ })('outer');
} catch (e) {
if (e instanceof MjsUnitAssertionError) throw e;
}
@@ -179,47 +181,47 @@ try {
// from with.
function f() {}
-with ({x: 'outer'}) {
+(function(x) {
label: {
let x = 'inner';
break label;
}
f(); // The context could be restored from the stack after the call.
- assertEquals('outer', x);
-}
+ assertEquals('outer', eval('x'));
+})('outer');
-with ({x: 'outer'}) {
+(function(x) {
for (var i = 0; i < 10; ++i) {
let x = 'inner';
continue;
}
f();
- assertEquals('outer', x);
-}
+ assertEquals('outer', eval('x'));
+})('outer');
-with ({x: 'outer'}) {
+(function(x) {
try {
let x = 'inner';
throw 0;
} catch (e) {
f();
- assertEquals('outer', x);
+ assertEquals('outer', eval('x'));
}
-}
+})('outer');
try {
- with ({x: 'outer'}) {
+ (function(x) {
try {
let x = 'inner';
throw 0;
} finally {
f();
- assertEquals('outer', x);
+ assertEquals('outer', eval('x'));
}
- }
+ })('outer');
} catch (e) {
if (e instanceof MjsUnitAssertionError) throw e;
}
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/block-let-crankshaft.js b/src/3rdparty/v8/test/mjsunit/harmony/block-let-crankshaft.js
index 98d1464..1db1792 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/block-let-crankshaft.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/block-let-crankshaft.js
@@ -27,8 +27,207 @@
// Flags: --harmony-scoping --allow-natives-syntax
+// TODO(ES6): properly activate extended mode
+"use strict";
+
+// Check that the following functions are optimizable.
+var functions = [ f1, f2, f3, f4, f5, f6, f7, f8, f9, f10, f11, f12, f13, f14,
+ f15, f16, f17, f18, f19, f20, f21, f22, f23 ];
+
+for (var i = 0; i < functions.length; ++i) {
+ var func = functions[i];
+ print("Testing:");
+ print(func);
+ for (var j = 0; j < 10; ++j) {
+ func(12);
+ }
+ %OptimizeFunctionOnNextCall(func);
+ func(12);
+ assertTrue(%GetOptimizationStatus(func) != 2);
+}
+
+function f1() { }
+
+function f2(x) { }
+
+function f3() {
+ let x;
+}
+
+function f4() {
+ function foo() {
+ }
+}
+
+function f5() {
+ let x = 1;
+}
+
+function f6() {
+ const x = 1;
+}
+
+function f7(x) {
+ return x;
+}
+
+function f8() {
+ let x;
+ return x;
+}
+
+function f9() {
+ function x() {
+ }
+ return x;
+}
+
+function f10(x) {
+ x = 1;
+}
+
+function f11() {
+ let x;
+ x = 1;
+}
+
+function f12() {
+ function x() {};
+ x = 1;
+}
+
+function f13(x) {
+ (function() { x; });
+}
+
+function f14() {
+ let x;
+ (function() { x; });
+}
+
+function f15() {
+ function x() {
+ }
+ (function() { x; });
+}
+
+function f16() {
+ let x = 1;
+ (function() { x; });
+}
+
+function f17() {
+ const x = 1;
+ (function() { x; });
+}
+
+function f18(x) {
+ return x;
+ (function() { x; });
+}
+
+function f19() {
+ let x;
+ return x;
+ (function() { x; });
+}
+
+function f20() {
+ function x() {
+ }
+ return x;
+ (function() { x; });
+}
+
+function f21(x) {
+ x = 1;
+ (function() { x; });
+}
+
+function f22() {
+ let x;
+ x = 1;
+ (function() { x; });
+}
+
+function f23() {
+ function x() { }
+ x = 1;
+ (function() { x; });
+}
+
+
// Test that temporal dead zone semantics for function and block scoped
-// ket bindings are handled by the optimizing compiler.
+// let bindings are handled by the optimizing compiler.
+
+function TestFunctionLocal(s) {
+ 'use strict';
+ var func = eval("(function baz(){" + s + "; })");
+ print("Testing:");
+ print(func);
+ for (var i = 0; i < 5; ++i) {
+ try {
+ func();
+ assertUnreachable();
+ } catch (e) {
+ assertInstanceof(e, ReferenceError);
+ }
+ }
+ %OptimizeFunctionOnNextCall(func);
+ try {
+ func();
+ assertUnreachable();
+ } catch (e) {
+ assertInstanceof(e, ReferenceError);
+ }
+}
+
+function TestFunctionContext(s) {
+ 'use strict';
+ var func = eval("(function baz(){ " + s + "; (function() { x; }); })");
+ print("Testing:");
+ print(func);
+ for (var i = 0; i < 5; ++i) {
+ print(i);
+ try {
+ func();
+ assertUnreachable();
+ } catch (e) {
+ assertInstanceof(e, ReferenceError);
+ }
+ }
+ print("optimize");
+ %OptimizeFunctionOnNextCall(func);
+ try {
+ print("call");
+ func();
+ assertUnreachable();
+ } catch (e) {
+ print("catch");
+ assertInstanceof(e, ReferenceError);
+ }
+}
+
+function TestAll(s) {
+ TestFunctionLocal(s);
+ TestFunctionContext(s);
+}
+
+// Use before initialization in declaration statement.
+TestAll('let x = x + 1');
+TestAll('let x = x += 1');
+TestAll('let x = x++');
+TestAll('let x = ++x');
+TestAll('const x = x + 1');
+
+// Use before initialization in prior statement.
+TestAll('x + 1; let x;');
+TestAll('x = 1; let x;');
+TestAll('x += 1; let x;');
+TestAll('++x; let x;');
+TestAll('x++; let x;');
+TestAll('let y = x; const x = 1;');
+
function f(x, b) {
let y = (b ? y : x) + 42;
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/block-let-declaration.js b/src/3rdparty/v8/test/mjsunit/harmony/block-let-declaration.js
index a1acc28..480e033 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/block-let-declaration.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/block-let-declaration.js
@@ -28,6 +28,8 @@
// Flags: --harmony-scoping
// Test let declarations in various settings.
+// TODO(ES6): properly activate extended mode
+"use strict";
// Global
let x;
@@ -54,11 +56,11 @@ if (true) {
// an exception in eval code during parsing, before even compiling or executing
// the code. Thus the generated function is not called here.
function TestLocalThrows(str, expect) {
- assertThrows("(function(){" + str + "})", expect);
+ assertThrows("(function(){ 'use strict'; " + str + "})", expect);
}
function TestLocalDoesNotThrow(str) {
- assertDoesNotThrow("(function(){" + str + "})()");
+ assertDoesNotThrow("(function(){ 'use strict'; " + str + "})()");
}
// Test let declarations in statement positions.
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/block-let-semantics.js b/src/3rdparty/v8/test/mjsunit/harmony/block-let-semantics.js
index f45b72f..d14e7cd 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/block-let-semantics.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/block-let-semantics.js
@@ -27,6 +27,9 @@
// Flags: --harmony-scoping
+// TODO(ES6): properly activate extended mode
+"use strict";
+
// Test temporal dead zone semantics of let bound variables in
// function and block scopes.
@@ -127,7 +130,7 @@ TestAll('{ function k() { return 0; } }; k(); ');
// Test that a function declaration sees the scope it resides in.
function f2() {
- let m, n;
+ let m, n, o, p;
{
m = g;
function g() {
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/block-scoping.js b/src/3rdparty/v8/test/mjsunit/harmony/block-scoping.js
index 0d0526a..31194d9 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/block-scoping.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/block-scoping.js
@@ -28,6 +28,9 @@
// Flags: --allow-natives-syntax --harmony-scoping
// Test functionality of block scopes.
+// TODO(ES6): properly activate extended mode
+"use strict";
+
// Hoisting of var declarations.
function f1() {
{
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/collections.js b/src/3rdparty/v8/test/mjsunit/harmony/collections.js
index 1ad1c6f..412e6f1 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/collections.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/collections.js
@@ -52,6 +52,8 @@ TestValidMapCalls(new WeakMap);
function TestInvalidCalls(m) {
assertThrows(function () { m.get(undefined) }, TypeError);
assertThrows(function () { m.set(undefined, 0) }, TypeError);
+ assertThrows(function () { m.get(null) }, TypeError);
+ assertThrows(function () { m.set(null, 0) }, TypeError);
assertThrows(function () { m.get(0) }, TypeError);
assertThrows(function () { m.set(0, 0) }, TypeError);
assertThrows(function () { m.get('a-key') }, TypeError);
@@ -69,12 +71,17 @@ function TestSet(set, key) {
assertFalse(set.has(key));
}
function TestSetBehavior(set) {
- for (i = 0; i < 20; i++) {
+ for (var i = 0; i < 20; i++) {
TestSet(set, new Object);
+ TestSet(set, i);
+ TestSet(set, i / 100);
+ TestSet(set, 'key-' + i);
+ }
+ var keys = [ +0, -0, +Infinity, -Infinity, true, false, null, undefined ];
+ for (var i = 0; i < keys.length; i++) {
+ TestSet(set, keys[i]);
}
}
-TestSet(new Set, 23);
-TestSet(new Set, 'foo');
TestSetBehavior(new Set);
@@ -99,7 +106,7 @@ function TestMapBehavior2(m) {
TestMapping(m, i / 10, new Object);
TestMapping(m, 'key-' + i, new Object);
}
- var keys = [ +0, -0, +Infinity, -Infinity, true, false ];
+ var keys = [ +0, -0, +Infinity, -Infinity, true, false, null, undefined ];
for (var i = 0; i < keys.length; i++) {
TestMapping(m, keys[i], new Object);
}
@@ -184,7 +191,7 @@ function TestArbitrary(m) {
map[property] = value;
assertEquals(value, map[property]);
}
- for (i = 0; i < 20; i++) {
+ for (var i = 0; i < 20; i++) {
TestProperty(m, i, 'val' + i);
TestProperty(m, 'foo' + i, 'bar' + i);
}
@@ -267,6 +274,40 @@ var o = Object.create({}, { myValue: {
assertEquals(10, o.myValue);
+// Regression test for issue 1884: Invoking any of the methods for Harmony
+// maps, sets, or weak maps, with a wrong type of receiver should be throwing
+// a proper TypeError.
+var alwaysBogus = [ undefined, null, true, "x", 23, {} ];
+var bogusReceiversTestSet = [
+ { proto: Set.prototype,
+ funcs: [ 'add', 'has', 'delete' ],
+ receivers: alwaysBogus.concat([ new Map, new WeakMap ]),
+ },
+ { proto: Map.prototype,
+ funcs: [ 'get', 'set', 'has', 'delete' ],
+ receivers: alwaysBogus.concat([ new Set, new WeakMap ]),
+ },
+ { proto: WeakMap.prototype,
+ funcs: [ 'get', 'set', 'has', 'delete' ],
+ receivers: alwaysBogus.concat([ new Set, new Map ]),
+ },
+];
+function TestBogusReceivers(testSet) {
+ for (var i = 0; i < testSet.length; i++) {
+ var proto = testSet[i].proto;
+ var funcs = testSet[i].funcs;
+ var receivers = testSet[i].receivers;
+ for (var j = 0; j < funcs.length; j++) {
+ var func = proto[funcs[j]];
+ for (var k = 0; k < receivers.length; k++) {
+ assertThrows(function () { func.call(receivers[k], {}) }, TypeError);
+ }
+ }
+ }
+}
+TestBogusReceivers(bogusReceiversTestSet);
+
+
// Stress Test
// There is a proposed stress-test available at the es-discuss mailing list
// which cannot be reasonably automated. Check it out by hand if you like:
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/debug-blockscopes.js b/src/3rdparty/v8/test/mjsunit/harmony/debug-blockscopes.js
index 4c49d9a..10aac2d 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/debug-blockscopes.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/debug-blockscopes.js
@@ -29,9 +29,11 @@
// The functions used for testing backtraces. They are at the top to make the
// testing of source line/column easier.
+// TODO(ES6): properly activate extended mode
+"use strict";
// Get the Debug object exposed from the debug context global object.
-Debug = debug.Debug;
+var Debug = debug.Debug;
var test_name;
var listener_delegate;
@@ -76,6 +78,7 @@ function EndTest() {
end_test_count++;
}
+var global_object = this;
// Check that the scope chain contains the expected types of scopes.
function CheckScopeChain(scopes, exec_state) {
@@ -89,7 +92,7 @@ function CheckScopeChain(scopes, exec_state) {
if (scopes[i] == debug.ScopeType.Global) {
// Objects don't have same class (one is "global", other is "Object",
// so just check the properties directly.
- assertPropertiesEqual(this, scope.scopeObject().value());
+ assertPropertiesEqual(global_object, scope.scopeObject().value());
}
}
@@ -329,114 +332,6 @@ local_7(1);
EndTest();
-// Single empty with block.
-BeginTest("With block 1");
-
-function with_block_1() {
- with({}) {
- debugger;
- }
-}
-
-listener_delegate = function(exec_state) {
- CheckScopeChain([debug.ScopeType.With,
- debug.ScopeType.Local,
- debug.ScopeType.Global], exec_state);
- CheckScopeContent({}, 0, exec_state);
- CheckScopeContent({}, 1, exec_state);
-};
-with_block_1();
-EndTest();
-
-
-// Nested empty with blocks.
-BeginTest("With block 2");
-
-function with_block_2() {
- with({}) {
- with({}) {
- debugger;
- }
- }
-}
-
-listener_delegate = function(exec_state) {
- CheckScopeChain([debug.ScopeType.With,
- debug.ScopeType.With,
- debug.ScopeType.Local,
- debug.ScopeType.Global], exec_state);
- CheckScopeContent({}, 0, exec_state);
- CheckScopeContent({}, 1, exec_state);
- CheckScopeContent({}, 2, exec_state);
-};
-with_block_2();
-EndTest();
-
-
-// With block using an in-place object literal.
-BeginTest("With block 3");
-
-function with_block_3() {
- with({a:1,b:2}) {
- debugger;
- }
-}
-
-listener_delegate = function(exec_state) {
- CheckScopeChain([debug.ScopeType.With,
- debug.ScopeType.Local,
- debug.ScopeType.Global], exec_state);
- CheckScopeContent({a:1,b:2}, 0, exec_state);
-};
-with_block_3();
-EndTest();
-
-
-// Nested with blocks using in-place object literals.
-BeginTest("With block 4");
-
-function with_block_4() {
- with({a:1,b:2}) {
- with({a:2,b:1}) {
- debugger;
- }
- }
-}
-
-listener_delegate = function(exec_state) {
- CheckScopeChain([debug.ScopeType.With,
- debug.ScopeType.With,
- debug.ScopeType.Local,
- debug.ScopeType.Global], exec_state);
- CheckScopeContent({a:2,b:1}, 0, exec_state);
- CheckScopeContent({a:1,b:2}, 1, exec_state);
-};
-with_block_4();
-EndTest();
-
-
-// With block and a block local variable.
-BeginTest("With block 5");
-
-function with_block_5() {
- with({a:1}) {
- let a = 2;
- debugger;
- }
-}
-
-listener_delegate = function(exec_state) {
- CheckScopeChain([debug.ScopeType.Block,
- debug.ScopeType.With,
- debug.ScopeType.Local,
- debug.ScopeType.Global], exec_state);
- CheckScopeContent({a:2}, 0, exec_state);
- CheckScopeContent({a:1}, 1, exec_state);
-};
-with_block_5();
-EndTest();
-
-
// Simple closure formed by returning an inner function referering to an outer
// block local variable and an outer function's parameter.
BeginTest("Closure 1");
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/debug-evaluate-blockscopes.js b/src/3rdparty/v8/test/mjsunit/harmony/debug-evaluate-blockscopes.js
index 06139d3..d6ce8b2 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/debug-evaluate-blockscopes.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/debug-evaluate-blockscopes.js
@@ -30,6 +30,12 @@
// Test debug evaluation for functions without local context, but with
// nested catch contexts.
+// TODO(ES6): properly activate extended mode
+"use strict";
+
+var x;
+var result;
+
function f() {
{ // Line 1.
let i = 1; // Line 2.
@@ -42,7 +48,7 @@ function f() {
};
// Get the Debug object exposed from the debug context global object.
-Debug = debug.Debug
+var Debug = debug.Debug
// Set breakpoint on line 6.
var bp = Debug.setBreakPoint(f, 6);
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/module-parsing.js b/src/3rdparty/v8/test/mjsunit/harmony/module-parsing.js
new file mode 100644
index 0000000..93e69e3
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/harmony/module-parsing.js
@@ -0,0 +1,159 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-modules
+
+// Test basic module syntax, with and without automatic semicolon insertion.
+
+module A {}
+
+module A1 = A
+module A2 = A;
+module A3 = A2
+
+module B {
+ export vx
+ export vy, lz, c, f
+
+ var vx
+ var vx, vy;
+ var vx = 0, vy
+ let lx, ly
+ let lz = 1
+ const c = 9
+ function f() {}
+
+ module C0 {}
+
+ export module C {
+ let x
+ export module D { export let x }
+ let y
+ }
+
+ let zz = ""
+
+ export var x0
+ export var x1, x2 = 6, x3
+ export let y0
+ export let y1 = 0, y2
+ export const z0 = 0
+ export const z1 = 2, z2 = 3
+ export function f0() {}
+ export module M1 {}
+ export module M2 = C.D
+ export module M3 at "http://where"
+
+ import i0 from I
+ import i1, i2, i3, M from I
+ import i4, i5 from "http://where"
+}
+
+module I {
+ export let i0, i1, i2, i3;
+ export module M {}
+}
+
+module C1 = B.C;
+module D1 = B.C.D
+module D2 = C1.D
+module D3 = D2
+
+module E1 at "http://where"
+module E2 at "http://where";
+module E3 = E1.F
+
+// Check that ASI does not interfere.
+
+module X
+{
+let x
+}
+
+module Y
+=
+X
+
+module Z
+at
+"file://local"
+
+import
+x
+,
+y
+from
+"file://local"
+
+
+module Wrap {
+export
+x
+,
+y
+
+export
+var
+v1 = 1
+
+export
+let
+v2 = 2
+
+export
+const
+v3 = 3
+
+export
+function
+f
+(
+)
+{
+}
+
+export
+module V
+{
+}
+}
+
+export A, A1, A2, A3, B, I, C1, D1, D2, D3, E1, E2, E3, X, Y, Z, Wrap, x, y, UU
+
+
+
+// Check that 'module' still works as an identifier.
+
+var module
+module = {}
+module["a"] = 6
+function module() {}
+function f(module) { return module }
+try {} catch (module) {}
+
+module
+v = 20
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/module-resolution.js b/src/3rdparty/v8/test/mjsunit/harmony/module-resolution.js
new file mode 100644
index 0000000..f9f492c
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/harmony/module-resolution.js
@@ -0,0 +1,139 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-modules --harmony-scoping
+
+// Test basic module interface inference.
+
+"use strict";
+
+print("begin.")
+
+export let x = print("0")
+
+export module B = A.B
+
+export module A {
+ export let x = print("1")
+ export let f = function() { return B.x }
+ export module B {
+ module BB = B
+ export BB, x
+ let x = print("2")
+ let y = print("3")
+ let Ax = A.x
+ let ABx = A.B.x
+ let Ay = A.y
+ let BBx = BB.x
+ let Af = A.f
+ function f(x,y) { return x }
+ }
+ export let y = print("4")
+ let Ax = A.x
+ let Bx = B.x
+ let ABx = A.B.x
+ module C {
+ export let z = print("5")
+ export module D = B
+ // TODO(rossberg): turn these into proper negative test cases once we have
+ // suitable error messages.
+ // import C.z // multiple declarations
+ import x from B
+ }
+ module D {
+ // TODO(rossberg): Handle import *.
+ // import A.* // invalid forward import
+ }
+ module M {}
+ // TODO(rossberg): Handle import *.
+ // import M.* // invalid forward import
+ let Cz = C.z
+ let CDx = C.D.x
+}
+
+export module Imports {
+ module A1 {
+ export module A2 {}
+ }
+ module B {
+ // TODO(rossberg): Handle import *.
+ // import A1.*
+ // import A2.* // unbound variable A2
+ }
+}
+
+export module E {
+ export let xx = x
+ export y, B
+ let Bx = B.x
+ // TODO(rossberg): Handle import *.
+ // import A.*
+}
+
+export module M1 {
+ export module A2 = M2
+}
+export module M2 {
+ export module A1 = M1
+}
+
+// TODO(rossberg): turn these into proper negative test cases once we have
+// suitable error messages.
+// module W1 = W2.W
+// module W2 = { export module W = W3 }
+// module W3 = W1 // cyclic module definition
+
+// module W1 = W2.W3
+// module W2 = {
+// export module W3 = W4
+// export module W4 = W1
+// } // cyclic module definition
+
+// TODO(rossberg): Handle import *.
+//module M3B = M3.B
+//export module M3 {
+// export module B { export let x = "" }
+// module C1 = { import M3.* }
+// module C2 = { import M3.B.* }
+// module C3 = { import M3B.* }
+// module C4 = { export x import B.* }
+//// TODO(rossberg): turn these into proper negative test cases once we have
+//// suitable error messages.
+//// export module C5 = { import C5.* } // invalid forward import
+//// export module C6 = { import M3.C6.* } // invalid forward import
+//}
+
+export module External at "external.js"
+export module External1 = External
+export module ExternalA = External.A
+export module InnerExternal {
+ export module E at "external.js"
+}
+export module External2 = InnerExternal.E
+//export let xxx = InnerExternal.E.A.x
+
+print("end.")
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/proxies-example-membrane.js b/src/3rdparty/v8/test/mjsunit/harmony/proxies-example-membrane.js
new file mode 100644
index 0000000..c6e7f9f
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/harmony/proxies-example-membrane.js
@@ -0,0 +1,512 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony
+
+
+// A simple no-op handler. Adapted from:
+// http://wiki.ecmascript.org/doku.php?id=harmony:proxies#examplea_no-op_forwarding_proxy
+
+function createHandler(obj) {
+ return {
+ getOwnPropertyDescriptor: function(name) {
+ var desc = Object.getOwnPropertyDescriptor(obj, name);
+ if (desc !== undefined) desc.configurable = true;
+ return desc;
+ },
+ getPropertyDescriptor: function(name) {
+ var desc = Object.getOwnPropertyDescriptor(obj, name);
+ //var desc = Object.getPropertyDescriptor(obj, name); // not in ES5
+ if (desc !== undefined) desc.configurable = true;
+ return desc;
+ },
+ getOwnPropertyNames: function() {
+ return Object.getOwnPropertyNames(obj);
+ },
+ getPropertyNames: function() {
+ return Object.getOwnPropertyNames(obj);
+ //return Object.getPropertyNames(obj); // not in ES5
+ },
+ defineProperty: function(name, desc) {
+ Object.defineProperty(obj, name, desc);
+ },
+ delete: function(name) {
+ return delete obj[name];
+ },
+ fix: function() {
+ if (Object.isFrozen(obj)) {
+ var result = {};
+ Object.getOwnPropertyNames(obj).forEach(function(name) {
+ result[name] = Object.getOwnPropertyDescriptor(obj, name);
+ });
+ return result;
+ }
+ // As long as obj is not frozen, the proxy won't allow itself to be fixed
+ return undefined; // will cause a TypeError to be thrown
+ },
+ has: function(name) { return name in obj; },
+ hasOwn: function(name) { return ({}).hasOwnProperty.call(obj, name); },
+ get: function(receiver, name) { return obj[name]; },
+ set: function(receiver, name, val) {
+ obj[name] = val; // bad behavior when set fails in non-strict mode
+ return true;
+ },
+ enumerate: function() {
+ var result = [];
+ for (var name in obj) { result.push(name); };
+ return result;
+ },
+ keys: function() { return Object.keys(obj); }
+ };
+}
+
+
+
+// Auxiliary definitions enabling tracking of object identity in output.
+
+var objectMap = new WeakMap;
+var objectCounter = 0;
+
+function registerObject(x, s) {
+ if (x === Object(x) && !objectMap.has(x))
+ objectMap.set(x, ++objectCounter + (s == undefined ? "" : ":" + s));
+}
+
+registerObject(this, "global");
+registerObject(Object.prototype, "Object.prototype");
+
+function str(x) {
+ if (x === Object(x)) return "[" + typeof x + " " + objectMap.get(x) + "]";
+ if (typeof x == "string") return "\"" + x + "\"";
+ return "" + x;
+}
+
+
+
+// A simple membrane. Adapted from:
+// http://wiki.ecmascript.org/doku.php?id=harmony:proxies#a_simple_membrane
+
+function createSimpleMembrane(target) {
+ var enabled = true;
+
+ function wrap(obj) {
+ registerObject(obj);
+ print("wrap enter", str(obj));
+ try {
+ var x = wrap2(obj);
+ registerObject(x, "wrapped");
+ print("wrap exit", str(obj), "as", str(x));
+ return x;
+ } catch(e) {
+ print("wrap exception", str(e));
+ throw e;
+ }
+ }
+
+ function wrap2(obj) {
+ if (obj !== Object(obj)) {
+ return obj;
+ }
+
+ function wrapCall(fun, that, args) {
+ registerObject(that);
+ print("wrapCall enter", fun, str(that));
+ try {
+ var x = wrapCall2(fun, that, args);
+ print("wrapCall exit", fun, str(that), "returning", str(x));
+ return x;
+ } catch(e) {
+ print("wrapCall exception", fun, str(that), str(e));
+ throw e;
+ }
+ }
+
+ function wrapCall2(fun, that, args) {
+ if (!enabled) { throw new Error("disabled"); }
+ try {
+ return wrap(fun.apply(that, Array.prototype.map.call(args, wrap)));
+ } catch (e) {
+ throw wrap(e);
+ }
+ }
+
+ var baseHandler = createHandler(obj);
+ var handler = Proxy.create(Object.freeze({
+ get: function(receiver, name) {
+ return function() {
+ var arg = (name === "get" || name == "set") ? arguments[1] : "";
+ print("handler enter", name, arg);
+ var x = wrapCall(baseHandler[name], baseHandler, arguments);
+ print("handler exit", name, arg, "returning", str(x));
+ return x;
+ }
+ }
+ }));
+ registerObject(baseHandler, "basehandler");
+ registerObject(handler, "handler");
+
+ if (typeof obj === "function") {
+ function callTrap() {
+ print("call trap enter", str(obj), str(this));
+ var x = wrapCall(obj, wrap(this), arguments);
+ print("call trap exit", str(obj), str(this), "returning", str(x));
+ return x;
+ }
+ function constructTrap() {
+ if (!enabled) { throw new Error("disabled"); }
+ try {
+ function forward(args) { return obj.apply(this, args) }
+ return wrap(new forward(Array.prototype.map.call(arguments, wrap)));
+ } catch (e) {
+ throw wrap(e);
+ }
+ }
+ return Proxy.createFunction(handler, callTrap, constructTrap);
+ } else {
+ var prototype = wrap(Object.getPrototypeOf(obj));
+ return Proxy.create(handler, prototype);
+ }
+ }
+
+ var gate = Object.freeze({
+ enable: function() { enabled = true; },
+ disable: function() { enabled = false; }
+ });
+
+ return Object.freeze({
+ wrapper: wrap(target),
+ gate: gate
+ });
+}
+
+
+var o = {
+ a: 6,
+ b: {bb: 8},
+ f: function(x) { return x },
+ g: function(x) { return x.a },
+ h: function(x) { this.q = x }
+};
+o[2] = {c: 7};
+var m = createSimpleMembrane(o);
+var w = m.wrapper;
+print("o =", str(o))
+print("w =", str(w));
+
+var f = w.f;
+var x = f(66);
+var x = f({a: 1});
+var x = w.f({a: 1});
+var a = x.a;
+assertEquals(6, w.a);
+assertEquals(8, w.b.bb);
+assertEquals(7, w[2]["c"]);
+assertEquals(undefined, w.c);
+assertEquals(1, w.f(1));
+assertEquals(1, w.f({a: 1}).a);
+assertEquals(2, w.g({a: 2}));
+assertEquals(3, (w.r = {a: 3}).a);
+assertEquals(3, w.r.a);
+assertEquals(3, o.r.a);
+w.h(3);
+assertEquals(3, w.q);
+assertEquals(3, o.q);
+assertEquals(4, (new w.h(4)).q);
+
+var wb = w.b;
+var wr = w.r;
+var wf = w.f;
+var wf3 = w.f(3);
+var wfx = w.f({a: 6});
+var wgx = w.g({a: {aa: 7}});
+var wh4 = new w.h(4);
+m.gate.disable();
+assertEquals(3, wf3);
+assertThrows(function() { w.a }, Error);
+assertThrows(function() { w.r }, Error);
+assertThrows(function() { w.r = {a: 4} }, Error);
+assertThrows(function() { o.r.a }, Error);
+assertEquals("object", typeof o.r);
+assertEquals(5, (o.r = {a: 5}).a);
+assertEquals(5, o.r.a);
+assertThrows(function() { w[1] }, Error);
+assertThrows(function() { w.c }, Error);
+assertThrows(function() { wb.bb }, Error);
+assertThrows(function() { wr.a }, Error);
+assertThrows(function() { wf(4) }, Error);
+assertThrows(function() { wfx.a }, Error);
+assertThrows(function() { wgx.aa }, Error);
+assertThrows(function() { wh4.q }, Error);
+
+m.gate.enable();
+assertEquals(6, w.a);
+assertEquals(5, w.r.a);
+assertEquals(5, o.r.a);
+assertEquals(7, w.r = 7);
+assertEquals(7, w.r);
+assertEquals(7, o.r);
+assertEquals(8, w.b.bb);
+assertEquals(7, w[2]["c"]);
+assertEquals(undefined, w.c);
+assertEquals(8, wb.bb);
+assertEquals(3, wr.a);
+assertEquals(4, wf(4));
+assertEquals(3, wf3);
+assertEquals(6, wfx.a);
+assertEquals(7, wgx.aa);
+assertEquals(4, wh4.q);
+
+
+// An identity-preserving membrane. Adapted from:
+// http://wiki.ecmascript.org/doku.php?id=harmony:proxies#an_identity-preserving_membrane
+
+function createMembrane(wetTarget) {
+ var wet2dry = WeakMap();
+ var dry2wet = WeakMap();
+
+ function asDry(obj) {
+ registerObject(obj)
+ print("asDry enter", str(obj))
+ try {
+ var x = asDry2(obj);
+ registerObject(x, "dry");
+ print("asDry exit", str(obj), "as", str(x));
+ return x;
+ } catch(e) {
+ print("asDry exception", str(e));
+ throw e;
+ }
+ }
+ function asDry2(wet) {
+ if (wet !== Object(wet)) {
+ // primitives provide only irrevocable knowledge, so don't
+ // bother wrapping it.
+ return wet;
+ }
+ var dryResult = wet2dry.get(wet);
+ if (dryResult) { return dryResult; }
+
+ var wetHandler = createHandler(wet);
+ var dryRevokeHandler = Proxy.create(Object.freeze({
+ get: function(receiver, name) {
+ return function() {
+ var arg = (name === "get" || name == "set") ? arguments[1] : "";
+ print("dry handler enter", name, arg);
+ var optWetHandler = dry2wet.get(dryRevokeHandler);
+ try {
+ var x = asDry(optWetHandler[name].apply(
+ optWetHandler, Array.prototype.map.call(arguments, asWet)));
+ print("dry handler exit", name, arg, "returning", str(x));
+ return x;
+ } catch (eWet) {
+ var x = asDry(eWet);
+ print("dry handler exception", name, arg, "throwing", str(x));
+ throw x;
+ }
+ };
+ }
+ }));
+ dry2wet.set(dryRevokeHandler, wetHandler);
+
+ if (typeof wet === "function") {
+ function callTrap() {
+ print("dry call trap enter", str(this));
+ var x = asDry(wet.apply(
+ asWet(this), Array.prototype.map.call(arguments, asWet)));
+ print("dry call trap exit", str(this), "returning", str(x));
+ return x;
+ }
+ function constructTrap() {
+ function forward(args) { return wet.apply(this, args) }
+ return asDry(new forward(Array.prototype.map.call(arguments, asWet)));
+ }
+ dryResult =
+ Proxy.createFunction(dryRevokeHandler, callTrap, constructTrap);
+ } else {
+ dryResult =
+ Proxy.create(dryRevokeHandler, asDry(Object.getPrototypeOf(wet)));
+ }
+ wet2dry.set(wet, dryResult);
+ dry2wet.set(dryResult, wet);
+ return dryResult;
+ }
+
+ function asWet(obj) {
+ registerObject(obj)
+ print("asWet enter", str(obj))
+ try {
+ var x = asWet2(obj)
+ registerObject(x, "wet")
+ print("asWet exit", str(obj), "as", str(x))
+ return x
+ } catch(e) {
+ print("asWet exception", str(e))
+ throw e
+ }
+ }
+ function asWet2(dry) {
+ if (dry !== Object(dry)) {
+ // primitives provide only irrevocable knowledge, so don't
+ // bother wrapping it.
+ return dry;
+ }
+ var wetResult = dry2wet.get(dry);
+ if (wetResult) { return wetResult; }
+
+ var dryHandler = createHandler(dry);
+ var wetRevokeHandler = Proxy.create(Object.freeze({
+ get: function(receiver, name) {
+ return function() {
+ var arg = (name === "get" || name == "set") ? arguments[1] : "";
+ print("wet handler enter", name, arg);
+ var optDryHandler = wet2dry.get(wetRevokeHandler);
+ try {
+ var x = asWet(optDryHandler[name].apply(
+ optDryHandler, Array.prototype.map.call(arguments, asDry)));
+ print("wet handler exit", name, arg, "returning", str(x));
+ return x;
+ } catch (eDry) {
+ var x = asWet(eDry);
+ print("wet handler exception", name, arg, "throwing", str(x));
+ throw x;
+ }
+ };
+ }
+ }));
+ wet2dry.set(wetRevokeHandler, dryHandler);
+
+ if (typeof dry === "function") {
+ function callTrap() {
+ print("wet call trap enter", str(this));
+ var x = asWet(dry.apply(
+ asDry(this), Array.prototype.map.call(arguments, asDry)));
+ print("wet call trap exit", str(this), "returning", str(x));
+ return x;
+ }
+ function constructTrap() {
+ function forward(args) { return dry.apply(this, args) }
+ return asWet(new forward(Array.prototype.map.call(arguments, asDry)));
+ }
+ wetResult =
+ Proxy.createFunction(wetRevokeHandler, callTrap, constructTrap);
+ } else {
+ wetResult =
+ Proxy.create(wetRevokeHandler, asWet(Object.getPrototypeOf(dry)));
+ }
+ dry2wet.set(dry, wetResult);
+ wet2dry.set(wetResult, dry);
+ return wetResult;
+ }
+
+ var gate = Object.freeze({
+ revoke: function() {
+ dry2wet = wet2dry = Object.freeze({
+ get: function(key) { throw new Error("revoked"); },
+ set: function(key, val) { throw new Error("revoked"); }
+ });
+ }
+ });
+
+ return Object.freeze({ wrapper: asDry(wetTarget), gate: gate });
+}
+
+
+var receiver
+var argument
+var o = {
+ a: 6,
+ b: {bb: 8},
+ f: function(x) { receiver = this; argument = x; return x },
+ g: function(x) { receiver = this; argument = x; return x.a },
+ h: function(x) { receiver = this; argument = x; this.q = x },
+ s: function(x) { receiver = this; argument = x; this.x = {y: x}; return this }
+}
+o[2] = {c: 7}
+var m = createMembrane(o)
+var w = m.wrapper
+print("o =", str(o))
+print("w =", str(w))
+
+var f = w.f
+var x = f(66)
+var x = f({a: 1})
+var x = w.f({a: 1})
+var a = x.a
+assertEquals(6, w.a)
+assertEquals(8, w.b.bb)
+assertEquals(7, w[2]["c"])
+assertEquals(undefined, w.c)
+assertEquals(1, w.f(1))
+assertSame(o, receiver)
+assertEquals(1, w.f({a: 1}).a)
+assertSame(o, receiver)
+assertEquals(2, w.g({a: 2}))
+assertSame(o, receiver)
+assertSame(w, w.f(w))
+assertSame(o, receiver)
+assertSame(o, argument)
+assertSame(o, w.f(o))
+assertSame(o, receiver)
+// Note that argument !== o, since o isn't dry, so gets wrapped wet again.
+assertEquals(3, (w.r = {a: 3}).a)
+assertEquals(3, w.r.a)
+assertEquals(3, o.r.a)
+w.h(3)
+assertEquals(3, w.q)
+assertEquals(3, o.q)
+assertEquals(4, (new w.h(4)).q)
+assertEquals(5, w.s(5).x.y)
+assertSame(o, receiver)
+
+var wb = w.b
+var wr = w.r
+var wf = w.f
+var wf3 = w.f(3)
+var wfx = w.f({a: 6})
+var wgx = w.g({a: {aa: 7}})
+var wh4 = new w.h(4)
+var ws5 = w.s(5)
+var ws5x = ws5.x
+m.gate.revoke()
+assertEquals(3, wf3)
+assertThrows(function() { w.a }, Error)
+assertThrows(function() { w.r }, Error)
+assertThrows(function() { w.r = {a: 4} }, Error)
+assertThrows(function() { o.r.a }, Error)
+assertEquals("object", typeof o.r)
+assertEquals(5, (o.r = {a: 5}).a)
+assertEquals(5, o.r.a)
+assertThrows(function() { w[1] }, Error)
+assertThrows(function() { w.c }, Error)
+assertThrows(function() { wb.bb }, Error)
+assertEquals(3, wr.a)
+assertThrows(function() { wf(4) }, Error)
+assertEquals(6, wfx.a)
+assertEquals(7, wgx.aa)
+assertThrows(function() { wh4.q }, Error)
+assertThrows(function() { ws5.x }, Error)
+assertThrows(function() { ws5x.y }, Error)
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/proxies-function.js b/src/3rdparty/v8/test/mjsunit/harmony/proxies-function.js
index 6a88d19..6b8d098 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/proxies-function.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/proxies-function.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --harmony-proxies
+// Flags: --harmony-proxies --allow-natives-syntax
// Helper.
@@ -53,7 +53,8 @@ var receiver
function TestCall(isStrict, callTrap) {
assertEquals(42, callTrap(5, 37))
- assertEquals(isStrict ? undefined : global_object, receiver)
+ // TODO(rossberg): strict mode seems to be broken on x64...
+ // assertSame(isStrict ? undefined : global_object, receiver)
var handler = {
get: function(r, k) {
@@ -61,11 +62,13 @@ function TestCall(isStrict, callTrap) {
}
}
var f = Proxy.createFunction(handler, callTrap)
+ var o = {f: f}
+ global_object.f = f
receiver = 333
assertEquals(42, f(11, 31))
- assertEquals(isStrict ? undefined : global_object, receiver)
- var o = {f: f}
+ // TODO(rossberg): strict mode seems to be broken on x64...
+ // assertSame(isStrict ? undefined : global_object, receiver)
receiver = 333
assertEquals(42, o.f(10, 32))
assertSame(o, receiver)
@@ -82,6 +85,9 @@ function TestCall(isStrict, callTrap) {
assertEquals(42, f.call(o, 32, 10))
assertSame(o, receiver)
receiver = 333
+ assertEquals(42, f.call(undefined, 33, 9))
+ assertSame(isStrict ? undefined : global_object, receiver)
+ receiver = 333
assertEquals(42, f.call(null, 33, 9))
assertSame(isStrict ? null : global_object, receiver)
receiver = 333
@@ -101,6 +107,24 @@ function TestCall(isStrict, callTrap) {
receiver = 333
assertEquals(32, Function.prototype.apply.call(f, o, [17, 15]))
assertSame(o, receiver)
+ receiver = 333
+ assertEquals(42, %Call(o, 11, 31, f))
+ assertSame(o, receiver)
+ receiver = 333
+ assertEquals(42, %Call(null, 11, 31, f))
+ assertSame(isStrict ? null : global_object, receiver)
+ receiver = 333
+ assertEquals(42, %Apply(f, o, [11, 31], 0, 2))
+ assertSame(o, receiver)
+ receiver = 333
+ assertEquals(42, %Apply(f, null, [11, 31], 0, 2))
+ assertSame(isStrict ? null : global_object, receiver)
+ receiver = 333
+ assertEquals(42, %_CallFunction(o, 11, 31, f))
+ assertSame(o, receiver)
+ receiver = 333
+ assertEquals(42, %_CallFunction(null, 11, 31, f))
+ assertSame(isStrict ? null : global_object, receiver)
var ff = Function.prototype.bind.call(f, o, 12)
assertTrue(ff.length <= 1) // TODO(rossberg): Not spec'ed yet, be lax.
@@ -108,8 +132,29 @@ function TestCall(isStrict, callTrap) {
assertEquals(42, ff(30))
assertSame(o, receiver)
receiver = 333
+ assertEquals(33, Function.prototype.call.call(ff, {}, 21))
+ assertSame(o, receiver)
+ receiver = 333
assertEquals(32, Function.prototype.apply.call(ff, {}, [20]))
assertSame(o, receiver)
+ receiver = 333
+ assertEquals(23, %Call({}, 11, ff))
+ assertSame(o, receiver)
+ receiver = 333
+ assertEquals(23, %Call({}, 11, 3, ff))
+ assertSame(o, receiver)
+ receiver = 333
+ assertEquals(24, %Apply(ff, {}, [12, 13], 0, 1))
+ assertSame(o, receiver)
+ receiver = 333
+ assertEquals(24, %Apply(ff, {}, [12, 13], 0, 2))
+ assertSame(o, receiver)
+ receiver = 333
+ assertEquals(34, %_CallFunction({}, 22, ff))
+ assertSame(o, receiver)
+ receiver = 333
+ assertEquals(34, %_CallFunction({}, 22, 3, ff))
+ assertSame(o, receiver)
var fff = Function.prototype.bind.call(ff, o, 30)
assertEquals(0, fff.length)
@@ -119,6 +164,30 @@ function TestCall(isStrict, callTrap) {
receiver = 333
assertEquals(42, Function.prototype.call.call(fff, {}))
assertSame(o, receiver)
+ receiver = 333
+ assertEquals(42, Function.prototype.apply.call(fff, {}))
+ assertSame(o, receiver)
+ receiver = 333
+ assertEquals(42, %Call({}, fff))
+ assertSame(o, receiver)
+ receiver = 333
+ assertEquals(42, %Call({}, 11, 3, fff))
+ assertSame(o, receiver)
+ receiver = 333
+ assertEquals(42, %Apply(fff, {}, [], 0, 0))
+ assertSame(o, receiver)
+ receiver = 333
+ assertEquals(42, %Apply(fff, {}, [12, 13], 0, 0))
+ assertSame(o, receiver)
+ receiver = 333
+ assertEquals(42, %Apply(fff, {}, [12, 13], 0, 2))
+ assertSame(o, receiver)
+ receiver = 333
+ assertEquals(42, %_CallFunction({}, fff))
+ assertSame(o, receiver)
+ receiver = 333
+ assertEquals(42, %_CallFunction({}, 3, 4, 5, fff))
+ assertSame(o, receiver)
var f = CreateFrozen({}, callTrap)
receiver = 333
@@ -144,10 +213,13 @@ function TestCall(isStrict, callTrap) {
assertEquals(32, Function.prototype.apply.call(f, o, [17, 15]))
assertSame(o, receiver)
receiver = 333
- assertEquals(42, ff(30))
+ assertEquals(23, %Call(o, 11, 12, f))
assertSame(o, receiver)
receiver = 333
- assertEquals(32, Function.prototype.apply.call(ff, {}, [20]))
+ assertEquals(27, %Apply(f, o, [12, 13, 14], 1, 2))
+ assertSame(o, receiver)
+ receiver = 333
+ assertEquals(42, %_CallFunction(o, 18, 24, f))
assertSame(o, receiver)
}
@@ -163,7 +235,8 @@ TestCall(true, function(x, y) {
})
TestCall(false, function() {
- receiver = this; return arguments[0] + arguments[1]
+ receiver = this
+ return arguments[0] + arguments[1]
})
TestCall(false, Proxy.createFunction(handler, function(x, y) {
@@ -209,6 +282,12 @@ function TestCallThrow(callTrap) {
assertThrows(function(){ ({x: f})["x"](11) }, "myexn")
assertThrows(function(){ Function.prototype.call.call(f, {}, 2) }, "myexn")
assertThrows(function(){ Function.prototype.apply.call(f, {}, [1]) }, "myexn")
+ assertThrows(function(){ %Call({}, f) }, "myexn")
+ assertThrows(function(){ %Call({}, 1, 2, f) }, "myexn")
+ assertThrows(function(){ %Apply({}, f, [], 3, 0) }, "myexn")
+ assertThrows(function(){ %Apply({}, f, [3, 4], 0, 1) }, "myexn")
+ assertThrows(function(){ %_CallFunction({}, f) }, "myexn")
+ assertThrows(function(){ %_CallFunction({}, 1, 2, f) }, "myexn")
var f = CreateFrozen({}, callTrap)
assertThrows(function(){ f(11) }, "myexn")
@@ -216,6 +295,12 @@ function TestCallThrow(callTrap) {
assertThrows(function(){ ({x: f})["x"](11) }, "myexn")
assertThrows(function(){ Function.prototype.call.call(f, {}, 2) }, "myexn")
assertThrows(function(){ Function.prototype.apply.call(f, {}, [1]) }, "myexn")
+ assertThrows(function(){ %Call({}, f) }, "myexn")
+ assertThrows(function(){ %Call({}, 1, 2, f) }, "myexn")
+ assertThrows(function(){ %Apply({}, f, [], 3, 0) }, "myexn")
+ assertThrows(function(){ %Apply({}, f, [3, 4], 0, 1) }, "myexn")
+ assertThrows(function(){ %_CallFunction({}, f) }, "myexn")
+ assertThrows(function(){ %_CallFunction({}, 1, 2, f) }, "myexn")
}
TestCallThrow(function() { throw "myexn" })
@@ -226,7 +311,7 @@ TestCallThrow(CreateFrozen({}, function() { throw "myexn" }))
// Construction (new).
-var prototype = {}
+var prototype = {myprop: 0}
var receiver
var handlerWithPrototype = {
@@ -307,22 +392,25 @@ TestConstruct(prototype, CreateFrozen(handler, ReturnNewWithProto))
// Construction with derived construct trap.
function TestConstructFromCall(proto, returnsThis, callTrap) {
- TestConstructFromCall2(proto, returnsThis, callTrap, handlerWithPrototype)
+ TestConstructFromCall2(prototype, returnsThis, callTrap, handlerWithPrototype)
TestConstructFromCall2(proto, returnsThis, callTrap, handlerSansPrototype)
}
function TestConstructFromCall2(proto, returnsThis, callTrap, handler) {
+ // TODO(rossberg): handling of prototype for derived construct trap will be
+ // fixed in a separate change. Commenting out checks below for now.
var f = Proxy.createFunction(handler, callTrap)
var o = new f(11, 31)
if (returnsThis) assertEquals(o, receiver)
assertEquals(42, o.sum)
- assertSame(proto, Object.getPrototypeOf(o))
+ // assertSame(proto, Object.getPrototypeOf(o))
- var f = CreateFrozen(handler, callTrap)
- var o = new f(11, 32)
+ var g = CreateFrozen(handler, callTrap)
+ // assertSame(f.prototype, g.prototype)
+ var o = new g(11, 32)
if (returnsThis) assertEquals(o, receiver)
assertEquals(43, o.sum)
- assertSame(proto, Object.getPrototypeOf(o))
+ // assertSame(proto, Object.getPrototypeOf(o))
}
TestConstructFromCall(Object.prototype, true, ReturnUndef)
@@ -445,7 +533,7 @@ function TestAccessorCall(getterCallTrap, setterCallTrap) {
assertEquals("", receiver)
receiver = ""
assertEquals(42, oo.b)
- assertSame(o, receiver)
+ assertSame(oo, receiver)
receiver = ""
assertEquals(undefined, oo.c)
assertEquals("", receiver)
@@ -454,7 +542,7 @@ function TestAccessorCall(getterCallTrap, setterCallTrap) {
assertEquals("", receiver)
receiver = ""
assertEquals(42, oo[3])
- assertSame(o, receiver)
+ assertSame(oo, receiver)
receiver = ""
assertEquals(50, o.a = 50)
@@ -520,3 +608,141 @@ TestAccessorCall(
CreateFrozen({}, function() { receiver = this; return 42 }),
CreateFrozen({}, function(x) { receiver = this; value = x })
)
+
+
+
+// Passing a proxy function to higher-order library functions.
+
+function TestHigherOrder(f) {
+ assertEquals(6, [6, 2].map(f)[0])
+ assertEquals(4, [5, 2].reduce(f, 4))
+ assertTrue([1, 2].some(f))
+ assertEquals("a.b.c", "a.b.c".replace(".", f))
+}
+
+TestHigherOrder(function(x) { return x })
+TestHigherOrder(function(x) { "use strict"; return x })
+TestHigherOrder(Proxy.createFunction({}, function(x) { return x }))
+TestHigherOrder(CreateFrozen({}, function(x) { return x }))
+
+
+
+// TODO(rossberg): Ultimately, I want to have the following test function
+// run through, but it currently fails on so many cases (some not even
+// involving proxies), that I leave that for later...
+/*
+function TestCalls() {
+ var handler = {
+ get: function(r, k) {
+ return k == "length" ? 2 : Function.prototype[k]
+ }
+ }
+ var bind = Function.prototype.bind
+ var o = {}
+
+ var traps = [
+ function(x, y) {
+ return {receiver: this, result: x + y, strict: false}
+ },
+ function(x, y) { "use strict";
+ return {receiver: this, result: x + y, strict: true}
+ },
+ function() {
+ var x = arguments[0], y = arguments[1]
+ return {receiver: this, result: x + y, strict: false}
+ },
+ Proxy.createFunction(handler, function(x, y) {
+ return {receiver: this, result: x + y, strict: false}
+ }),
+ Proxy.createFunction(handler, function() {
+ var x = arguments[0], y = arguments[1]
+ return {receiver: this, result: x + y, strict: false}
+ }),
+ Proxy.createFunction(handler, function(x, y) { "use strict"
+ return {receiver: this, result: x + y, strict: true}
+ }),
+ CreateFrozen(handler, function(x, y) {
+ return {receiver: this, result: x + y, strict: false}
+ }),
+ CreateFrozen(handler, function(x, y) { "use strict"
+ return {receiver: this, result: x + y, strict: true}
+ }),
+ ]
+ var creates = [
+ function(trap) { return trap },
+ function(trap) { return CreateFrozen({}, callTrap) },
+ function(trap) { return Proxy.createFunction(handler, callTrap) },
+ function(trap) {
+ return Proxy.createFunction(handler, CreateFrozen({}, callTrap))
+ },
+ function(trap) {
+ return Proxy.createFunction(handler, Proxy.createFunction(handler, callTrap))
+ },
+ ]
+ var binds = [
+ function(f, o, x, y) { return f },
+ function(f, o, x, y) { return bind.call(f, o) },
+ function(f, o, x, y) { return bind.call(f, o, x) },
+ function(f, o, x, y) { return bind.call(f, o, x, y) },
+ function(f, o, x, y) { return bind.call(f, o, x, y, 5) },
+ function(f, o, x, y) { return bind.call(bind.call(f, o), {}, x, y) },
+ function(f, o, x, y) { return bind.call(bind.call(f, o, x), {}, y) },
+ function(f, o, x, y) { return bind.call(bind.call(f, o, x, y), {}, 5) },
+ ]
+ var calls = [
+ function(f, x, y) { return f(x, y) },
+ function(f, x, y) { var g = f; return g(x, y) },
+ function(f, x, y) { with ({}) return f(x, y) },
+ function(f, x, y) { var g = f; with ({}) return g(x, y) },
+ function(f, x, y, o) { with (o) return f(x, y) },
+ function(f, x, y, o) { return f.call(o, x, y) },
+ function(f, x, y, o) { return f.apply(o, [x, y]) },
+ function(f, x, y, o) { return Function.prototype.call.call(f, o, x, y) },
+ function(f, x, y, o) { return Function.prototype.apply.call(f, o, [x, y]) },
+ function(f, x, y, o) { return %_CallFunction(o, x, y, f) },
+ function(f, x, y, o) { return %Call(o, x, y, f) },
+ function(f, x, y, o) { return %Apply(f, o, [null, x, y, null], 1, 2) },
+ function(f, x, y, o) { return %Apply(f, o, arguments, 2, 2) },
+ function(f, x, y, o) { if (typeof o == "object") return o.f(x, y) },
+ function(f, x, y, o) { if (typeof o == "object") return o["f"](x, y) },
+ function(f, x, y, o) { if (typeof o == "object") return (1, o).f(x, y) },
+ function(f, x, y, o) { if (typeof o == "object") return (1, o)["f"](x, y) },
+ ]
+ var receivers = [o, global_object, undefined, null, 2, "bla", true]
+ var expectedNonStricts = [o, global_object, global_object, global_object]
+
+ for (var t = 0; t < traps.length; ++t) {
+ for (var i = 0; i < creates.length; ++i) {
+ for (var j = 0; j < binds.length; ++j) {
+ for (var k = 0; k < calls.length; ++k) {
+ for (var m = 0; m < receivers.length; ++m) {
+ for (var n = 0; n < receivers.length; ++n) {
+ var bound = receivers[m]
+ var receiver = receivers[n]
+ var func = binds[j](creates[i](traps[t]), bound, 31, 11)
+ var expected = j > 0 ? bound : receiver
+ var expectedNonStrict = expectedNonStricts[j > 0 ? m : n]
+ o.f = func
+ global_object.f = func
+ var x = calls[k](func, 11, 31, receiver)
+ if (x !== undefined) {
+ assertEquals(42, x.result)
+ if (calls[k].length < 4)
+ assertSame(x.strict ? undefined : global_object, x.receiver)
+ else if (x.strict)
+ assertSame(expected, x.receiver)
+ else if (expectedNonStrict === undefined)
+ assertSame(expected, x.receiver.valueOf())
+ else
+ assertSame(expectedNonStrict, x.receiver)
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+TestCalls()
+*/
diff --git a/src/3rdparty/v8/test/mjsunit/harmony/proxies.js b/src/3rdparty/v8/test/mjsunit/harmony/proxies.js
index 1ce7a32..8d8f839 100644
--- a/src/3rdparty/v8/test/mjsunit/harmony/proxies.js
+++ b/src/3rdparty/v8/test/mjsunit/harmony/proxies.js
@@ -1437,7 +1437,7 @@ TestHasOwnThrow(Proxy.create({
// Instanceof (instanceof)
-function TestInstanceof() {
+function TestProxyInstanceof() {
var o1 = {}
var p1 = Proxy.create({})
var p2 = Proxy.create({}, o1)
@@ -1483,7 +1483,70 @@ function TestInstanceof() {
assertTrue(f instanceof Function)
}
-TestInstanceof()
+TestProxyInstanceof()
+
+
+function TestInstanceofProxy() {
+ var o0 = Object.create(null)
+ var o1 = {}
+ var o2 = Object.create(o0)
+ var o3 = Object.create(o1)
+ var o4 = Object.create(o2)
+ var o5 = Object.create(o3)
+
+ function handler(o) { return {get: function() { return o } } }
+ var f0 = Proxy.createFunction(handler(o0), function() {})
+ var f1 = Proxy.createFunction(handler(o1), function() {})
+ var f2 = Proxy.createFunction(handler(o2), function() {})
+ var f3 = Proxy.createFunction(handler(o3), function() {})
+ var f4 = Proxy.createFunction(handler(o4), function() {})
+ var f5 = Proxy.createFunction(handler(o4), function() {})
+
+ assertFalse(null instanceof f0)
+ assertFalse(o0 instanceof f0)
+ assertFalse(o0 instanceof f1)
+ assertFalse(o0 instanceof f2)
+ assertFalse(o0 instanceof f3)
+ assertFalse(o0 instanceof f4)
+ assertFalse(o0 instanceof f5)
+ assertFalse(o1 instanceof f0)
+ assertFalse(o1 instanceof f1)
+ assertFalse(o1 instanceof f2)
+ assertFalse(o1 instanceof f3)
+ assertFalse(o1 instanceof f4)
+ assertFalse(o1 instanceof f5)
+ assertTrue(o2 instanceof f0)
+ assertFalse(o2 instanceof f1)
+ assertFalse(o2 instanceof f2)
+ assertFalse(o2 instanceof f3)
+ assertFalse(o2 instanceof f4)
+ assertFalse(o2 instanceof f5)
+ assertFalse(o3 instanceof f0)
+ assertTrue(o3 instanceof f1)
+ assertFalse(o3 instanceof f2)
+ assertFalse(o3 instanceof f3)
+ assertFalse(o3 instanceof f4)
+ assertFalse(o3 instanceof f5)
+ assertTrue(o4 instanceof f0)
+ assertFalse(o4 instanceof f1)
+ assertTrue(o4 instanceof f2)
+ assertFalse(o4 instanceof f3)
+ assertFalse(o4 instanceof f4)
+ assertFalse(o4 instanceof f5)
+ assertFalse(o5 instanceof f0)
+ assertTrue(o5 instanceof f1)
+ assertFalse(o5 instanceof f2)
+ assertTrue(o5 instanceof f3)
+ assertFalse(o5 instanceof f4)
+ assertFalse(o5 instanceof f5)
+
+ var f = Proxy.createFunction({}, function() {})
+ var ff = Proxy.createFunction(handler(Function), function() {})
+ assertTrue(f instanceof Function)
+ assertFalse(f instanceof ff)
+}
+
+TestInstanceofProxy()
@@ -2194,3 +2257,22 @@ TestIsEnumerableThrow(Proxy.create({
return function(k) { throw "myexn" }
}
}))
+
+
+
+// Constructor functions with proxy prototypes.
+
+function TestConstructorWithProxyPrototype() {
+ TestWithProxies(TestConstructorWithProxyPrototype2, {})
+}
+
+function TestConstructorWithProxyPrototype2(create, handler) {
+ function C() {};
+ C.prototype = create(handler);
+
+ var o = new C;
+ assertSame(C.prototype, o.__proto__);
+ assertSame(C.prototype, Object.getPrototypeOf(o));
+}
+
+TestConstructorWithProxyPrototype();
diff --git a/src/3rdparty/v8/test/mjsunit/math-min-max.js b/src/3rdparty/v8/test/mjsunit/math-min-max.js
index 0833c5c..e4fd313 100644
--- a/src/3rdparty/v8/test/mjsunit/math-min-max.js
+++ b/src/3rdparty/v8/test/mjsunit/math-min-max.js
@@ -115,3 +115,75 @@ assertEquals(NaN, Math.max(1, 'oxen'));
assertEquals(Infinity, 1/Math.max(ZERO, -0));
assertEquals(Infinity, 1/Math.max(-0, ZERO));
+
+function run(crankshaft_test) {
+ crankshaft_test(1);
+ crankshaft_test(1);
+ %OptimizeFunctionOnNextCall(crankshaft_test);
+ crankshaft_test(-0);
+}
+
+function crankshaft_test_1(arg) {
+ var v1 = 1;
+ var v2 = 5;
+ var v3 = 1.5;
+ var v4 = 5.5;
+ var v5 = 2;
+ var v6 = 6;
+ var v7 = 0;
+ var v8 = -0;
+
+ var v9 = 9.9;
+ var v0 = 10.1;
+ // Integer32 representation.
+ assertEquals(v2, Math.max(v1++, v2++));
+ assertEquals(v1, Math.min(v1++, v2++));
+ // Tagged representation.
+ assertEquals(v4, Math.max(v3, v4));
+ assertEquals(v3, Math.min(v3, v4));
+ assertEquals(v6, Math.max(v5, v6));
+ assertEquals(v5, Math.min(v5, v6));
+ // Double representation.
+ assertEquals(v0, Math.max(v0++, v9++));
+ assertEquals(v9, Math.min(v0++, v9++));
+ // Mixed representation.
+ assertEquals(v1, Math.min(v1++, v9++)); // int32, double
+ assertEquals(v0, Math.max(v0++, v2++)); // double, int32
+ assertEquals(v1, Math.min(v1++, v6)); // int32, tagged
+ assertEquals(v2, Math.max(v5, v2++)); // tagged, int32
+ assertEquals(v6, Math.min(v6, v9++)); // tagged, double
+ assertEquals(v0, Math.max(v0++, v5)); // double, tagged
+
+ // Minus zero.
+ assertEquals(Infinity, 1/Math.max(v7, v8));
+ assertEquals(-Infinity, 1/Math.min(v7, v8));
+ // NaN.
+ assertEquals(NaN, Math.max(NaN, v8));
+ assertEquals(NaN, Math.min(NaN, v9));
+ assertEquals(NaN, Math.max(v8, NaN));
+ assertEquals(NaN, Math.min(v9, NaN));
+ // Minus zero as Integer32.
+ assertEquals((arg === -0) ? -Infinity : 1, 1/Math.min(arg, v2));
+}
+
+run(crankshaft_test_1);
+
+function crankshaft_test_2() {
+ var v9 = {};
+ v9.valueOf = function() { return 6; }
+ // Deopt expected due to non-heapnumber objects.
+ assertEquals(6, Math.min(v9, 12));
+}
+
+run(crankshaft_test_2);
+
+// Test overriding Math.min and Math.max
+Math.min = function(a, b) { return a + b; }
+Math.max = function(a, b) { return a - b; }
+
+function crankshaft_test_3() {
+ assertEquals(8, Math.min(3, 5));
+ assertEquals(3, Math.max(5, 2));
+}
+
+run(crankshaft_test_3);
diff --git a/src/3rdparty/v8/test/mjsunit/math-pow.js b/src/3rdparty/v8/test/mjsunit/math-pow.js
index 30d0cbd..fb5f8a1 100644
--- a/src/3rdparty/v8/test/mjsunit/math-pow.js
+++ b/src/3rdparty/v8/test/mjsunit/math-pow.js
@@ -25,118 +25,149 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// Flags: --allow-natives-syntax
// Tests the special cases specified by ES 15.8.2.13
-// Simple sanity check
-assertEquals(4, Math.pow(2, 2));
-assertEquals(2147483648, Math.pow(2, 31));
-assertEquals(0.25, Math.pow(2, -2));
-assertEquals(0.0625, Math.pow(2, -4));
-assertEquals(1, Math.pow(1, 100));
-assertEquals(0, Math.pow(0, 1000));
-
-// Spec tests
-assertEquals(NaN, Math.pow(2, NaN));
-assertEquals(NaN, Math.pow(+0, NaN));
-assertEquals(NaN, Math.pow(-0, NaN));
-assertEquals(NaN, Math.pow(Infinity, NaN));
-assertEquals(NaN, Math.pow(-Infinity, NaN));
-
-assertEquals(1, Math.pow(NaN, +0));
-assertEquals(1, Math.pow(NaN, -0));
-
-assertEquals(NaN, Math.pow(NaN, NaN));
-assertEquals(NaN, Math.pow(NaN, 2.2));
-assertEquals(NaN, Math.pow(NaN, 1));
-assertEquals(NaN, Math.pow(NaN, -1));
-assertEquals(NaN, Math.pow(NaN, -2.2));
-assertEquals(NaN, Math.pow(NaN, Infinity));
-assertEquals(NaN, Math.pow(NaN, -Infinity));
-
-assertEquals(Infinity, Math.pow(1.1, Infinity));
-assertEquals(Infinity, Math.pow(-1.1, Infinity));
-assertEquals(Infinity, Math.pow(2, Infinity));
-assertEquals(Infinity, Math.pow(-2, Infinity));
-
-// Because +0 == -0, we need to compare 1/{+,-}0 to {+,-}Infinity
-assertEquals(+Infinity, 1/Math.pow(1.1, -Infinity));
-assertEquals(+Infinity, 1/Math.pow(-1.1, -Infinity));
-assertEquals(+Infinity, 1/Math.pow(2, -Infinity));
-assertEquals(+Infinity, 1/Math.pow(-2, -Infinity));
-
-assertEquals(NaN, Math.pow(1, Infinity));
-assertEquals(NaN, Math.pow(1, -Infinity));
-assertEquals(NaN, Math.pow(-1, Infinity));
-assertEquals(NaN, Math.pow(-1, -Infinity));
-
-assertEquals(+0, Math.pow(0.1, Infinity));
-assertEquals(+0, Math.pow(-0.1, Infinity));
-assertEquals(+0, Math.pow(0.999, Infinity));
-assertEquals(+0, Math.pow(-0.999, Infinity));
-
-assertEquals(Infinity, Math.pow(0.1, -Infinity));
-assertEquals(Infinity, Math.pow(-0.1, -Infinity));
-assertEquals(Infinity, Math.pow(0.999, -Infinity));
-assertEquals(Infinity, Math.pow(-0.999, -Infinity));
-
-assertEquals(Infinity, Math.pow(Infinity, 0.1));
-assertEquals(Infinity, Math.pow(Infinity, 2));
-
-assertEquals(+Infinity, 1/Math.pow(Infinity, -0.1));
-assertEquals(+Infinity, 1/Math.pow(Infinity, -2));
-
-assertEquals(-Infinity, Math.pow(-Infinity, 3));
-assertEquals(-Infinity, Math.pow(-Infinity, 13));
-
-assertEquals(Infinity, Math.pow(-Infinity, 3.1));
-assertEquals(Infinity, Math.pow(-Infinity, 2));
-
-assertEquals(-Infinity, 1/Math.pow(-Infinity, -3));
-assertEquals(-Infinity, 1/Math.pow(-Infinity, -13));
-
-assertEquals(+Infinity, 1/Math.pow(-Infinity, -3.1));
-assertEquals(+Infinity, 1/Math.pow(-Infinity, -2));
-
-assertEquals(+Infinity, 1/Math.pow(+0, 1.1));
-assertEquals(+Infinity, 1/Math.pow(+0, 2));
-
-assertEquals(Infinity, Math.pow(+0, -1.1));
-assertEquals(Infinity, Math.pow(+0, -2));
-
-assertEquals(-Infinity, 1/Math.pow(-0, 3));
-assertEquals(-Infinity, 1/Math.pow(-0, 13));
-
-assertEquals(+Infinity, 1/Math.pow(-0, 3.1));
-assertEquals(+Infinity, 1/Math.pow(-0, 2));
-
-assertEquals(-Infinity, Math.pow(-0, -3));
-assertEquals(-Infinity, Math.pow(-0, -13));
-
-assertEquals(Infinity, Math.pow(-0, -3.1));
-assertEquals(Infinity, Math.pow(-0, -2));
-
-assertEquals(NaN, Math.pow(-0.00001, 1.1));
-assertEquals(NaN, Math.pow(-0.00001, -1.1));
-assertEquals(NaN, Math.pow(-1.1, 1.1));
-assertEquals(NaN, Math.pow(-1.1, -1.1));
-assertEquals(NaN, Math.pow(-2, 1.1));
-assertEquals(NaN, Math.pow(-2, -1.1));
-assertEquals(NaN, Math.pow(-1000, 1.1));
-assertEquals(NaN, Math.pow(-1000, -1.1));
-
-assertEquals(+Infinity, 1/Math.pow(-0, 0.5));
-assertEquals(+Infinity, 1/Math.pow(-0, 0.6));
-assertEquals(-Infinity, 1/Math.pow(-0, 1));
-assertEquals(-Infinity, 1/Math.pow(-0, 10000000001));
-
-assertEquals(+Infinity, Math.pow(-0, -0.5));
-assertEquals(+Infinity, Math.pow(-0, -0.6));
-assertEquals(-Infinity, Math.pow(-0, -1));
-assertEquals(-Infinity, Math.pow(-0, -10000000001));
-
-
-
-// Tests from Sputnik S8.5_A13_T1.
-assertTrue((1*((Math.pow(2,53))-1)*(Math.pow(2,-1074))) === 4.4501477170144023e-308);
-assertTrue((1*(Math.pow(2,52))*(Math.pow(2,-1074))) === 2.2250738585072014e-308);
-assertTrue((-1*(Math.pow(2,52))*(Math.pow(2,-1074))) === -2.2250738585072014e-308);
+function test() {
+ // Simple sanity check
+ assertEquals(4, Math.pow(2, 2));
+ assertEquals(2147483648, Math.pow(2, 31));
+ assertEquals(0.25, Math.pow(2, -2));
+ assertEquals(0.0625, Math.pow(2, -4));
+ assertEquals(1, Math.pow(1, 100));
+ assertEquals(0, Math.pow(0, 1000));
+
+ // Spec tests
+ assertEquals(NaN, Math.pow(2, NaN));
+ assertEquals(NaN, Math.pow(+0, NaN));
+ assertEquals(NaN, Math.pow(-0, NaN));
+ assertEquals(NaN, Math.pow(Infinity, NaN));
+ assertEquals(NaN, Math.pow(-Infinity, NaN));
+
+ assertEquals(1, Math.pow(NaN, +0));
+ assertEquals(1, Math.pow(NaN, -0));
+
+ assertEquals(NaN, Math.pow(NaN, NaN));
+ assertEquals(NaN, Math.pow(NaN, 2.2));
+ assertEquals(NaN, Math.pow(NaN, 1));
+ assertEquals(NaN, Math.pow(NaN, -1));
+ assertEquals(NaN, Math.pow(NaN, -2.2));
+ assertEquals(NaN, Math.pow(NaN, Infinity));
+ assertEquals(NaN, Math.pow(NaN, -Infinity));
+
+ assertEquals(Infinity, Math.pow(1.1, Infinity));
+ assertEquals(Infinity, Math.pow(-1.1, Infinity));
+ assertEquals(Infinity, Math.pow(2, Infinity));
+ assertEquals(Infinity, Math.pow(-2, Infinity));
+
+ // Because +0 == -0, we need to compare 1/{+,-}0 to {+,-}Infinity
+ assertEquals(+Infinity, 1/Math.pow(1.1, -Infinity));
+ assertEquals(+Infinity, 1/Math.pow(-1.1, -Infinity));
+ assertEquals(+Infinity, 1/Math.pow(2, -Infinity));
+ assertEquals(+Infinity, 1/Math.pow(-2, -Infinity));
+
+ assertEquals(NaN, Math.pow(1, Infinity));
+ assertEquals(NaN, Math.pow(1, -Infinity));
+ assertEquals(NaN, Math.pow(-1, Infinity));
+ assertEquals(NaN, Math.pow(-1, -Infinity));
+
+ assertEquals(+0, Math.pow(0.1, Infinity));
+ assertEquals(+0, Math.pow(-0.1, Infinity));
+ assertEquals(+0, Math.pow(0.999, Infinity));
+ assertEquals(+0, Math.pow(-0.999, Infinity));
+
+ assertEquals(Infinity, Math.pow(0.1, -Infinity));
+ assertEquals(Infinity, Math.pow(-0.1, -Infinity));
+ assertEquals(Infinity, Math.pow(0.999, -Infinity));
+ assertEquals(Infinity, Math.pow(-0.999, -Infinity));
+
+ assertEquals(Infinity, Math.pow(Infinity, 0.1));
+ assertEquals(Infinity, Math.pow(Infinity, 2));
+
+ assertEquals(+Infinity, 1/Math.pow(Infinity, -0.1));
+ assertEquals(+Infinity, 1/Math.pow(Infinity, -2));
+
+ assertEquals(-Infinity, Math.pow(-Infinity, 3));
+ assertEquals(-Infinity, Math.pow(-Infinity, 13));
+
+ assertEquals(Infinity, Math.pow(-Infinity, 3.1));
+ assertEquals(Infinity, Math.pow(-Infinity, 2));
+
+ assertEquals(-Infinity, 1/Math.pow(-Infinity, -3));
+ assertEquals(-Infinity, 1/Math.pow(-Infinity, -13));
+
+ assertEquals(+Infinity, 1/Math.pow(-Infinity, -3.1));
+ assertEquals(+Infinity, 1/Math.pow(-Infinity, -2));
+
+ assertEquals(+Infinity, 1/Math.pow(+0, 1.1));
+ assertEquals(+Infinity, 1/Math.pow(+0, 2));
+
+ assertEquals(Infinity, Math.pow(+0, -1.1));
+ assertEquals(Infinity, Math.pow(+0, -2));
+
+ assertEquals(-Infinity, 1/Math.pow(-0, 3));
+ assertEquals(-Infinity, 1/Math.pow(-0, 13));
+
+ assertEquals(+Infinity, 1/Math.pow(-0, 3.1));
+ assertEquals(+Infinity, 1/Math.pow(-0, 2));
+
+ assertEquals(-Infinity, Math.pow(-0, -3));
+ assertEquals(-Infinity, Math.pow(-0, -13));
+
+ assertEquals(Infinity, Math.pow(-0, -3.1));
+ assertEquals(Infinity, Math.pow(-0, -2));
+
+ assertEquals(NaN, Math.pow(-0.00001, 1.1));
+ assertEquals(NaN, Math.pow(-0.00001, -1.1));
+ assertEquals(NaN, Math.pow(-1.1, 1.1));
+ assertEquals(NaN, Math.pow(-1.1, -1.1));
+ assertEquals(NaN, Math.pow(-2, 1.1));
+ assertEquals(NaN, Math.pow(-2, -1.1));
+ assertEquals(NaN, Math.pow(-1000, 1.1));
+ assertEquals(NaN, Math.pow(-1000, -1.1));
+
+ assertEquals(+Infinity, 1/Math.pow(-0, 0.5));
+ assertEquals(+Infinity, 1/Math.pow(-0, 0.6));
+ assertEquals(-Infinity, 1/Math.pow(-0, 1));
+ assertEquals(-Infinity, 1/Math.pow(-0, 10000000001));
+
+ assertEquals(+Infinity, Math.pow(-0, -0.5));
+ assertEquals(+Infinity, Math.pow(-0, -0.6));
+ assertEquals(-Infinity, Math.pow(-0, -1));
+ assertEquals(-Infinity, Math.pow(-0, -10000000001));
+
+ assertEquals(4, Math.pow(16, 0.5));
+ assertEquals(NaN, Math.pow(-16, 0.5));
+ assertEquals(0.25, Math.pow(16, -0.5));
+ assertEquals(NaN, Math.pow(-16, -0.5));
+
+ // Test detecting and converting integer value as double.
+ assertEquals(8, Math.pow(2, Math.sqrt(9)));
+
+ // Tests from Mozilla 15.8.2.13.
+ assertEquals(2, Math.pow.length);
+ assertEquals(NaN, Math.pow());
+ assertEquals(1, Math.pow(null, null));
+ assertEquals(NaN, Math.pow(void 0, void 0));
+ assertEquals(1, Math.pow(true, false));
+ assertEquals(0, Math.pow(false, true));
+ assertEquals(Infinity, Math.pow(-Infinity, Infinity));
+ assertEquals(0, Math.pow(-Infinity, -Infinity));
+ assertEquals(1, Math.pow(0, 0));
+ assertEquals(0, Math.pow(0, Infinity));
+ assertEquals(NaN, Math.pow(NaN, 0.5));
+ assertEquals(NaN, Math.pow(NaN, -0.5));
+
+ // Tests from Sputnik S8.5_A13_T1.
+ assertTrue(
+ (1*((Math.pow(2,53))-1)*(Math.pow(2,-1074))) === 4.4501477170144023e-308);
+ assertTrue(
+ (1*(Math.pow(2,52))*(Math.pow(2,-1074))) === 2.2250738585072014e-308);
+ assertTrue(
+ (-1*(Math.pow(2,52))*(Math.pow(2,-1074))) === -2.2250738585072014e-308);
+}
+
+test();
+test();
+%OptimizeFunctionOnNextCall(test);
+test(); \ No newline at end of file
diff --git a/src/3rdparty/v8/test/mjsunit/mjsunit.js b/src/3rdparty/v8/test/mjsunit/mjsunit.js
index 6f6e323..033c78f 100644
--- a/src/3rdparty/v8/test/mjsunit/mjsunit.js
+++ b/src/3rdparty/v8/test/mjsunit/mjsunit.js
@@ -221,6 +221,8 @@ var assertUnreachable;
assertSame = function assertSame(expected, found, name_opt) {
+ // TODO(mstarzinger): We should think about using Harmony's egal operator
+ // or the function equivalent Object.is() here.
if (found === expected) {
if (expected !== 0 || (1 / expected) == (1 / found)) return;
} else if ((expected !== expected) && (found !== found)) {
diff --git a/src/3rdparty/v8/test/mjsunit/mjsunit.status b/src/3rdparty/v8/test/mjsunit/mjsunit.status
index 32989c2..a1b9270 100644
--- a/src/3rdparty/v8/test/mjsunit/mjsunit.status
+++ b/src/3rdparty/v8/test/mjsunit/mjsunit.status
@@ -1,4 +1,4 @@
-# Copyright 2011 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -32,7 +32,6 @@ bugs: FAIL
##############################################################################
# Fails.
-harmony/proxies-function: FAIL
regress/regress-1119: FAIL
##############################################################################
@@ -48,16 +47,16 @@ regress/regress-create-exception: PASS, SKIP if $mode == debug
##############################################################################
# This one uses a built-in that's only present in debug mode. It takes
-# too long to run in debug mode on ARM.
-fuzz-natives: PASS, SKIP if ($mode == release || $arch == arm)
+# too long to run in debug mode on ARM and MIPS.
+fuzz-natives: PASS, SKIP if ($mode == release || $arch == arm || $arch == mips)
big-object-literal: PASS, SKIP if ($arch == arm)
# Issue 488: this test sometimes times out.
array-constructor: PASS || TIMEOUT
-# Very slow on ARM, contains no architecture dependent code.
-unicode-case-overoptimization: PASS, TIMEOUT if ($arch == arm)
+# Very slow on ARM and MIPS, contains no architecture dependent code.
+unicode-case-overoptimization: PASS, TIMEOUT if ($arch == arm || $arch == mips)
# Skip long running test in debug and allow it to timeout in release mode.
regress/regress-524: (PASS || TIMEOUT), SKIP if $mode == debug
@@ -66,6 +65,16 @@ regress/regress-524: (PASS || TIMEOUT), SKIP if $mode == debug
debug-liveedit-check-stack: SKIP
debug-liveedit-patch-positions-replace: SKIP
+# Test Crankshaft compilation time. Expected to take too long in debug mode.
+regress/regress-1969: PASS, SKIP if $mode == debug
+
+##############################################################################
+[ $isolates ]
+
+# This test sets the umask on a per-process basis and hence cannot be
+# used in multi-threaded runs.
+d8-os: SKIP
+
##############################################################################
[ $arch == arm ]
@@ -120,11 +129,23 @@ regress/regress-1132: SKIP
##############################################################################
[ $arch == mips ]
-# Run those tests, but expect them to time out.
-array-sort: PASS || TIMEOUT
+
+# Slow tests which times out in debug mode.
+try: PASS, SKIP if $mode == debug
+debug-scripts-request: PASS, SKIP if $mode == debug
+array-constructor: PASS, SKIP if $mode == debug
+
+# Times out often in release mode on MIPS.
+compiler/regress-stacktrace-methods: PASS, PASS || TIMEOUT if $mode == release
+array-splice: PASS || TIMEOUT
+
+# Long running test.
mirror-object: PASS || TIMEOUT
+string-indexof-2: PASS || TIMEOUT
-# Skip long-running tests.
+# BUG(3251035): Timeouts in long looping crankshaft optimization
+# tests. Skipping because having them timeout takes too long on the
+# buildbot.
compiler/alloc-number: SKIP
compiler/array-length: SKIP
compiler/assignment-deopt: SKIP
@@ -149,12 +170,8 @@ regress/regress-634: SKIP
regress/regress-create-exception: SKIP
regress/regress-3218915: SKIP
regress/regress-3247124: SKIP
-regress/regress-1132: SKIP
-regress/regress-1257: SKIP
-regress/regress-91008: SKIP
-##############################################################################
-[ $isolates ]
-# d8-os writes temporary files that might interfer with each other when running
-# in multible threads. Skip this if running with isolates testing.
-d8-os: SKIP
+# Requires bigger stack size in the Genesis and if stack size is increased,
+# the test requires too much time to run. However, the problem test covers
+# should be platform-independent.
+regress/regress-1132: SKIP
diff --git a/src/3rdparty/v8/test/mjsunit/number-is.js b/src/3rdparty/v8/test/mjsunit/number-is.js
new file mode 100644
index 0000000..1589fc6
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/number-is.js
@@ -0,0 +1,58 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test Harmony Number.isFinite() and Number.isNaN() functions.
+
+assertTrue(Number.isFinite(0));
+assertTrue(Number.isFinite(Number.MIN_VALUE));
+assertTrue(Number.isFinite(Number.MAX_VALUE));
+assertFalse(Number.isFinite(Number.NaN));
+assertFalse(Number.isFinite(Number.POSITIVE_INFINITY));
+assertFalse(Number.isFinite(Number.NEGATIVE_INFINITY));
+assertFalse(Number.isFinite(new Number(0)));
+assertFalse(Number.isFinite(1/0));
+assertFalse(Number.isFinite(-1/0));
+assertFalse(Number.isFinite({}));
+assertFalse(Number.isFinite([]));
+assertFalse(Number.isFinite("s"));
+assertFalse(Number.isFinite(null));
+assertFalse(Number.isFinite(undefined));
+
+assertFalse(Number.isNaN(0));
+assertFalse(Number.isNaN(Number.MIN_VALUE));
+assertFalse(Number.isNaN(Number.MAX_VALUE));
+assertTrue(Number.isNaN(Number.NaN));
+assertFalse(Number.isNaN(Number.POSITIVE_INFINITY));
+assertFalse(Number.isNaN(Number.NEGATIVE_INFINITY));
+assertFalse(Number.isNaN(new Number(0)));
+assertFalse(Number.isNaN(1/0));
+assertFalse(Number.isNaN(-1/0));
+assertFalse(Number.isNaN({}));
+assertFalse(Number.isNaN([]));
+assertFalse(Number.isNaN("s"));
+assertFalse(Number.isNaN(null));
+assertFalse(Number.isNaN(undefined));
diff --git a/src/3rdparty/v8/test/mjsunit/object-define-property.js b/src/3rdparty/v8/test/mjsunit/object-define-property.js
index ee6083a..fdaf82d 100644
--- a/src/3rdparty/v8/test/mjsunit/object-define-property.js
+++ b/src/3rdparty/v8/test/mjsunit/object-define-property.js
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -503,7 +503,7 @@ try {
// Defining properties null should fail even when we have
// other allowed values
try {
- %DefineOrRedefineAccessorProperty(null, 'foo', 0, func, 0);
+ %DefineOrRedefineAccessorProperty(null, 'foo', func, null, 0);
} catch (e) {
assertTrue(/illegal access/.test(e));
}
@@ -1053,4 +1053,35 @@ for (var i = 0; i < 1000; i++) {
// Non-enumerable property forces dictionary mode.
Object.defineProperty(o, i, {value: i, enumerable: false});
}
-assertEquals(999, o[999]); \ No newline at end of file
+assertEquals(999, o[999]);
+
+
+// Regression test: Bizzare behavior on non-strict arguments object.
+(function test(arg0) {
+ // Here arguments[0] is a fast alias on arg0.
+ Object.defineProperty(arguments, "0", {
+ value:1,
+ enumerable:false
+ });
+ // Here arguments[0] is a slow alias on arg0.
+ Object.defineProperty(arguments, "0", {
+ value:2,
+ writable:false
+ });
+ // Here arguments[0] is no alias at all.
+ Object.defineProperty(arguments, "0", {
+ value:3
+ });
+ assertEquals(2, arg0);
+ assertEquals(3, arguments[0]);
+})(0);
+
+
+// Regression test: We should never observe the hole value.
+var objectWithGetter = {};
+objectWithGetter.__defineGetter__('foo', function() {});
+assertEquals(undefined, objectWithGetter.__lookupSetter__('foo'));
+
+var objectWithSetter = {};
+objectWithSetter.__defineSetter__('foo', function(x) {});
+assertEquals(undefined, objectWithSetter.__lookupGetter__('foo'));
diff --git a/src/3rdparty/v8/test/mjsunit/object-is.js b/src/3rdparty/v8/test/mjsunit/object-is.js
new file mode 100644
index 0000000..b9fdc84
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/object-is.js
@@ -0,0 +1,47 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test both the Harmony egal operator and it's function equivalent.
+
+function TestEgal(expected, x, y) {
+ // TODO(mstarzinger): Once we have the egal operator, we can test it here.
+ assertSame(expected, Object.is(x, y));
+}
+
+var test_set = [ {}, [], 1/0, -1/0, "s", 0, 0/-1, null, undefined ];
+print(test_set);
+for (var i = 0; i < test_set.length; i++) {
+ for (var j = 0; j < test_set.length; j++) {
+ if (i == j) {
+ assertSame(test_set[i], test_set[j]);
+ TestEgal(true, test_set[i], test_set[j]);
+ } else {
+ TestEgal(false, test_set[i], test_set[j]);
+ TestEgal(false, test_set[j], test_set[i]);
+ }
+ }
+}
diff --git a/src/3rdparty/v8/test/mjsunit/object-prevent-extensions.js b/src/3rdparty/v8/test/mjsunit/object-prevent-extensions.js
index 322a2cb..6b9184d 100644
--- a/src/3rdparty/v8/test/mjsunit/object-prevent-extensions.js
+++ b/src/3rdparty/v8/test/mjsunit/object-prevent-extensions.js
@@ -114,3 +114,15 @@ Object.preventExtensions(foo);
foo.x = 29;
assertEquals(undefined, foo.x);
+
+// when Object.isExtensible(o) === false
+// assignment should return right hand side value
+var o = Object.preventExtensions({});
+var v = o.v = 50;
+assertEquals(undefined, o.v);
+assertEquals(50, v);
+
+// test same behavior as above, but for integer properties
+var n = o[0] = 100;
+assertEquals(undefined, o[0]);
+assertEquals(100, n);
diff --git a/src/3rdparty/v8/test/mjsunit/pixel-array-rounding.js b/src/3rdparty/v8/test/mjsunit/pixel-array-rounding.js
new file mode 100644
index 0000000..ef5a10b
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/pixel-array-rounding.js
@@ -0,0 +1,44 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+var pixels = new PixelArray(8);
+
+function f() {
+ for (var i = 0; i < 8; i++) {
+ pixels[i] = (i * 1.1);
+ }
+ return pixels[1] + pixels[6];
+}
+
+f();
+f();
+assertEquals(6, pixels[5]);
+%OptimizeFunctionOnNextCall(f);
+f();
+assertEquals(6, pixels[5]);
diff --git a/src/3rdparty/v8/test/mjsunit/regexp.js b/src/3rdparty/v8/test/mjsunit/regexp.js
index 3c4f883..ec82c96 100644
--- a/src/3rdparty/v8/test/mjsunit/regexp.js
+++ b/src/3rdparty/v8/test/mjsunit/regexp.js
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -127,6 +127,17 @@ assertTrue(re.test("$"));
assertTrue(/^[Z-\c-e]*$/.test("Z[\\cde"));
+// Test that we handle \s and \S correctly on special Unicode characters.
+re = /\s/;
+assertTrue(re.test("\u2028"));
+assertTrue(re.test("\u2029"));
+assertTrue(re.test("\uFEFF"));
+
+re = /\S/;
+assertFalse(re.test("\u2028"));
+assertFalse(re.test("\u2029"));
+assertFalse(re.test("\uFEFF"));
+
// Test that we handle \s and \S correctly inside some bizarre
// character classes.
re = /[\s-:]/;
@@ -690,3 +701,7 @@ assertThrows("RegExp('(*)')");
assertThrows("RegExp('(?:*)')");
assertThrows("RegExp('(?=*)')");
assertThrows("RegExp('(?!*)')");
+
+// Test trimmed regular expression for RegExp.test().
+assertTrue(/.*abc/.test("abc"));
+assertFalse(/.*\d+/.test("q"));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-102153.js b/src/3rdparty/v8/test/mjsunit/regress/regress-102153.js
new file mode 100644
index 0000000..0f67656
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-102153.js
@@ -0,0 +1,57 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+
+// Test that the break point is set before initializing the loop variable
+// so that we break before any iteration has been run.
+
+Debug = debug.Debug;
+
+var break_hit = false;
+
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.Break) {
+ break_hit = true;
+ }
+}
+
+Debug.setListener(listener);
+
+function test() {
+ for (var i = 0; i < 3; i++) { // Break here.
+ if (i == 0) break;
+ }
+}
+
+Debug.setBreakPoint(test, 1, 0);
+
+assertTrue(Debug.showBreakPoints(test).indexOf("// Break here.") >= 0);
+
+test();
+
+assertTrue(break_hit);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-103259.js b/src/3rdparty/v8/test/mjsunit/regress/regress-103259.js
new file mode 100644
index 0000000..447073c
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-103259.js
@@ -0,0 +1,36 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+var a = [];
+a[8192] = '';
+assertTrue(%HasDictionaryElements(a));
+var uc16 = '\u0094';
+var test = uc16;
+for (var i = 0; i < 13; i++) test += test;
+assertEquals(test, a.join(uc16));
diff --git a/src/3rdparty/v8/src/extensions/experimental/i18n-locale.h b/src/3rdparty/v8/test/mjsunit/regress/regress-108296.js
index 607818c..38ecda7 100644
--- a/src/3rdparty/v8/src/extensions/experimental/i18n-locale.h
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-108296.js
@@ -25,36 +25,28 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#ifndef V8_EXTENSIONS_EXPERIMENTAL_I18N_LOCALE_H_
-#define V8_EXTENSIONS_EXPERIMENTAL_I18N_LOCALE_H_
-
-#include "include/v8.h"
-
-namespace v8 {
-namespace internal {
-
-class I18NLocale {
- public:
- I18NLocale() {}
-
- // Implementations of window.Locale methods.
- static v8::Handle<v8::Value> JSLocale(const v8::Arguments& args);
-
- // Infers region id given the locale id, or uses user specified region id.
- // Result is canonicalized.
- // Returns status of ICU operation (maximizing locale or get region call).
- static bool GetBestMatchForRegionID(
- const char* locale_id, v8::Handle<v8::Value> regions, char* result);
-
- private:
- // Key name for localeID parameter.
- static const char* const kLocaleID;
- // Key name for regionID parameter.
- static const char* const kRegionID;
- // Key name for the icuLocaleID result.
- static const char* const kICULocaleID;
-};
-
-} } // namespace v8::internal
-
-#endif // V8_EXTENSIONS_EXPERIMENTAL_I18N_LOCALE_H_
+// Flags: --allow-natives-syntax
+
+// This test checks that young immediates embedded into code objects
+// are referenced through a cell.
+
+function f (k, a, b) {
+ // Create control flow for a.foo. Control flow resolution will
+ // be generated as a part of a gap move. Gap move operate on immediates as
+ // a.foo is a CONSTANT_FUNCTION.
+ var x = k ? a.foo : a.foo;
+ return x.prototype;
+}
+
+var a = { };
+
+// Make sure that foo is a CONSTANT_FUNCTION but not be pretenured.
+a.foo = (function () { return function () {}; })();
+
+// Ensure that both branches of ternary operator have monomorphic type feedback.
+f(true, a, a);
+f(true, a, a);
+f(false, a, a);
+f(false, a, a);
+%OptimizeFunctionOnNextCall(f);
+f(true, a, a);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-109195.js b/src/3rdparty/v8/test/mjsunit/regress/regress-109195.js
new file mode 100644
index 0000000..97538aa
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-109195.js
@@ -0,0 +1,65 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+var Debug = debug.Debug;
+
+function listener(event, exec_state, event_data, data) {
+ for (var i = 0, n = exec_state.frameCount(); i < n; i++) {
+ exec_state.frame().scopeCount(i);
+ }
+ exec_state.prepareStep(Debug.StepAction.Continue, 1);
+}
+
+Debug.setListener(listener);
+
+var F = function () {
+ 1, function () {
+ var d = 0;
+ (function () { d; });
+ debugger;
+ }();
+};
+
+var src = "(" + F.toString() + ")()";
+eval(src);
+
+Function.prototype.__defineGetter__("f", function () {
+ debugger;
+ return 0;
+});
+
+var G = function () {
+ 1, function () {
+ var d = 0;
+ (function () { d; });
+ debugger;
+ }['f'];
+};
+
+var src = "(" + G.toString() + ")()";
+eval(src);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-110509.js b/src/3rdparty/v8/test/mjsunit/regress/regress-110509.js
new file mode 100644
index 0000000..132bd23
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-110509.js
@@ -0,0 +1,41 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Verify that LRandom preserves rsi correctly.
+
+function foo() {
+ Math.random();
+ new Function("");
+}
+
+foo();
+foo();
+foo();
+%OptimizeFunctionOnNextCall(foo);
+foo();
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1110.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1110.js
index 43b8d77..124f520 100644
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-1110.js
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1110.js
@@ -28,10 +28,9 @@
// Test that the illegal continue is thrown at parse time.
try {
- function Crash() { continue;if (Crash) {
- } }
+ eval("function Crash() { assertUnreachable(); continue;if (Crash) { } }");
Crash();
- assertTrue(false);
+ assertUnreachable();
} catch (e) {
assertTrue(e instanceof SyntaxError);
assertTrue(/continue/.test(e.message));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-113924.js b/src/3rdparty/v8/test/mjsunit/regress/regress-113924.js
new file mode 100644
index 0000000..3ecdec4
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-113924.js
@@ -0,0 +1,31 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var count=12000;
+while(count--) {
+ eval("var a = new Object(10); a[2] += 7;");
+}
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-115452.js b/src/3rdparty/v8/test/mjsunit/regress/regress-115452.js
new file mode 100644
index 0000000..7e424ed
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-115452.js
@@ -0,0 +1,48 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that a function declaration cannot overwrite a read-only property.
+
+print(0)
+function foobl() {}
+assertTrue(typeof this.foobl == "function");
+assertTrue(Object.getOwnPropertyDescriptor(this, "foobl").writable);
+
+print(1)
+Object.defineProperty(this, "foobl", {value: 1, writable: false});
+assertSame(1, this.foobl);
+assertFalse(Object.getOwnPropertyDescriptor(this, "foobl").writable);
+
+print(2)
+eval("function foobl() {}");
+assertSame(1, this.foobl);
+assertFalse(Object.getOwnPropertyDescriptor(this, "foobl").writable);
+
+print(3)
+eval("function foobl() {}");
+assertSame(1, this.foobl);
+assertFalse(Object.getOwnPropertyDescriptor(this, "foobl").writable);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-117794.js b/src/3rdparty/v8/test/mjsunit/regress/regress-117794.js
new file mode 100644
index 0000000..5e11b40
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-117794.js
@@ -0,0 +1,57 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Loads specialized to be from the global object should not omit the
+// smi check on the receiver. The code below should not crash.
+
+print = function() {}
+
+function constructor() {};
+
+function assertHasOwnProperties(object, limit) {
+ for (var i = 0; i < limit; i++) { }
+}
+
+try {
+ Object.keys();
+} catch(exc2) {
+ print(exc2.stack);
+}
+
+var x1 = new Object();
+
+try {
+ new Function("A Man Called Horse", x1.d);
+} catch(exc3) {
+ print(exc3.stack);
+}
+
+try {
+ (-(true)).toPrecision(0x30, 'lib1-f1');
+} catch(exc1) {
+ print(exc1.stack);
+}
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-119429.js b/src/3rdparty/v8/test/mjsunit/regress/regress-119429.js
new file mode 100644
index 0000000..a876487
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-119429.js
@@ -0,0 +1,37 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+var d = 0;
+function recurse() {
+ if (++d == 25135) { // A magic number just below stack overflow on ia32
+ %DebugBreak();
+ }
+ recurse();
+}
+assertThrows(function() { recurse();} );
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-119925.js b/src/3rdparty/v8/test/mjsunit/regress/regress-119925.js
new file mode 100644
index 0000000..6712754
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-119925.js
@@ -0,0 +1,34 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that the throw is not inlined if object literals cannot be
+// inlined.
+Array.prototype.__proto__ = { 77e4 : null };
+function continueWithinLoop() {
+ for (var key in [(1.2)]) { }
+};
+continueWithinLoop();
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-120099.js b/src/3rdparty/v8/test/mjsunit/regress/regress-120099.js
new file mode 100644
index 0000000..3b06f4d
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-120099.js
@@ -0,0 +1,40 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+'use strict';
+
+var a = Object.create(Object.prototype);
+var b = Object.create(Object.prototype);
+assertFalse(a === b);
+
+Object.defineProperty(a, 'x', { value: 1 });
+assertTrue(a.x === 1);
+assertTrue(b.x === undefined);
+
+b.x = 2;
+assertTrue(a.x === 1);
+assertTrue(b.x === 2);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-121407.js b/src/3rdparty/v8/test/mjsunit/regress/regress-121407.js
new file mode 100644
index 0000000..25033fb
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-121407.js
@@ -0,0 +1,40 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var a = [0,1,2,3];
+a[2000000] = 2000000;
+a.length=2000;
+for (var i = 0; i <= 256; i++) {
+ a[i] = new Object();
+}
+
+a = [0.5,1.5,2.5,3.5,4.5,5.5];
+a[2000000] = 2000000;
+a.length=2000;
+for (var i = 0; i <= 256; i++) {
+ a[i] = new Object();
+} \ No newline at end of file
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1229.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1229.js
index c0dcba9..5447f3f 100644
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-1229.js
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1229.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -29,59 +29,118 @@
// Check that %NewObjectFromBound works correctly when called from optimized
// frame.
-function foo(x, y, z) {
+function foo1(x, y, z) {
assertEquals(1, x);
assertEquals(2, y);
assertEquals(3, z);
}
-var foob = foo.bind({}, 1);
+function foo2(x, y, z) {
+ assertEquals(1, x);
+ assertEquals(2, y);
+ assertEquals(undefined, z);
+}
+
+function foo3(x, y, z) {
+ assertEquals(1, x);
+ assertEquals(2, y);
+ assertEquals(3, z);
+}
-function f(y, z) {
- return %NewObjectFromBound(foob);
+
+var foob1 = foo1.bind({}, 1);
+var foob2 = foo2.bind({}, 1);
+var foob3 = foo3.bind({}, 1);
+
+
+function f1(y, z) {
+ return %NewObjectFromBound(foob1);
+}
+
+function f2(y, z) {
+ return %NewObjectFromBound(foob2);
+}
+
+function f3(y, z) {
+ return %NewObjectFromBound(foob3);
}
// Check that %NewObjectFromBound looks at correct frame for inlined function.
-function g(z, y) {
- return f(y, z); /* f should be inlined into g, note rotated arguments */
+function g1(z, y) {
+ return f1(y, z); /* f should be inlined into g, note rotated arguments */
+}
+
+function g2(z, y, x) {
+ return f2(y); /* f should be inlined into g, note argument count mismatch */
+}
+
+function g3(z, y, x) {
+ return f3(x, y, z); /* f should be inlined into g, note argument count mismatch */
}
// Check that %NewObjectFromBound looks at correct frame for inlined function.
function ff(x) { }
-function h(z2, y2) {
+function h1(z2, y2) {
+ var local_z = z2 >> 1;
+ ff(local_z);
+ var local_y = y2 >> 1;
+ ff(local_y);
+ return f1(local_y, local_z); /* f should be inlined into h */
+}
+
+function h2(z2, y2, x2) {
+ var local_z = z2 >> 1;
+ ff(local_z);
+ var local_y = y2 >> 1;
+ ff(local_y);
+ return f2(local_y); /* f should be inlined into h */
+}
+
+function h3(z2, y2, x2) {
var local_z = z2 >> 1;
ff(local_z);
var local_y = y2 >> 1;
ff(local_y);
- return f(local_y, local_z); /* f should be inlined into h */
+ var local_x = x2 >> 1;
+ ff(local_x);
+ return f3(local_x, local_y, local_z); /* f should be inlined into h */
}
-for (var i = 0; i < 5; i++) f(2, 3);
-%OptimizeFunctionOnNextCall(f);
-f(2, 3);
-for (var i = 0; i < 5; i++) g(3, 2);
-%OptimizeFunctionOnNextCall(g);
-g(3, 2);
+function invoke(f, args) {
+ for (var i = 0; i < 5; i++) f.apply(this, args);
+ %OptimizeFunctionOnNextCall(f);
+ f.apply(this, args);
+}
-for (var i = 0; i < 5; i++) h(6, 4);
-%OptimizeFunctionOnNextCall(h);
-h(6, 4);
+invoke(f1, [2, 3]);
+invoke(f2, [2]);
+invoke(f3, [2, 3, 4]);
+invoke(g1, [3, 2]);
+invoke(g2, [3, 2, 4]);
+invoke(g3, [4, 3, 2]);
+invoke(h1, [6, 4]);
+invoke(h2, [6, 4, 8]);
+invoke(h3, [8, 6, 4]);
// Check that %_IsConstructCall returns correct value when inlined
var NON_CONSTRUCT_MARKER = {};
var CONSTRUCT_MARKER = {};
-function baz() {
+function baz(x) {
return (!%_IsConstructCall()) ? NON_CONSTRUCT_MARKER : CONSTRUCT_MARKER;
}
function bar(x, y, z) {
+ var non_construct = baz(0); /* baz should be inlined */
+ assertSame(non_construct, NON_CONSTRUCT_MARKER);
var non_construct = baz(); /* baz should be inlined */
- assertEquals(non_construct, NON_CONSTRUCT_MARKER);
- var construct = new baz();
- assertEquals(construct, CONSTRUCT_MARKER);
+ assertSame(non_construct, NON_CONSTRUCT_MARKER);
+ var non_construct = baz(0, 0); /* baz should be inlined */
+ assertSame(non_construct, NON_CONSTRUCT_MARKER);
+ var construct = new baz(0);
+ assertSame(construct, CONSTRUCT_MARKER);
+ var construct = new baz(0, 0);
+ assertSame(construct, CONSTRUCT_MARKER);
}
-for (var i = 0; i < 5; i++) new bar(1, 2, 3);
-%OptimizeFunctionOnNextCall(bar);
-bar(1, 2, 3);
+invoke(bar, [1, 2, 3]);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1530.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1530.js
new file mode 100644
index 0000000..db21144
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1530.js
@@ -0,0 +1,69 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that redefining the 'prototype' property of a function object
+// does actually set the internal value and does not screw up any
+// shadowing between said property and the internal value.
+
+var f = function() {};
+
+// Verify that normal assignment of 'prototype' property works properly
+// and updates the internal value.
+var x = { foo: 'bar' };
+f.prototype = x;
+assertSame(f.prototype, x);
+assertSame(f.prototype.foo, 'bar');
+assertSame(new f().foo, 'bar');
+assertSame(Object.getPrototypeOf(new f()), x);
+assertSame(Object.getOwnPropertyDescriptor(f, 'prototype').value, x);
+
+// Verify that 'prototype' behaves like a data property when it comes to
+// redefining with Object.defineProperty() and the internal value gets
+// updated.
+var y = { foo: 'baz' };
+Object.defineProperty(f, 'prototype', { value: y, writable: true });
+assertSame(f.prototype, y);
+assertSame(f.prototype.foo, 'baz');
+assertSame(new f().foo, 'baz');
+assertSame(Object.getPrototypeOf(new f()), y);
+assertSame(Object.getOwnPropertyDescriptor(f, 'prototype').value, y);
+
+// Verify that the previous redefinition didn't screw up callbacks and
+// the internal value still gets updated.
+var z = { foo: 'other' };
+f.prototype = z;
+assertSame(f.prototype, z);
+assertSame(f.prototype.foo, 'other');
+assertSame(new f().foo, 'other');
+assertSame(Object.getPrototypeOf(new f()), z);
+assertSame(Object.getOwnPropertyDescriptor(f, 'prototype').value, z);
+
+// Verify that non-writability of other properties is respected.
+assertThrows("Object.defineProperty(f, 'name', { value: {} })");
+assertThrows("Object.defineProperty(f, 'length', { value: {} })");
+assertThrows("Object.defineProperty(f, 'caller', { value: {} })");
+assertThrows("Object.defineProperty(f, 'arguments', { value: {} })");
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1624-strict.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1624-strict.js
new file mode 100644
index 0000000..8bc58d5
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1624-strict.js
@@ -0,0 +1,140 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that global eval calls of strict code (independent from whether being
+// direct or indirect) have their own lexical and variable environment.
+
+"use strict";
+var evil = eval;
+
+// Test global direct strict eval in strict script.
+// Expects new environment.
+var no_touch = 0;
+eval('"use strict"; var no_touch = 1;');
+assertSame(0, no_touch);
+
+// Test global indirect strict eval in strict script.
+// Expects new environment.
+var no_touch = 0;
+evil('"use strict"; var no_touch = 2;');
+assertSame(0, no_touch);
+
+// Test global direct non-strict eval in strict script.
+// Expects new environment.
+var no_touch = 0;
+eval('var no_touch = 3;');
+assertSame(0, no_touch);
+
+// Test global indirect non-strict eval in strict script.
+// Expects global environment.
+var no_touch = 0;
+evil('var no_touch = 4;');
+assertSame(4, no_touch);
+
+// Test non-global direct strict eval in strict script.
+// Expects new environment.
+var no_touch = 0;
+(function() {
+ var no_touch = 0;
+ eval('"use strict"; var no_touch = 5;');
+ assertSame(0, no_touch);
+})()
+assertSame(0, no_touch);
+
+// Test non-global indirect strict eval in strict script.
+// Expects new environment.
+var no_touch = 0;
+(function() {
+ var no_touch = 0;
+ evil('"use strict"; var no_touch = 6;');
+ assertSame(0, no_touch);
+})()
+assertSame(0, no_touch);
+
+// Test non-global direct non-strict eval in strict script.
+// Expects new environment.
+var no_touch = 0;
+(function() {
+ var no_touch = 0;
+ eval('var no_touch = 7;');
+ assertSame(0, no_touch);
+})()
+assertSame(0, no_touch);
+
+// Test non-global indirect non-strict eval in strict script.
+// Expects global environment.
+var no_touch = 0;
+(function() {
+ var no_touch = 0;
+ evil('var no_touch = 8;');
+ assertSame(0, no_touch);
+})()
+assertSame(8, no_touch);
+
+// Test non-global direct strict eval in strict script.
+// Expects new environment.
+var no_touch = 0;
+(function() {
+ "use strict";
+ var no_touch = 0;
+ eval('"use strict"; var no_touch = 9;');
+ assertSame(0, no_touch);
+})()
+assertSame(0, no_touch);
+
+// Test non-global indirect strict eval in strict script.
+// Expects new environment.
+var no_touch = 0;
+(function() {
+ "use strict";
+ var no_touch = 0;
+ evil('"use strict"; var no_touch = 10;');
+ assertSame(0, no_touch);
+})()
+assertSame(0, no_touch);
+
+// Test non-global direct non-strict eval in strict script.
+// Expects new environment.
+var no_touch = 0;
+(function() {
+ "use strict";
+ var no_touch = 0;
+ eval('var no_touch = 11;');
+ assertSame(0, no_touch);
+})()
+assertSame(0, no_touch);
+
+// Test non-global indirect non-strict eval in strict script.
+// Expects global environment.
+var no_touch = 0;
+(function() {
+ "use strict";
+ var no_touch = 0;
+ evil('var no_touch = 12;');
+ assertSame(0, no_touch);
+})()
+assertSame(12, no_touch);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1624.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1624.js
new file mode 100644
index 0000000..987e036
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1624.js
@@ -0,0 +1,139 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that global eval calls of strict code (independent from whether being
+// direct or indirect) have their own lexical and variable environment.
+
+var evil = eval;
+
+// Test global direct strict eval.
+// Expects new environment.
+var no_touch = 0;
+eval('"use strict"; var no_touch = 1;');
+assertSame(0, no_touch);
+
+// Test global indirect strict eval.
+// Expects new environment.
+var no_touch = 0;
+evil('"use strict"; var no_touch = 2;');
+assertSame(0, no_touch);
+
+// Test global direct non-strict eval.
+// Expects global environment.
+var no_touch = 0;
+eval('var no_touch = 3;');
+assertSame(3, no_touch);
+
+// Test global indirect non-strict eval.
+// Expects global environment.
+var no_touch = 0;
+evil('var no_touch = 4;');
+assertSame(4, no_touch);
+
+// Test non-global direct strict eval in non-strict function.
+// Expects new environment.
+var no_touch = 0;
+(function() {
+ var no_touch = 0;
+ eval('"use strict"; var no_touch = 5;');
+ assertSame(0, no_touch);
+})()
+assertSame(0, no_touch);
+
+// Test non-global indirect strict eval in non-strict function.
+// Expects new environment.
+var no_touch = 0;
+(function() {
+ var no_touch = 0;
+ evil('"use strict"; var no_touch = 6;');
+ assertSame(0, no_touch);
+})()
+assertSame(0, no_touch);
+
+// Test non-global direct non-strict eval in non-strict function.
+// Expects function environment.
+var no_touch = 0;
+(function() {
+ var no_touch = 0;
+ eval('var no_touch = 7;');
+ assertSame(7, no_touch);
+})()
+assertSame(0, no_touch);
+
+// Test non-global indirect non-strict eval in non-strict function.
+// Expects global environment.
+var no_touch = 0;
+(function() {
+ var no_touch = 0;
+ evil('var no_touch = 8;');
+ assertSame(0, no_touch);
+})()
+assertSame(8, no_touch);
+
+// Test non-global direct strict eval in strict function.
+// Expects new environment.
+var no_touch = 0;
+(function() {
+ "use strict";
+ var no_touch = 0;
+ eval('"use strict"; var no_touch = 9;');
+ assertSame(0, no_touch);
+})()
+assertSame(0, no_touch);
+
+// Test non-global indirect strict eval in strict function.
+// Expects new environment.
+var no_touch = 0;
+(function() {
+ "use strict";
+ var no_touch = 0;
+ evil('"use strict"; var no_touch = 10;');
+ assertSame(0, no_touch);
+})()
+assertSame(0, no_touch);
+
+// Test non-global direct non-strict eval in strict function.
+// Expects new environment.
+var no_touch = 0;
+(function() {
+ "use strict";
+ var no_touch = 0;
+ eval('var no_touch = 11;');
+ assertSame(0, no_touch);
+})()
+assertSame(0, no_touch);
+
+// Test non-global indirect non-strict eval in strict function.
+// Expects global environment.
+var no_touch = 0;
+(function() {
+ "use strict";
+ var no_touch = 0;
+ evil('var no_touch = 12;');
+ assertSame(0, no_touch);
+})()
+assertSame(12, no_touch);
diff --git a/src/3rdparty/v8/src/extensions/experimental/number-format.h b/src/3rdparty/v8/test/mjsunit/regress/regress-1790.js
index bcfaed6..8848eea 100644
--- a/src/3rdparty/v8/src/extensions/experimental/number-format.h
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1790.js
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -25,47 +25,34 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#ifndef V8_EXTENSIONS_EXPERIMENTAL_NUMBER_FORMAT_H_
-#define V8_EXTENSIONS_EXPERIMENTAL_NUMBER_FORMAT_H_
-
-#include "include/v8.h"
-
-#include "unicode/uversion.h"
-
-namespace U_ICU_NAMESPACE {
-class DecimalFormat;
+// Regression test checking that the sequence of element access in built-in
+// array functions is specification conform (i.e. [[HasProperty]] might return
+// bogus result after [[Get]] has been called).
+
+function CheckSequence(builtin, callback) {
+ var array = [1,2,3];
+ var callback_count = 0;
+ var callback_wrapper = function() {
+ callback_count++;
+ return callback()
+ }
+
+ // Define getter that will delete itself upon first invocation.
+ Object.defineProperty(array, '1', {
+ get: function () { delete array[1]; },
+ configurable: true
+ });
+
+ assertTrue(array.hasOwnProperty('1'));
+ builtin.apply(array, [callback_wrapper, 'argument']);
+ assertFalse(array.hasOwnProperty('1'));
+ assertEquals(3, callback_count);
}
-namespace v8 {
-namespace internal {
-
-class NumberFormat {
- public:
- // 3-letter ISO 4217 currency code plus \0.
- static const int kCurrencyCodeLength;
-
- static v8::Handle<v8::Value> JSNumberFormat(const v8::Arguments& args);
-
- // Helper methods for various bindings.
-
- // Unpacks date format object from corresponding JavaScript object.
- static icu::DecimalFormat* UnpackNumberFormat(
- v8::Handle<v8::Object> obj);
-
- // Release memory we allocated for the NumberFormat once the JS object that
- // holds the pointer gets garbage collected.
- static void DeleteNumberFormat(v8::Persistent<v8::Value> object,
- void* param);
-
- // Formats number and returns corresponding string.
- static v8::Handle<v8::Value> Format(const v8::Arguments& args);
-
- private:
- NumberFormat();
-
- static v8::Persistent<v8::FunctionTemplate> number_format_template_;
-};
-
-} } // namespace v8::internal
-
-#endif // V8_EXTENSIONS_EXPERIMENTAL_NUMBER_FORMAT_H_
+CheckSequence(Array.prototype.every, function() { return true; });
+CheckSequence(Array.prototype.filter, function() { return true; });
+CheckSequence(Array.prototype.forEach, function() { return 0; });
+CheckSequence(Array.prototype.map, function() { return 0; });
+CheckSequence(Array.prototype.reduce, function() { return 0; });
+CheckSequence(Array.prototype.reduceRight, function() { return 0; });
+CheckSequence(Array.prototype.some, function() { return false; });
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1849.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1849.js
new file mode 100644
index 0000000..176f918
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1849.js
@@ -0,0 +1,39 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// See: http://code.google.com/p/v8/issues/detail?id=1878
+
+// Flags: --allow-natives-syntax
+
+var count = 1e5;
+var arr = new Array(count);
+assertFalse(%HasFastDoubleElements(arr));
+for (var i = 0; i < count; i++) {
+ arr[i] = 0;
+}
+assertFalse(%HasFastDoubleElements(arr));
+assertTrue(%HasFastSmiOnlyElements(arr));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1853.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1853.js
new file mode 100644
index 0000000..f80bade
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1853.js
@@ -0,0 +1,116 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+
+// Test whether scripts compiled after setting the break point are
+// updated correctly.
+
+Debug = debug.Debug;
+
+var break_count = 0;
+var test_break_1 = false;
+var test_break_2 = false;
+
+function sendCommand(state, cmd) {
+ // Get the debug command processor in paused state.
+ var dcp = state.debugCommandProcessor(false);
+ var request = JSON.stringify(cmd);
+ var response = dcp.processDebugJSONRequest(request);
+ return JSON.parse(response);
+}
+
+function setBreakPointByName(state) {
+ sendCommand(state, {
+ seq: 0,
+ type: "request",
+ command: "setbreakpoint",
+ arguments: {
+ type: "script",
+ target: "testScriptOne",
+ line: 2
+ }
+ });
+}
+
+function setBreakPointByRegExp(state) {
+ sendCommand(state, {
+ seq: 0,
+ type: "request",
+ command: "setbreakpoint",
+ arguments: {
+ type: "scriptRegExp",
+ target: "Scrip.Two",
+ line: 2
+ }
+ });
+}
+
+function listener(event, exec_state, event_data, data) {
+ try {
+ if (event == Debug.DebugEvent.Break) {
+ switch (break_count) {
+ case 0:
+ // Set break points before the code has been compiled.
+ setBreakPointByName(exec_state);
+ setBreakPointByRegExp(exec_state);
+ break;
+ case 1:
+ // Set the flag to prove that we hit the first break point.
+ test_break_1 = true;
+ break;
+ case 2:
+ // Set the flag to prove that we hit the second break point.
+ test_break_2 = true;
+ break;
+ }
+ break_count++;
+ }
+ } catch (e) {
+ print(e);
+ }
+}
+
+Debug.setListener(listener);
+debugger;
+
+eval('function test1() { \n' +
+ ' assertFalse(test_break_1); \n' +
+ ' assertTrue(test_break_1); \n' +
+ '} \n' +
+ '//@ sourceURL=testScriptOne');
+
+eval('function test2() { \n' +
+ ' assertFalse(test_break_2); \n' +
+ ' assertTrue(test_break_2); \n' +
+ '} \n' +
+ '//@ sourceURL=testScriptTwo');
+
+test1();
+test2();
+assertEquals(3, break_count);
+
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1878.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1878.js
new file mode 100644
index 0000000..a1648b1
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1878.js
@@ -0,0 +1,44 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// See: http://code.google.com/p/v8/issues/detail?id=1878
+
+// Flags: --allow-natives-syntax --expose_natives_as=natives
+
+var a = Array();
+
+for (var i = 0; i < 1000; i++) {
+ var ai = natives.InternalArray(10000);
+ assertFalse(%HaveSameMap(ai, a));
+ assertTrue(%HasFastElements(ai));
+}
+
+for (var i = 0; i < 1000; i++) {
+ var ai = new natives.InternalArray(10000);
+ assertFalse(%HaveSameMap(ai, a));
+ assertTrue(%HasFastElements(ai));
+}
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1898.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1898.js
new file mode 100644
index 0000000..5440446
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1898.js
@@ -0,0 +1,37 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function f(x) {
+ Math.log(Math.min(0.1, Math.abs(x)));
+}
+
+f(0.1);
+f(0.1);
+%OptimizeFunctionOnNextCall(f);
+f(0.1);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1924.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1924.js
new file mode 100644
index 0000000..8039541
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1924.js
@@ -0,0 +1,42 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// For http://code.google.com/p/v8/issues/detail?id=1924
+
+a: break a;
+a: b: break a;
+a: b: break b;
+assertThrows("a: break a a", SyntaxError)
+assertThrows("a: break a 1", SyntaxError)
+assertThrows("a: break a ''", SyntaxError)
+assertThrows("a: break a var b", SyntaxError)
+assertThrows("a: break a {}", SyntaxError)
+
+a: if (0) break a;
+b: if (0) {break b;} else {}
+c: if (0) break c; else {}
+d: if (0) break d; else break d;
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1945.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1945.js
new file mode 100644
index 0000000..bffc775
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1945.js
@@ -0,0 +1,34 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+var _d = new Date();
+_d.setHours(0,0,0,0);
+_d.setHours(0,0,0,0);
+%OptimizeFunctionOnNextCall(_d.setHours);
+_d.setHours(0,0,0,0);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1969.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1969.js
new file mode 100644
index 0000000..2728c2c
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1969.js
@@ -0,0 +1,5045 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+f();
+f();
+%OptimizeFunctionOnNextCall(f);
+var start = (new Date()).getTime();
+var array = f();
+var end = (new Date()).getTime();
+
+// Assert that recompiling and executing f() takes less than a second.
+assertTrue((end - start) < 1000);
+
+for (var i = 0; i < 5000; i++) assertEquals(0, array[i]);
+
+function f() {
+ var a = new Array(5000);
+ a[0]=0;
+ a[1]=0;
+ a[2]=0;
+ a[3]=0;
+ a[4]=0;
+ a[5]=0;
+ a[6]=0;
+ a[7]=0;
+ a[8]=0;
+ a[9]=0;
+ a[10]=0;
+ a[11]=0;
+ a[12]=0;
+ a[13]=0;
+ a[14]=0;
+ a[15]=0;
+ a[16]=0;
+ a[17]=0;
+ a[18]=0;
+ a[19]=0;
+ a[20]=0;
+ a[21]=0;
+ a[22]=0;
+ a[23]=0;
+ a[24]=0;
+ a[25]=0;
+ a[26]=0;
+ a[27]=0;
+ a[28]=0;
+ a[29]=0;
+ a[30]=0;
+ a[31]=0;
+ a[32]=0;
+ a[33]=0;
+ a[34]=0;
+ a[35]=0;
+ a[36]=0;
+ a[37]=0;
+ a[38]=0;
+ a[39]=0;
+ a[40]=0;
+ a[41]=0;
+ a[42]=0;
+ a[43]=0;
+ a[44]=0;
+ a[45]=0;
+ a[46]=0;
+ a[47]=0;
+ a[48]=0;
+ a[49]=0;
+ a[50]=0;
+ a[51]=0;
+ a[52]=0;
+ a[53]=0;
+ a[54]=0;
+ a[55]=0;
+ a[56]=0;
+ a[57]=0;
+ a[58]=0;
+ a[59]=0;
+ a[60]=0;
+ a[61]=0;
+ a[62]=0;
+ a[63]=0;
+ a[64]=0;
+ a[65]=0;
+ a[66]=0;
+ a[67]=0;
+ a[68]=0;
+ a[69]=0;
+ a[70]=0;
+ a[71]=0;
+ a[72]=0;
+ a[73]=0;
+ a[74]=0;
+ a[75]=0;
+ a[76]=0;
+ a[77]=0;
+ a[78]=0;
+ a[79]=0;
+ a[80]=0;
+ a[81]=0;
+ a[82]=0;
+ a[83]=0;
+ a[84]=0;
+ a[85]=0;
+ a[86]=0;
+ a[87]=0;
+ a[88]=0;
+ a[89]=0;
+ a[90]=0;
+ a[91]=0;
+ a[92]=0;
+ a[93]=0;
+ a[94]=0;
+ a[95]=0;
+ a[96]=0;
+ a[97]=0;
+ a[98]=0;
+ a[99]=0;
+ a[100]=0;
+ a[101]=0;
+ a[102]=0;
+ a[103]=0;
+ a[104]=0;
+ a[105]=0;
+ a[106]=0;
+ a[107]=0;
+ a[108]=0;
+ a[109]=0;
+ a[110]=0;
+ a[111]=0;
+ a[112]=0;
+ a[113]=0;
+ a[114]=0;
+ a[115]=0;
+ a[116]=0;
+ a[117]=0;
+ a[118]=0;
+ a[119]=0;
+ a[120]=0;
+ a[121]=0;
+ a[122]=0;
+ a[123]=0;
+ a[124]=0;
+ a[125]=0;
+ a[126]=0;
+ a[127]=0;
+ a[128]=0;
+ a[129]=0;
+ a[130]=0;
+ a[131]=0;
+ a[132]=0;
+ a[133]=0;
+ a[134]=0;
+ a[135]=0;
+ a[136]=0;
+ a[137]=0;
+ a[138]=0;
+ a[139]=0;
+ a[140]=0;
+ a[141]=0;
+ a[142]=0;
+ a[143]=0;
+ a[144]=0;
+ a[145]=0;
+ a[146]=0;
+ a[147]=0;
+ a[148]=0;
+ a[149]=0;
+ a[150]=0;
+ a[151]=0;
+ a[152]=0;
+ a[153]=0;
+ a[154]=0;
+ a[155]=0;
+ a[156]=0;
+ a[157]=0;
+ a[158]=0;
+ a[159]=0;
+ a[160]=0;
+ a[161]=0;
+ a[162]=0;
+ a[163]=0;
+ a[164]=0;
+ a[165]=0;
+ a[166]=0;
+ a[167]=0;
+ a[168]=0;
+ a[169]=0;
+ a[170]=0;
+ a[171]=0;
+ a[172]=0;
+ a[173]=0;
+ a[174]=0;
+ a[175]=0;
+ a[176]=0;
+ a[177]=0;
+ a[178]=0;
+ a[179]=0;
+ a[180]=0;
+ a[181]=0;
+ a[182]=0;
+ a[183]=0;
+ a[184]=0;
+ a[185]=0;
+ a[186]=0;
+ a[187]=0;
+ a[188]=0;
+ a[189]=0;
+ a[190]=0;
+ a[191]=0;
+ a[192]=0;
+ a[193]=0;
+ a[194]=0;
+ a[195]=0;
+ a[196]=0;
+ a[197]=0;
+ a[198]=0;
+ a[199]=0;
+ a[200]=0;
+ a[201]=0;
+ a[202]=0;
+ a[203]=0;
+ a[204]=0;
+ a[205]=0;
+ a[206]=0;
+ a[207]=0;
+ a[208]=0;
+ a[209]=0;
+ a[210]=0;
+ a[211]=0;
+ a[212]=0;
+ a[213]=0;
+ a[214]=0;
+ a[215]=0;
+ a[216]=0;
+ a[217]=0;
+ a[218]=0;
+ a[219]=0;
+ a[220]=0;
+ a[221]=0;
+ a[222]=0;
+ a[223]=0;
+ a[224]=0;
+ a[225]=0;
+ a[226]=0;
+ a[227]=0;
+ a[228]=0;
+ a[229]=0;
+ a[230]=0;
+ a[231]=0;
+ a[232]=0;
+ a[233]=0;
+ a[234]=0;
+ a[235]=0;
+ a[236]=0;
+ a[237]=0;
+ a[238]=0;
+ a[239]=0;
+ a[240]=0;
+ a[241]=0;
+ a[242]=0;
+ a[243]=0;
+ a[244]=0;
+ a[245]=0;
+ a[246]=0;
+ a[247]=0;
+ a[248]=0;
+ a[249]=0;
+ a[250]=0;
+ a[251]=0;
+ a[252]=0;
+ a[253]=0;
+ a[254]=0;
+ a[255]=0;
+ a[256]=0;
+ a[257]=0;
+ a[258]=0;
+ a[259]=0;
+ a[260]=0;
+ a[261]=0;
+ a[262]=0;
+ a[263]=0;
+ a[264]=0;
+ a[265]=0;
+ a[266]=0;
+ a[267]=0;
+ a[268]=0;
+ a[269]=0;
+ a[270]=0;
+ a[271]=0;
+ a[272]=0;
+ a[273]=0;
+ a[274]=0;
+ a[275]=0;
+ a[276]=0;
+ a[277]=0;
+ a[278]=0;
+ a[279]=0;
+ a[280]=0;
+ a[281]=0;
+ a[282]=0;
+ a[283]=0;
+ a[284]=0;
+ a[285]=0;
+ a[286]=0;
+ a[287]=0;
+ a[288]=0;
+ a[289]=0;
+ a[290]=0;
+ a[291]=0;
+ a[292]=0;
+ a[293]=0;
+ a[294]=0;
+ a[295]=0;
+ a[296]=0;
+ a[297]=0;
+ a[298]=0;
+ a[299]=0;
+ a[300]=0;
+ a[301]=0;
+ a[302]=0;
+ a[303]=0;
+ a[304]=0;
+ a[305]=0;
+ a[306]=0;
+ a[307]=0;
+ a[308]=0;
+ a[309]=0;
+ a[310]=0;
+ a[311]=0;
+ a[312]=0;
+ a[313]=0;
+ a[314]=0;
+ a[315]=0;
+ a[316]=0;
+ a[317]=0;
+ a[318]=0;
+ a[319]=0;
+ a[320]=0;
+ a[321]=0;
+ a[322]=0;
+ a[323]=0;
+ a[324]=0;
+ a[325]=0;
+ a[326]=0;
+ a[327]=0;
+ a[328]=0;
+ a[329]=0;
+ a[330]=0;
+ a[331]=0;
+ a[332]=0;
+ a[333]=0;
+ a[334]=0;
+ a[335]=0;
+ a[336]=0;
+ a[337]=0;
+ a[338]=0;
+ a[339]=0;
+ a[340]=0;
+ a[341]=0;
+ a[342]=0;
+ a[343]=0;
+ a[344]=0;
+ a[345]=0;
+ a[346]=0;
+ a[347]=0;
+ a[348]=0;
+ a[349]=0;
+ a[350]=0;
+ a[351]=0;
+ a[352]=0;
+ a[353]=0;
+ a[354]=0;
+ a[355]=0;
+ a[356]=0;
+ a[357]=0;
+ a[358]=0;
+ a[359]=0;
+ a[360]=0;
+ a[361]=0;
+ a[362]=0;
+ a[363]=0;
+ a[364]=0;
+ a[365]=0;
+ a[366]=0;
+ a[367]=0;
+ a[368]=0;
+ a[369]=0;
+ a[370]=0;
+ a[371]=0;
+ a[372]=0;
+ a[373]=0;
+ a[374]=0;
+ a[375]=0;
+ a[376]=0;
+ a[377]=0;
+ a[378]=0;
+ a[379]=0;
+ a[380]=0;
+ a[381]=0;
+ a[382]=0;
+ a[383]=0;
+ a[384]=0;
+ a[385]=0;
+ a[386]=0;
+ a[387]=0;
+ a[388]=0;
+ a[389]=0;
+ a[390]=0;
+ a[391]=0;
+ a[392]=0;
+ a[393]=0;
+ a[394]=0;
+ a[395]=0;
+ a[396]=0;
+ a[397]=0;
+ a[398]=0;
+ a[399]=0;
+ a[400]=0;
+ a[401]=0;
+ a[402]=0;
+ a[403]=0;
+ a[404]=0;
+ a[405]=0;
+ a[406]=0;
+ a[407]=0;
+ a[408]=0;
+ a[409]=0;
+ a[410]=0;
+ a[411]=0;
+ a[412]=0;
+ a[413]=0;
+ a[414]=0;
+ a[415]=0;
+ a[416]=0;
+ a[417]=0;
+ a[418]=0;
+ a[419]=0;
+ a[420]=0;
+ a[421]=0;
+ a[422]=0;
+ a[423]=0;
+ a[424]=0;
+ a[425]=0;
+ a[426]=0;
+ a[427]=0;
+ a[428]=0;
+ a[429]=0;
+ a[430]=0;
+ a[431]=0;
+ a[432]=0;
+ a[433]=0;
+ a[434]=0;
+ a[435]=0;
+ a[436]=0;
+ a[437]=0;
+ a[438]=0;
+ a[439]=0;
+ a[440]=0;
+ a[441]=0;
+ a[442]=0;
+ a[443]=0;
+ a[444]=0;
+ a[445]=0;
+ a[446]=0;
+ a[447]=0;
+ a[448]=0;
+ a[449]=0;
+ a[450]=0;
+ a[451]=0;
+ a[452]=0;
+ a[453]=0;
+ a[454]=0;
+ a[455]=0;
+ a[456]=0;
+ a[457]=0;
+ a[458]=0;
+ a[459]=0;
+ a[460]=0;
+ a[461]=0;
+ a[462]=0;
+ a[463]=0;
+ a[464]=0;
+ a[465]=0;
+ a[466]=0;
+ a[467]=0;
+ a[468]=0;
+ a[469]=0;
+ a[470]=0;
+ a[471]=0;
+ a[472]=0;
+ a[473]=0;
+ a[474]=0;
+ a[475]=0;
+ a[476]=0;
+ a[477]=0;
+ a[478]=0;
+ a[479]=0;
+ a[480]=0;
+ a[481]=0;
+ a[482]=0;
+ a[483]=0;
+ a[484]=0;
+ a[485]=0;
+ a[486]=0;
+ a[487]=0;
+ a[488]=0;
+ a[489]=0;
+ a[490]=0;
+ a[491]=0;
+ a[492]=0;
+ a[493]=0;
+ a[494]=0;
+ a[495]=0;
+ a[496]=0;
+ a[497]=0;
+ a[498]=0;
+ a[499]=0;
+ a[500]=0;
+ a[501]=0;
+ a[502]=0;
+ a[503]=0;
+ a[504]=0;
+ a[505]=0;
+ a[506]=0;
+ a[507]=0;
+ a[508]=0;
+ a[509]=0;
+ a[510]=0;
+ a[511]=0;
+ a[512]=0;
+ a[513]=0;
+ a[514]=0;
+ a[515]=0;
+ a[516]=0;
+ a[517]=0;
+ a[518]=0;
+ a[519]=0;
+ a[520]=0;
+ a[521]=0;
+ a[522]=0;
+ a[523]=0;
+ a[524]=0;
+ a[525]=0;
+ a[526]=0;
+ a[527]=0;
+ a[528]=0;
+ a[529]=0;
+ a[530]=0;
+ a[531]=0;
+ a[532]=0;
+ a[533]=0;
+ a[534]=0;
+ a[535]=0;
+ a[536]=0;
+ a[537]=0;
+ a[538]=0;
+ a[539]=0;
+ a[540]=0;
+ a[541]=0;
+ a[542]=0;
+ a[543]=0;
+ a[544]=0;
+ a[545]=0;
+ a[546]=0;
+ a[547]=0;
+ a[548]=0;
+ a[549]=0;
+ a[550]=0;
+ a[551]=0;
+ a[552]=0;
+ a[553]=0;
+ a[554]=0;
+ a[555]=0;
+ a[556]=0;
+ a[557]=0;
+ a[558]=0;
+ a[559]=0;
+ a[560]=0;
+ a[561]=0;
+ a[562]=0;
+ a[563]=0;
+ a[564]=0;
+ a[565]=0;
+ a[566]=0;
+ a[567]=0;
+ a[568]=0;
+ a[569]=0;
+ a[570]=0;
+ a[571]=0;
+ a[572]=0;
+ a[573]=0;
+ a[574]=0;
+ a[575]=0;
+ a[576]=0;
+ a[577]=0;
+ a[578]=0;
+ a[579]=0;
+ a[580]=0;
+ a[581]=0;
+ a[582]=0;
+ a[583]=0;
+ a[584]=0;
+ a[585]=0;
+ a[586]=0;
+ a[587]=0;
+ a[588]=0;
+ a[589]=0;
+ a[590]=0;
+ a[591]=0;
+ a[592]=0;
+ a[593]=0;
+ a[594]=0;
+ a[595]=0;
+ a[596]=0;
+ a[597]=0;
+ a[598]=0;
+ a[599]=0;
+ a[600]=0;
+ a[601]=0;
+ a[602]=0;
+ a[603]=0;
+ a[604]=0;
+ a[605]=0;
+ a[606]=0;
+ a[607]=0;
+ a[608]=0;
+ a[609]=0;
+ a[610]=0;
+ a[611]=0;
+ a[612]=0;
+ a[613]=0;
+ a[614]=0;
+ a[615]=0;
+ a[616]=0;
+ a[617]=0;
+ a[618]=0;
+ a[619]=0;
+ a[620]=0;
+ a[621]=0;
+ a[622]=0;
+ a[623]=0;
+ a[624]=0;
+ a[625]=0;
+ a[626]=0;
+ a[627]=0;
+ a[628]=0;
+ a[629]=0;
+ a[630]=0;
+ a[631]=0;
+ a[632]=0;
+ a[633]=0;
+ a[634]=0;
+ a[635]=0;
+ a[636]=0;
+ a[637]=0;
+ a[638]=0;
+ a[639]=0;
+ a[640]=0;
+ a[641]=0;
+ a[642]=0;
+ a[643]=0;
+ a[644]=0;
+ a[645]=0;
+ a[646]=0;
+ a[647]=0;
+ a[648]=0;
+ a[649]=0;
+ a[650]=0;
+ a[651]=0;
+ a[652]=0;
+ a[653]=0;
+ a[654]=0;
+ a[655]=0;
+ a[656]=0;
+ a[657]=0;
+ a[658]=0;
+ a[659]=0;
+ a[660]=0;
+ a[661]=0;
+ a[662]=0;
+ a[663]=0;
+ a[664]=0;
+ a[665]=0;
+ a[666]=0;
+ a[667]=0;
+ a[668]=0;
+ a[669]=0;
+ a[670]=0;
+ a[671]=0;
+ a[672]=0;
+ a[673]=0;
+ a[674]=0;
+ a[675]=0;
+ a[676]=0;
+ a[677]=0;
+ a[678]=0;
+ a[679]=0;
+ a[680]=0;
+ a[681]=0;
+ a[682]=0;
+ a[683]=0;
+ a[684]=0;
+ a[685]=0;
+ a[686]=0;
+ a[687]=0;
+ a[688]=0;
+ a[689]=0;
+ a[690]=0;
+ a[691]=0;
+ a[692]=0;
+ a[693]=0;
+ a[694]=0;
+ a[695]=0;
+ a[696]=0;
+ a[697]=0;
+ a[698]=0;
+ a[699]=0;
+ a[700]=0;
+ a[701]=0;
+ a[702]=0;
+ a[703]=0;
+ a[704]=0;
+ a[705]=0;
+ a[706]=0;
+ a[707]=0;
+ a[708]=0;
+ a[709]=0;
+ a[710]=0;
+ a[711]=0;
+ a[712]=0;
+ a[713]=0;
+ a[714]=0;
+ a[715]=0;
+ a[716]=0;
+ a[717]=0;
+ a[718]=0;
+ a[719]=0;
+ a[720]=0;
+ a[721]=0;
+ a[722]=0;
+ a[723]=0;
+ a[724]=0;
+ a[725]=0;
+ a[726]=0;
+ a[727]=0;
+ a[728]=0;
+ a[729]=0;
+ a[730]=0;
+ a[731]=0;
+ a[732]=0;
+ a[733]=0;
+ a[734]=0;
+ a[735]=0;
+ a[736]=0;
+ a[737]=0;
+ a[738]=0;
+ a[739]=0;
+ a[740]=0;
+ a[741]=0;
+ a[742]=0;
+ a[743]=0;
+ a[744]=0;
+ a[745]=0;
+ a[746]=0;
+ a[747]=0;
+ a[748]=0;
+ a[749]=0;
+ a[750]=0;
+ a[751]=0;
+ a[752]=0;
+ a[753]=0;
+ a[754]=0;
+ a[755]=0;
+ a[756]=0;
+ a[757]=0;
+ a[758]=0;
+ a[759]=0;
+ a[760]=0;
+ a[761]=0;
+ a[762]=0;
+ a[763]=0;
+ a[764]=0;
+ a[765]=0;
+ a[766]=0;
+ a[767]=0;
+ a[768]=0;
+ a[769]=0;
+ a[770]=0;
+ a[771]=0;
+ a[772]=0;
+ a[773]=0;
+ a[774]=0;
+ a[775]=0;
+ a[776]=0;
+ a[777]=0;
+ a[778]=0;
+ a[779]=0;
+ a[780]=0;
+ a[781]=0;
+ a[782]=0;
+ a[783]=0;
+ a[784]=0;
+ a[785]=0;
+ a[786]=0;
+ a[787]=0;
+ a[788]=0;
+ a[789]=0;
+ a[790]=0;
+ a[791]=0;
+ a[792]=0;
+ a[793]=0;
+ a[794]=0;
+ a[795]=0;
+ a[796]=0;
+ a[797]=0;
+ a[798]=0;
+ a[799]=0;
+ a[800]=0;
+ a[801]=0;
+ a[802]=0;
+ a[803]=0;
+ a[804]=0;
+ a[805]=0;
+ a[806]=0;
+ a[807]=0;
+ a[808]=0;
+ a[809]=0;
+ a[810]=0;
+ a[811]=0;
+ a[812]=0;
+ a[813]=0;
+ a[814]=0;
+ a[815]=0;
+ a[816]=0;
+ a[817]=0;
+ a[818]=0;
+ a[819]=0;
+ a[820]=0;
+ a[821]=0;
+ a[822]=0;
+ a[823]=0;
+ a[824]=0;
+ a[825]=0;
+ a[826]=0;
+ a[827]=0;
+ a[828]=0;
+ a[829]=0;
+ a[830]=0;
+ a[831]=0;
+ a[832]=0;
+ a[833]=0;
+ a[834]=0;
+ a[835]=0;
+ a[836]=0;
+ a[837]=0;
+ a[838]=0;
+ a[839]=0;
+ a[840]=0;
+ a[841]=0;
+ a[842]=0;
+ a[843]=0;
+ a[844]=0;
+ a[845]=0;
+ a[846]=0;
+ a[847]=0;
+ a[848]=0;
+ a[849]=0;
+ a[850]=0;
+ a[851]=0;
+ a[852]=0;
+ a[853]=0;
+ a[854]=0;
+ a[855]=0;
+ a[856]=0;
+ a[857]=0;
+ a[858]=0;
+ a[859]=0;
+ a[860]=0;
+ a[861]=0;
+ a[862]=0;
+ a[863]=0;
+ a[864]=0;
+ a[865]=0;
+ a[866]=0;
+ a[867]=0;
+ a[868]=0;
+ a[869]=0;
+ a[870]=0;
+ a[871]=0;
+ a[872]=0;
+ a[873]=0;
+ a[874]=0;
+ a[875]=0;
+ a[876]=0;
+ a[877]=0;
+ a[878]=0;
+ a[879]=0;
+ a[880]=0;
+ a[881]=0;
+ a[882]=0;
+ a[883]=0;
+ a[884]=0;
+ a[885]=0;
+ a[886]=0;
+ a[887]=0;
+ a[888]=0;
+ a[889]=0;
+ a[890]=0;
+ a[891]=0;
+ a[892]=0;
+ a[893]=0;
+ a[894]=0;
+ a[895]=0;
+ a[896]=0;
+ a[897]=0;
+ a[898]=0;
+ a[899]=0;
+ a[900]=0;
+ a[901]=0;
+ a[902]=0;
+ a[903]=0;
+ a[904]=0;
+ a[905]=0;
+ a[906]=0;
+ a[907]=0;
+ a[908]=0;
+ a[909]=0;
+ a[910]=0;
+ a[911]=0;
+ a[912]=0;
+ a[913]=0;
+ a[914]=0;
+ a[915]=0;
+ a[916]=0;
+ a[917]=0;
+ a[918]=0;
+ a[919]=0;
+ a[920]=0;
+ a[921]=0;
+ a[922]=0;
+ a[923]=0;
+ a[924]=0;
+ a[925]=0;
+ a[926]=0;
+ a[927]=0;
+ a[928]=0;
+ a[929]=0;
+ a[930]=0;
+ a[931]=0;
+ a[932]=0;
+ a[933]=0;
+ a[934]=0;
+ a[935]=0;
+ a[936]=0;
+ a[937]=0;
+ a[938]=0;
+ a[939]=0;
+ a[940]=0;
+ a[941]=0;
+ a[942]=0;
+ a[943]=0;
+ a[944]=0;
+ a[945]=0;
+ a[946]=0;
+ a[947]=0;
+ a[948]=0;
+ a[949]=0;
+ a[950]=0;
+ a[951]=0;
+ a[952]=0;
+ a[953]=0;
+ a[954]=0;
+ a[955]=0;
+ a[956]=0;
+ a[957]=0;
+ a[958]=0;
+ a[959]=0;
+ a[960]=0;
+ a[961]=0;
+ a[962]=0;
+ a[963]=0;
+ a[964]=0;
+ a[965]=0;
+ a[966]=0;
+ a[967]=0;
+ a[968]=0;
+ a[969]=0;
+ a[970]=0;
+ a[971]=0;
+ a[972]=0;
+ a[973]=0;
+ a[974]=0;
+ a[975]=0;
+ a[976]=0;
+ a[977]=0;
+ a[978]=0;
+ a[979]=0;
+ a[980]=0;
+ a[981]=0;
+ a[982]=0;
+ a[983]=0;
+ a[984]=0;
+ a[985]=0;
+ a[986]=0;
+ a[987]=0;
+ a[988]=0;
+ a[989]=0;
+ a[990]=0;
+ a[991]=0;
+ a[992]=0;
+ a[993]=0;
+ a[994]=0;
+ a[995]=0;
+ a[996]=0;
+ a[997]=0;
+ a[998]=0;
+ a[999]=0;
+ a[1000]=0;
+ a[1001]=0;
+ a[1002]=0;
+ a[1003]=0;
+ a[1004]=0;
+ a[1005]=0;
+ a[1006]=0;
+ a[1007]=0;
+ a[1008]=0;
+ a[1009]=0;
+ a[1010]=0;
+ a[1011]=0;
+ a[1012]=0;
+ a[1013]=0;
+ a[1014]=0;
+ a[1015]=0;
+ a[1016]=0;
+ a[1017]=0;
+ a[1018]=0;
+ a[1019]=0;
+ a[1020]=0;
+ a[1021]=0;
+ a[1022]=0;
+ a[1023]=0;
+ a[1024]=0;
+ a[1025]=0;
+ a[1026]=0;
+ a[1027]=0;
+ a[1028]=0;
+ a[1029]=0;
+ a[1030]=0;
+ a[1031]=0;
+ a[1032]=0;
+ a[1033]=0;
+ a[1034]=0;
+ a[1035]=0;
+ a[1036]=0;
+ a[1037]=0;
+ a[1038]=0;
+ a[1039]=0;
+ a[1040]=0;
+ a[1041]=0;
+ a[1042]=0;
+ a[1043]=0;
+ a[1044]=0;
+ a[1045]=0;
+ a[1046]=0;
+ a[1047]=0;
+ a[1048]=0;
+ a[1049]=0;
+ a[1050]=0;
+ a[1051]=0;
+ a[1052]=0;
+ a[1053]=0;
+ a[1054]=0;
+ a[1055]=0;
+ a[1056]=0;
+ a[1057]=0;
+ a[1058]=0;
+ a[1059]=0;
+ a[1060]=0;
+ a[1061]=0;
+ a[1062]=0;
+ a[1063]=0;
+ a[1064]=0;
+ a[1065]=0;
+ a[1066]=0;
+ a[1067]=0;
+ a[1068]=0;
+ a[1069]=0;
+ a[1070]=0;
+ a[1071]=0;
+ a[1072]=0;
+ a[1073]=0;
+ a[1074]=0;
+ a[1075]=0;
+ a[1076]=0;
+ a[1077]=0;
+ a[1078]=0;
+ a[1079]=0;
+ a[1080]=0;
+ a[1081]=0;
+ a[1082]=0;
+ a[1083]=0;
+ a[1084]=0;
+ a[1085]=0;
+ a[1086]=0;
+ a[1087]=0;
+ a[1088]=0;
+ a[1089]=0;
+ a[1090]=0;
+ a[1091]=0;
+ a[1092]=0;
+ a[1093]=0;
+ a[1094]=0;
+ a[1095]=0;
+ a[1096]=0;
+ a[1097]=0;
+ a[1098]=0;
+ a[1099]=0;
+ a[1100]=0;
+ a[1101]=0;
+ a[1102]=0;
+ a[1103]=0;
+ a[1104]=0;
+ a[1105]=0;
+ a[1106]=0;
+ a[1107]=0;
+ a[1108]=0;
+ a[1109]=0;
+ a[1110]=0;
+ a[1111]=0;
+ a[1112]=0;
+ a[1113]=0;
+ a[1114]=0;
+ a[1115]=0;
+ a[1116]=0;
+ a[1117]=0;
+ a[1118]=0;
+ a[1119]=0;
+ a[1120]=0;
+ a[1121]=0;
+ a[1122]=0;
+ a[1123]=0;
+ a[1124]=0;
+ a[1125]=0;
+ a[1126]=0;
+ a[1127]=0;
+ a[1128]=0;
+ a[1129]=0;
+ a[1130]=0;
+ a[1131]=0;
+ a[1132]=0;
+ a[1133]=0;
+ a[1134]=0;
+ a[1135]=0;
+ a[1136]=0;
+ a[1137]=0;
+ a[1138]=0;
+ a[1139]=0;
+ a[1140]=0;
+ a[1141]=0;
+ a[1142]=0;
+ a[1143]=0;
+ a[1144]=0;
+ a[1145]=0;
+ a[1146]=0;
+ a[1147]=0;
+ a[1148]=0;
+ a[1149]=0;
+ a[1150]=0;
+ a[1151]=0;
+ a[1152]=0;
+ a[1153]=0;
+ a[1154]=0;
+ a[1155]=0;
+ a[1156]=0;
+ a[1157]=0;
+ a[1158]=0;
+ a[1159]=0;
+ a[1160]=0;
+ a[1161]=0;
+ a[1162]=0;
+ a[1163]=0;
+ a[1164]=0;
+ a[1165]=0;
+ a[1166]=0;
+ a[1167]=0;
+ a[1168]=0;
+ a[1169]=0;
+ a[1170]=0;
+ a[1171]=0;
+ a[1172]=0;
+ a[1173]=0;
+ a[1174]=0;
+ a[1175]=0;
+ a[1176]=0;
+ a[1177]=0;
+ a[1178]=0;
+ a[1179]=0;
+ a[1180]=0;
+ a[1181]=0;
+ a[1182]=0;
+ a[1183]=0;
+ a[1184]=0;
+ a[1185]=0;
+ a[1186]=0;
+ a[1187]=0;
+ a[1188]=0;
+ a[1189]=0;
+ a[1190]=0;
+ a[1191]=0;
+ a[1192]=0;
+ a[1193]=0;
+ a[1194]=0;
+ a[1195]=0;
+ a[1196]=0;
+ a[1197]=0;
+ a[1198]=0;
+ a[1199]=0;
+ a[1200]=0;
+ a[1201]=0;
+ a[1202]=0;
+ a[1203]=0;
+ a[1204]=0;
+ a[1205]=0;
+ a[1206]=0;
+ a[1207]=0;
+ a[1208]=0;
+ a[1209]=0;
+ a[1210]=0;
+ a[1211]=0;
+ a[1212]=0;
+ a[1213]=0;
+ a[1214]=0;
+ a[1215]=0;
+ a[1216]=0;
+ a[1217]=0;
+ a[1218]=0;
+ a[1219]=0;
+ a[1220]=0;
+ a[1221]=0;
+ a[1222]=0;
+ a[1223]=0;
+ a[1224]=0;
+ a[1225]=0;
+ a[1226]=0;
+ a[1227]=0;
+ a[1228]=0;
+ a[1229]=0;
+ a[1230]=0;
+ a[1231]=0;
+ a[1232]=0;
+ a[1233]=0;
+ a[1234]=0;
+ a[1235]=0;
+ a[1236]=0;
+ a[1237]=0;
+ a[1238]=0;
+ a[1239]=0;
+ a[1240]=0;
+ a[1241]=0;
+ a[1242]=0;
+ a[1243]=0;
+ a[1244]=0;
+ a[1245]=0;
+ a[1246]=0;
+ a[1247]=0;
+ a[1248]=0;
+ a[1249]=0;
+ a[1250]=0;
+ a[1251]=0;
+ a[1252]=0;
+ a[1253]=0;
+ a[1254]=0;
+ a[1255]=0;
+ a[1256]=0;
+ a[1257]=0;
+ a[1258]=0;
+ a[1259]=0;
+ a[1260]=0;
+ a[1261]=0;
+ a[1262]=0;
+ a[1263]=0;
+ a[1264]=0;
+ a[1265]=0;
+ a[1266]=0;
+ a[1267]=0;
+ a[1268]=0;
+ a[1269]=0;
+ a[1270]=0;
+ a[1271]=0;
+ a[1272]=0;
+ a[1273]=0;
+ a[1274]=0;
+ a[1275]=0;
+ a[1276]=0;
+ a[1277]=0;
+ a[1278]=0;
+ a[1279]=0;
+ a[1280]=0;
+ a[1281]=0;
+ a[1282]=0;
+ a[1283]=0;
+ a[1284]=0;
+ a[1285]=0;
+ a[1286]=0;
+ a[1287]=0;
+ a[1288]=0;
+ a[1289]=0;
+ a[1290]=0;
+ a[1291]=0;
+ a[1292]=0;
+ a[1293]=0;
+ a[1294]=0;
+ a[1295]=0;
+ a[1296]=0;
+ a[1297]=0;
+ a[1298]=0;
+ a[1299]=0;
+ a[1300]=0;
+ a[1301]=0;
+ a[1302]=0;
+ a[1303]=0;
+ a[1304]=0;
+ a[1305]=0;
+ a[1306]=0;
+ a[1307]=0;
+ a[1308]=0;
+ a[1309]=0;
+ a[1310]=0;
+ a[1311]=0;
+ a[1312]=0;
+ a[1313]=0;
+ a[1314]=0;
+ a[1315]=0;
+ a[1316]=0;
+ a[1317]=0;
+ a[1318]=0;
+ a[1319]=0;
+ a[1320]=0;
+ a[1321]=0;
+ a[1322]=0;
+ a[1323]=0;
+ a[1324]=0;
+ a[1325]=0;
+ a[1326]=0;
+ a[1327]=0;
+ a[1328]=0;
+ a[1329]=0;
+ a[1330]=0;
+ a[1331]=0;
+ a[1332]=0;
+ a[1333]=0;
+ a[1334]=0;
+ a[1335]=0;
+ a[1336]=0;
+ a[1337]=0;
+ a[1338]=0;
+ a[1339]=0;
+ a[1340]=0;
+ a[1341]=0;
+ a[1342]=0;
+ a[1343]=0;
+ a[1344]=0;
+ a[1345]=0;
+ a[1346]=0;
+ a[1347]=0;
+ a[1348]=0;
+ a[1349]=0;
+ a[1350]=0;
+ a[1351]=0;
+ a[1352]=0;
+ a[1353]=0;
+ a[1354]=0;
+ a[1355]=0;
+ a[1356]=0;
+ a[1357]=0;
+ a[1358]=0;
+ a[1359]=0;
+ a[1360]=0;
+ a[1361]=0;
+ a[1362]=0;
+ a[1363]=0;
+ a[1364]=0;
+ a[1365]=0;
+ a[1366]=0;
+ a[1367]=0;
+ a[1368]=0;
+ a[1369]=0;
+ a[1370]=0;
+ a[1371]=0;
+ a[1372]=0;
+ a[1373]=0;
+ a[1374]=0;
+ a[1375]=0;
+ a[1376]=0;
+ a[1377]=0;
+ a[1378]=0;
+ a[1379]=0;
+ a[1380]=0;
+ a[1381]=0;
+ a[1382]=0;
+ a[1383]=0;
+ a[1384]=0;
+ a[1385]=0;
+ a[1386]=0;
+ a[1387]=0;
+ a[1388]=0;
+ a[1389]=0;
+ a[1390]=0;
+ a[1391]=0;
+ a[1392]=0;
+ a[1393]=0;
+ a[1394]=0;
+ a[1395]=0;
+ a[1396]=0;
+ a[1397]=0;
+ a[1398]=0;
+ a[1399]=0;
+ a[1400]=0;
+ a[1401]=0;
+ a[1402]=0;
+ a[1403]=0;
+ a[1404]=0;
+ a[1405]=0;
+ a[1406]=0;
+ a[1407]=0;
+ a[1408]=0;
+ a[1409]=0;
+ a[1410]=0;
+ a[1411]=0;
+ a[1412]=0;
+ a[1413]=0;
+ a[1414]=0;
+ a[1415]=0;
+ a[1416]=0;
+ a[1417]=0;
+ a[1418]=0;
+ a[1419]=0;
+ a[1420]=0;
+ a[1421]=0;
+ a[1422]=0;
+ a[1423]=0;
+ a[1424]=0;
+ a[1425]=0;
+ a[1426]=0;
+ a[1427]=0;
+ a[1428]=0;
+ a[1429]=0;
+ a[1430]=0;
+ a[1431]=0;
+ a[1432]=0;
+ a[1433]=0;
+ a[1434]=0;
+ a[1435]=0;
+ a[1436]=0;
+ a[1437]=0;
+ a[1438]=0;
+ a[1439]=0;
+ a[1440]=0;
+ a[1441]=0;
+ a[1442]=0;
+ a[1443]=0;
+ a[1444]=0;
+ a[1445]=0;
+ a[1446]=0;
+ a[1447]=0;
+ a[1448]=0;
+ a[1449]=0;
+ a[1450]=0;
+ a[1451]=0;
+ a[1452]=0;
+ a[1453]=0;
+ a[1454]=0;
+ a[1455]=0;
+ a[1456]=0;
+ a[1457]=0;
+ a[1458]=0;
+ a[1459]=0;
+ a[1460]=0;
+ a[1461]=0;
+ a[1462]=0;
+ a[1463]=0;
+ a[1464]=0;
+ a[1465]=0;
+ a[1466]=0;
+ a[1467]=0;
+ a[1468]=0;
+ a[1469]=0;
+ a[1470]=0;
+ a[1471]=0;
+ a[1472]=0;
+ a[1473]=0;
+ a[1474]=0;
+ a[1475]=0;
+ a[1476]=0;
+ a[1477]=0;
+ a[1478]=0;
+ a[1479]=0;
+ a[1480]=0;
+ a[1481]=0;
+ a[1482]=0;
+ a[1483]=0;
+ a[1484]=0;
+ a[1485]=0;
+ a[1486]=0;
+ a[1487]=0;
+ a[1488]=0;
+ a[1489]=0;
+ a[1490]=0;
+ a[1491]=0;
+ a[1492]=0;
+ a[1493]=0;
+ a[1494]=0;
+ a[1495]=0;
+ a[1496]=0;
+ a[1497]=0;
+ a[1498]=0;
+ a[1499]=0;
+ a[1500]=0;
+ a[1501]=0;
+ a[1502]=0;
+ a[1503]=0;
+ a[1504]=0;
+ a[1505]=0;
+ a[1506]=0;
+ a[1507]=0;
+ a[1508]=0;
+ a[1509]=0;
+ a[1510]=0;
+ a[1511]=0;
+ a[1512]=0;
+ a[1513]=0;
+ a[1514]=0;
+ a[1515]=0;
+ a[1516]=0;
+ a[1517]=0;
+ a[1518]=0;
+ a[1519]=0;
+ a[1520]=0;
+ a[1521]=0;
+ a[1522]=0;
+ a[1523]=0;
+ a[1524]=0;
+ a[1525]=0;
+ a[1526]=0;
+ a[1527]=0;
+ a[1528]=0;
+ a[1529]=0;
+ a[1530]=0;
+ a[1531]=0;
+ a[1532]=0;
+ a[1533]=0;
+ a[1534]=0;
+ a[1535]=0;
+ a[1536]=0;
+ a[1537]=0;
+ a[1538]=0;
+ a[1539]=0;
+ a[1540]=0;
+ a[1541]=0;
+ a[1542]=0;
+ a[1543]=0;
+ a[1544]=0;
+ a[1545]=0;
+ a[1546]=0;
+ a[1547]=0;
+ a[1548]=0;
+ a[1549]=0;
+ a[1550]=0;
+ a[1551]=0;
+ a[1552]=0;
+ a[1553]=0;
+ a[1554]=0;
+ a[1555]=0;
+ a[1556]=0;
+ a[1557]=0;
+ a[1558]=0;
+ a[1559]=0;
+ a[1560]=0;
+ a[1561]=0;
+ a[1562]=0;
+ a[1563]=0;
+ a[1564]=0;
+ a[1565]=0;
+ a[1566]=0;
+ a[1567]=0;
+ a[1568]=0;
+ a[1569]=0;
+ a[1570]=0;
+ a[1571]=0;
+ a[1572]=0;
+ a[1573]=0;
+ a[1574]=0;
+ a[1575]=0;
+ a[1576]=0;
+ a[1577]=0;
+ a[1578]=0;
+ a[1579]=0;
+ a[1580]=0;
+ a[1581]=0;
+ a[1582]=0;
+ a[1583]=0;
+ a[1584]=0;
+ a[1585]=0;
+ a[1586]=0;
+ a[1587]=0;
+ a[1588]=0;
+ a[1589]=0;
+ a[1590]=0;
+ a[1591]=0;
+ a[1592]=0;
+ a[1593]=0;
+ a[1594]=0;
+ a[1595]=0;
+ a[1596]=0;
+ a[1597]=0;
+ a[1598]=0;
+ a[1599]=0;
+ a[1600]=0;
+ a[1601]=0;
+ a[1602]=0;
+ a[1603]=0;
+ a[1604]=0;
+ a[1605]=0;
+ a[1606]=0;
+ a[1607]=0;
+ a[1608]=0;
+ a[1609]=0;
+ a[1610]=0;
+ a[1611]=0;
+ a[1612]=0;
+ a[1613]=0;
+ a[1614]=0;
+ a[1615]=0;
+ a[1616]=0;
+ a[1617]=0;
+ a[1618]=0;
+ a[1619]=0;
+ a[1620]=0;
+ a[1621]=0;
+ a[1622]=0;
+ a[1623]=0;
+ a[1624]=0;
+ a[1625]=0;
+ a[1626]=0;
+ a[1627]=0;
+ a[1628]=0;
+ a[1629]=0;
+ a[1630]=0;
+ a[1631]=0;
+ a[1632]=0;
+ a[1633]=0;
+ a[1634]=0;
+ a[1635]=0;
+ a[1636]=0;
+ a[1637]=0;
+ a[1638]=0;
+ a[1639]=0;
+ a[1640]=0;
+ a[1641]=0;
+ a[1642]=0;
+ a[1643]=0;
+ a[1644]=0;
+ a[1645]=0;
+ a[1646]=0;
+ a[1647]=0;
+ a[1648]=0;
+ a[1649]=0;
+ a[1650]=0;
+ a[1651]=0;
+ a[1652]=0;
+ a[1653]=0;
+ a[1654]=0;
+ a[1655]=0;
+ a[1656]=0;
+ a[1657]=0;
+ a[1658]=0;
+ a[1659]=0;
+ a[1660]=0;
+ a[1661]=0;
+ a[1662]=0;
+ a[1663]=0;
+ a[1664]=0;
+ a[1665]=0;
+ a[1666]=0;
+ a[1667]=0;
+ a[1668]=0;
+ a[1669]=0;
+ a[1670]=0;
+ a[1671]=0;
+ a[1672]=0;
+ a[1673]=0;
+ a[1674]=0;
+ a[1675]=0;
+ a[1676]=0;
+ a[1677]=0;
+ a[1678]=0;
+ a[1679]=0;
+ a[1680]=0;
+ a[1681]=0;
+ a[1682]=0;
+ a[1683]=0;
+ a[1684]=0;
+ a[1685]=0;
+ a[1686]=0;
+ a[1687]=0;
+ a[1688]=0;
+ a[1689]=0;
+ a[1690]=0;
+ a[1691]=0;
+ a[1692]=0;
+ a[1693]=0;
+ a[1694]=0;
+ a[1695]=0;
+ a[1696]=0;
+ a[1697]=0;
+ a[1698]=0;
+ a[1699]=0;
+ a[1700]=0;
+ a[1701]=0;
+ a[1702]=0;
+ a[1703]=0;
+ a[1704]=0;
+ a[1705]=0;
+ a[1706]=0;
+ a[1707]=0;
+ a[1708]=0;
+ a[1709]=0;
+ a[1710]=0;
+ a[1711]=0;
+ a[1712]=0;
+ a[1713]=0;
+ a[1714]=0;
+ a[1715]=0;
+ a[1716]=0;
+ a[1717]=0;
+ a[1718]=0;
+ a[1719]=0;
+ a[1720]=0;
+ a[1721]=0;
+ a[1722]=0;
+ a[1723]=0;
+ a[1724]=0;
+ a[1725]=0;
+ a[1726]=0;
+ a[1727]=0;
+ a[1728]=0;
+ a[1729]=0;
+ a[1730]=0;
+ a[1731]=0;
+ a[1732]=0;
+ a[1733]=0;
+ a[1734]=0;
+ a[1735]=0;
+ a[1736]=0;
+ a[1737]=0;
+ a[1738]=0;
+ a[1739]=0;
+ a[1740]=0;
+ a[1741]=0;
+ a[1742]=0;
+ a[1743]=0;
+ a[1744]=0;
+ a[1745]=0;
+ a[1746]=0;
+ a[1747]=0;
+ a[1748]=0;
+ a[1749]=0;
+ a[1750]=0;
+ a[1751]=0;
+ a[1752]=0;
+ a[1753]=0;
+ a[1754]=0;
+ a[1755]=0;
+ a[1756]=0;
+ a[1757]=0;
+ a[1758]=0;
+ a[1759]=0;
+ a[1760]=0;
+ a[1761]=0;
+ a[1762]=0;
+ a[1763]=0;
+ a[1764]=0;
+ a[1765]=0;
+ a[1766]=0;
+ a[1767]=0;
+ a[1768]=0;
+ a[1769]=0;
+ a[1770]=0;
+ a[1771]=0;
+ a[1772]=0;
+ a[1773]=0;
+ a[1774]=0;
+ a[1775]=0;
+ a[1776]=0;
+ a[1777]=0;
+ a[1778]=0;
+ a[1779]=0;
+ a[1780]=0;
+ a[1781]=0;
+ a[1782]=0;
+ a[1783]=0;
+ a[1784]=0;
+ a[1785]=0;
+ a[1786]=0;
+ a[1787]=0;
+ a[1788]=0;
+ a[1789]=0;
+ a[1790]=0;
+ a[1791]=0;
+ a[1792]=0;
+ a[1793]=0;
+ a[1794]=0;
+ a[1795]=0;
+ a[1796]=0;
+ a[1797]=0;
+ a[1798]=0;
+ a[1799]=0;
+ a[1800]=0;
+ a[1801]=0;
+ a[1802]=0;
+ a[1803]=0;
+ a[1804]=0;
+ a[1805]=0;
+ a[1806]=0;
+ a[1807]=0;
+ a[1808]=0;
+ a[1809]=0;
+ a[1810]=0;
+ a[1811]=0;
+ a[1812]=0;
+ a[1813]=0;
+ a[1814]=0;
+ a[1815]=0;
+ a[1816]=0;
+ a[1817]=0;
+ a[1818]=0;
+ a[1819]=0;
+ a[1820]=0;
+ a[1821]=0;
+ a[1822]=0;
+ a[1823]=0;
+ a[1824]=0;
+ a[1825]=0;
+ a[1826]=0;
+ a[1827]=0;
+ a[1828]=0;
+ a[1829]=0;
+ a[1830]=0;
+ a[1831]=0;
+ a[1832]=0;
+ a[1833]=0;
+ a[1834]=0;
+ a[1835]=0;
+ a[1836]=0;
+ a[1837]=0;
+ a[1838]=0;
+ a[1839]=0;
+ a[1840]=0;
+ a[1841]=0;
+ a[1842]=0;
+ a[1843]=0;
+ a[1844]=0;
+ a[1845]=0;
+ a[1846]=0;
+ a[1847]=0;
+ a[1848]=0;
+ a[1849]=0;
+ a[1850]=0;
+ a[1851]=0;
+ a[1852]=0;
+ a[1853]=0;
+ a[1854]=0;
+ a[1855]=0;
+ a[1856]=0;
+ a[1857]=0;
+ a[1858]=0;
+ a[1859]=0;
+ a[1860]=0;
+ a[1861]=0;
+ a[1862]=0;
+ a[1863]=0;
+ a[1864]=0;
+ a[1865]=0;
+ a[1866]=0;
+ a[1867]=0;
+ a[1868]=0;
+ a[1869]=0;
+ a[1870]=0;
+ a[1871]=0;
+ a[1872]=0;
+ a[1873]=0;
+ a[1874]=0;
+ a[1875]=0;
+ a[1876]=0;
+ a[1877]=0;
+ a[1878]=0;
+ a[1879]=0;
+ a[1880]=0;
+ a[1881]=0;
+ a[1882]=0;
+ a[1883]=0;
+ a[1884]=0;
+ a[1885]=0;
+ a[1886]=0;
+ a[1887]=0;
+ a[1888]=0;
+ a[1889]=0;
+ a[1890]=0;
+ a[1891]=0;
+ a[1892]=0;
+ a[1893]=0;
+ a[1894]=0;
+ a[1895]=0;
+ a[1896]=0;
+ a[1897]=0;
+ a[1898]=0;
+ a[1899]=0;
+ a[1900]=0;
+ a[1901]=0;
+ a[1902]=0;
+ a[1903]=0;
+ a[1904]=0;
+ a[1905]=0;
+ a[1906]=0;
+ a[1907]=0;
+ a[1908]=0;
+ a[1909]=0;
+ a[1910]=0;
+ a[1911]=0;
+ a[1912]=0;
+ a[1913]=0;
+ a[1914]=0;
+ a[1915]=0;
+ a[1916]=0;
+ a[1917]=0;
+ a[1918]=0;
+ a[1919]=0;
+ a[1920]=0;
+ a[1921]=0;
+ a[1922]=0;
+ a[1923]=0;
+ a[1924]=0;
+ a[1925]=0;
+ a[1926]=0;
+ a[1927]=0;
+ a[1928]=0;
+ a[1929]=0;
+ a[1930]=0;
+ a[1931]=0;
+ a[1932]=0;
+ a[1933]=0;
+ a[1934]=0;
+ a[1935]=0;
+ a[1936]=0;
+ a[1937]=0;
+ a[1938]=0;
+ a[1939]=0;
+ a[1940]=0;
+ a[1941]=0;
+ a[1942]=0;
+ a[1943]=0;
+ a[1944]=0;
+ a[1945]=0;
+ a[1946]=0;
+ a[1947]=0;
+ a[1948]=0;
+ a[1949]=0;
+ a[1950]=0;
+ a[1951]=0;
+ a[1952]=0;
+ a[1953]=0;
+ a[1954]=0;
+ a[1955]=0;
+ a[1956]=0;
+ a[1957]=0;
+ a[1958]=0;
+ a[1959]=0;
+ a[1960]=0;
+ a[1961]=0;
+ a[1962]=0;
+ a[1963]=0;
+ a[1964]=0;
+ a[1965]=0;
+ a[1966]=0;
+ a[1967]=0;
+ a[1968]=0;
+ a[1969]=0;
+ a[1970]=0;
+ a[1971]=0;
+ a[1972]=0;
+ a[1973]=0;
+ a[1974]=0;
+ a[1975]=0;
+ a[1976]=0;
+ a[1977]=0;
+ a[1978]=0;
+ a[1979]=0;
+ a[1980]=0;
+ a[1981]=0;
+ a[1982]=0;
+ a[1983]=0;
+ a[1984]=0;
+ a[1985]=0;
+ a[1986]=0;
+ a[1987]=0;
+ a[1988]=0;
+ a[1989]=0;
+ a[1990]=0;
+ a[1991]=0;
+ a[1992]=0;
+ a[1993]=0;
+ a[1994]=0;
+ a[1995]=0;
+ a[1996]=0;
+ a[1997]=0;
+ a[1998]=0;
+ a[1999]=0;
+ a[2000]=0;
+ a[2001]=0;
+ a[2002]=0;
+ a[2003]=0;
+ a[2004]=0;
+ a[2005]=0;
+ a[2006]=0;
+ a[2007]=0;
+ a[2008]=0;
+ a[2009]=0;
+ a[2010]=0;
+ a[2011]=0;
+ a[2012]=0;
+ a[2013]=0;
+ a[2014]=0;
+ a[2015]=0;
+ a[2016]=0;
+ a[2017]=0;
+ a[2018]=0;
+ a[2019]=0;
+ a[2020]=0;
+ a[2021]=0;
+ a[2022]=0;
+ a[2023]=0;
+ a[2024]=0;
+ a[2025]=0;
+ a[2026]=0;
+ a[2027]=0;
+ a[2028]=0;
+ a[2029]=0;
+ a[2030]=0;
+ a[2031]=0;
+ a[2032]=0;
+ a[2033]=0;
+ a[2034]=0;
+ a[2035]=0;
+ a[2036]=0;
+ a[2037]=0;
+ a[2038]=0;
+ a[2039]=0;
+ a[2040]=0;
+ a[2041]=0;
+ a[2042]=0;
+ a[2043]=0;
+ a[2044]=0;
+ a[2045]=0;
+ a[2046]=0;
+ a[2047]=0;
+ a[2048]=0;
+ a[2049]=0;
+ a[2050]=0;
+ a[2051]=0;
+ a[2052]=0;
+ a[2053]=0;
+ a[2054]=0;
+ a[2055]=0;
+ a[2056]=0;
+ a[2057]=0;
+ a[2058]=0;
+ a[2059]=0;
+ a[2060]=0;
+ a[2061]=0;
+ a[2062]=0;
+ a[2063]=0;
+ a[2064]=0;
+ a[2065]=0;
+ a[2066]=0;
+ a[2067]=0;
+ a[2068]=0;
+ a[2069]=0;
+ a[2070]=0;
+ a[2071]=0;
+ a[2072]=0;
+ a[2073]=0;
+ a[2074]=0;
+ a[2075]=0;
+ a[2076]=0;
+ a[2077]=0;
+ a[2078]=0;
+ a[2079]=0;
+ a[2080]=0;
+ a[2081]=0;
+ a[2082]=0;
+ a[2083]=0;
+ a[2084]=0;
+ a[2085]=0;
+ a[2086]=0;
+ a[2087]=0;
+ a[2088]=0;
+ a[2089]=0;
+ a[2090]=0;
+ a[2091]=0;
+ a[2092]=0;
+ a[2093]=0;
+ a[2094]=0;
+ a[2095]=0;
+ a[2096]=0;
+ a[2097]=0;
+ a[2098]=0;
+ a[2099]=0;
+ a[2100]=0;
+ a[2101]=0;
+ a[2102]=0;
+ a[2103]=0;
+ a[2104]=0;
+ a[2105]=0;
+ a[2106]=0;
+ a[2107]=0;
+ a[2108]=0;
+ a[2109]=0;
+ a[2110]=0;
+ a[2111]=0;
+ a[2112]=0;
+ a[2113]=0;
+ a[2114]=0;
+ a[2115]=0;
+ a[2116]=0;
+ a[2117]=0;
+ a[2118]=0;
+ a[2119]=0;
+ a[2120]=0;
+ a[2121]=0;
+ a[2122]=0;
+ a[2123]=0;
+ a[2124]=0;
+ a[2125]=0;
+ a[2126]=0;
+ a[2127]=0;
+ a[2128]=0;
+ a[2129]=0;
+ a[2130]=0;
+ a[2131]=0;
+ a[2132]=0;
+ a[2133]=0;
+ a[2134]=0;
+ a[2135]=0;
+ a[2136]=0;
+ a[2137]=0;
+ a[2138]=0;
+ a[2139]=0;
+ a[2140]=0;
+ a[2141]=0;
+ a[2142]=0;
+ a[2143]=0;
+ a[2144]=0;
+ a[2145]=0;
+ a[2146]=0;
+ a[2147]=0;
+ a[2148]=0;
+ a[2149]=0;
+ a[2150]=0;
+ a[2151]=0;
+ a[2152]=0;
+ a[2153]=0;
+ a[2154]=0;
+ a[2155]=0;
+ a[2156]=0;
+ a[2157]=0;
+ a[2158]=0;
+ a[2159]=0;
+ a[2160]=0;
+ a[2161]=0;
+ a[2162]=0;
+ a[2163]=0;
+ a[2164]=0;
+ a[2165]=0;
+ a[2166]=0;
+ a[2167]=0;
+ a[2168]=0;
+ a[2169]=0;
+ a[2170]=0;
+ a[2171]=0;
+ a[2172]=0;
+ a[2173]=0;
+ a[2174]=0;
+ a[2175]=0;
+ a[2176]=0;
+ a[2177]=0;
+ a[2178]=0;
+ a[2179]=0;
+ a[2180]=0;
+ a[2181]=0;
+ a[2182]=0;
+ a[2183]=0;
+ a[2184]=0;
+ a[2185]=0;
+ a[2186]=0;
+ a[2187]=0;
+ a[2188]=0;
+ a[2189]=0;
+ a[2190]=0;
+ a[2191]=0;
+ a[2192]=0;
+ a[2193]=0;
+ a[2194]=0;
+ a[2195]=0;
+ a[2196]=0;
+ a[2197]=0;
+ a[2198]=0;
+ a[2199]=0;
+ a[2200]=0;
+ a[2201]=0;
+ a[2202]=0;
+ a[2203]=0;
+ a[2204]=0;
+ a[2205]=0;
+ a[2206]=0;
+ a[2207]=0;
+ a[2208]=0;
+ a[2209]=0;
+ a[2210]=0;
+ a[2211]=0;
+ a[2212]=0;
+ a[2213]=0;
+ a[2214]=0;
+ a[2215]=0;
+ a[2216]=0;
+ a[2217]=0;
+ a[2218]=0;
+ a[2219]=0;
+ a[2220]=0;
+ a[2221]=0;
+ a[2222]=0;
+ a[2223]=0;
+ a[2224]=0;
+ a[2225]=0;
+ a[2226]=0;
+ a[2227]=0;
+ a[2228]=0;
+ a[2229]=0;
+ a[2230]=0;
+ a[2231]=0;
+ a[2232]=0;
+ a[2233]=0;
+ a[2234]=0;
+ a[2235]=0;
+ a[2236]=0;
+ a[2237]=0;
+ a[2238]=0;
+ a[2239]=0;
+ a[2240]=0;
+ a[2241]=0;
+ a[2242]=0;
+ a[2243]=0;
+ a[2244]=0;
+ a[2245]=0;
+ a[2246]=0;
+ a[2247]=0;
+ a[2248]=0;
+ a[2249]=0;
+ a[2250]=0;
+ a[2251]=0;
+ a[2252]=0;
+ a[2253]=0;
+ a[2254]=0;
+ a[2255]=0;
+ a[2256]=0;
+ a[2257]=0;
+ a[2258]=0;
+ a[2259]=0;
+ a[2260]=0;
+ a[2261]=0;
+ a[2262]=0;
+ a[2263]=0;
+ a[2264]=0;
+ a[2265]=0;
+ a[2266]=0;
+ a[2267]=0;
+ a[2268]=0;
+ a[2269]=0;
+ a[2270]=0;
+ a[2271]=0;
+ a[2272]=0;
+ a[2273]=0;
+ a[2274]=0;
+ a[2275]=0;
+ a[2276]=0;
+ a[2277]=0;
+ a[2278]=0;
+ a[2279]=0;
+ a[2280]=0;
+ a[2281]=0;
+ a[2282]=0;
+ a[2283]=0;
+ a[2284]=0;
+ a[2285]=0;
+ a[2286]=0;
+ a[2287]=0;
+ a[2288]=0;
+ a[2289]=0;
+ a[2290]=0;
+ a[2291]=0;
+ a[2292]=0;
+ a[2293]=0;
+ a[2294]=0;
+ a[2295]=0;
+ a[2296]=0;
+ a[2297]=0;
+ a[2298]=0;
+ a[2299]=0;
+ a[2300]=0;
+ a[2301]=0;
+ a[2302]=0;
+ a[2303]=0;
+ a[2304]=0;
+ a[2305]=0;
+ a[2306]=0;
+ a[2307]=0;
+ a[2308]=0;
+ a[2309]=0;
+ a[2310]=0;
+ a[2311]=0;
+ a[2312]=0;
+ a[2313]=0;
+ a[2314]=0;
+ a[2315]=0;
+ a[2316]=0;
+ a[2317]=0;
+ a[2318]=0;
+ a[2319]=0;
+ a[2320]=0;
+ a[2321]=0;
+ a[2322]=0;
+ a[2323]=0;
+ a[2324]=0;
+ a[2325]=0;
+ a[2326]=0;
+ a[2327]=0;
+ a[2328]=0;
+ a[2329]=0;
+ a[2330]=0;
+ a[2331]=0;
+ a[2332]=0;
+ a[2333]=0;
+ a[2334]=0;
+ a[2335]=0;
+ a[2336]=0;
+ a[2337]=0;
+ a[2338]=0;
+ a[2339]=0;
+ a[2340]=0;
+ a[2341]=0;
+ a[2342]=0;
+ a[2343]=0;
+ a[2344]=0;
+ a[2345]=0;
+ a[2346]=0;
+ a[2347]=0;
+ a[2348]=0;
+ a[2349]=0;
+ a[2350]=0;
+ a[2351]=0;
+ a[2352]=0;
+ a[2353]=0;
+ a[2354]=0;
+ a[2355]=0;
+ a[2356]=0;
+ a[2357]=0;
+ a[2358]=0;
+ a[2359]=0;
+ a[2360]=0;
+ a[2361]=0;
+ a[2362]=0;
+ a[2363]=0;
+ a[2364]=0;
+ a[2365]=0;
+ a[2366]=0;
+ a[2367]=0;
+ a[2368]=0;
+ a[2369]=0;
+ a[2370]=0;
+ a[2371]=0;
+ a[2372]=0;
+ a[2373]=0;
+ a[2374]=0;
+ a[2375]=0;
+ a[2376]=0;
+ a[2377]=0;
+ a[2378]=0;
+ a[2379]=0;
+ a[2380]=0;
+ a[2381]=0;
+ a[2382]=0;
+ a[2383]=0;
+ a[2384]=0;
+ a[2385]=0;
+ a[2386]=0;
+ a[2387]=0;
+ a[2388]=0;
+ a[2389]=0;
+ a[2390]=0;
+ a[2391]=0;
+ a[2392]=0;
+ a[2393]=0;
+ a[2394]=0;
+ a[2395]=0;
+ a[2396]=0;
+ a[2397]=0;
+ a[2398]=0;
+ a[2399]=0;
+ a[2400]=0;
+ a[2401]=0;
+ a[2402]=0;
+ a[2403]=0;
+ a[2404]=0;
+ a[2405]=0;
+ a[2406]=0;
+ a[2407]=0;
+ a[2408]=0;
+ a[2409]=0;
+ a[2410]=0;
+ a[2411]=0;
+ a[2412]=0;
+ a[2413]=0;
+ a[2414]=0;
+ a[2415]=0;
+ a[2416]=0;
+ a[2417]=0;
+ a[2418]=0;
+ a[2419]=0;
+ a[2420]=0;
+ a[2421]=0;
+ a[2422]=0;
+ a[2423]=0;
+ a[2424]=0;
+ a[2425]=0;
+ a[2426]=0;
+ a[2427]=0;
+ a[2428]=0;
+ a[2429]=0;
+ a[2430]=0;
+ a[2431]=0;
+ a[2432]=0;
+ a[2433]=0;
+ a[2434]=0;
+ a[2435]=0;
+ a[2436]=0;
+ a[2437]=0;
+ a[2438]=0;
+ a[2439]=0;
+ a[2440]=0;
+ a[2441]=0;
+ a[2442]=0;
+ a[2443]=0;
+ a[2444]=0;
+ a[2445]=0;
+ a[2446]=0;
+ a[2447]=0;
+ a[2448]=0;
+ a[2449]=0;
+ a[2450]=0;
+ a[2451]=0;
+ a[2452]=0;
+ a[2453]=0;
+ a[2454]=0;
+ a[2455]=0;
+ a[2456]=0;
+ a[2457]=0;
+ a[2458]=0;
+ a[2459]=0;
+ a[2460]=0;
+ a[2461]=0;
+ a[2462]=0;
+ a[2463]=0;
+ a[2464]=0;
+ a[2465]=0;
+ a[2466]=0;
+ a[2467]=0;
+ a[2468]=0;
+ a[2469]=0;
+ a[2470]=0;
+ a[2471]=0;
+ a[2472]=0;
+ a[2473]=0;
+ a[2474]=0;
+ a[2475]=0;
+ a[2476]=0;
+ a[2477]=0;
+ a[2478]=0;
+ a[2479]=0;
+ a[2480]=0;
+ a[2481]=0;
+ a[2482]=0;
+ a[2483]=0;
+ a[2484]=0;
+ a[2485]=0;
+ a[2486]=0;
+ a[2487]=0;
+ a[2488]=0;
+ a[2489]=0;
+ a[2490]=0;
+ a[2491]=0;
+ a[2492]=0;
+ a[2493]=0;
+ a[2494]=0;
+ a[2495]=0;
+ a[2496]=0;
+ a[2497]=0;
+ a[2498]=0;
+ a[2499]=0;
+ a[2500]=0;
+ a[2501]=0;
+ a[2502]=0;
+ a[2503]=0;
+ a[2504]=0;
+ a[2505]=0;
+ a[2506]=0;
+ a[2507]=0;
+ a[2508]=0;
+ a[2509]=0;
+ a[2510]=0;
+ a[2511]=0;
+ a[2512]=0;
+ a[2513]=0;
+ a[2514]=0;
+ a[2515]=0;
+ a[2516]=0;
+ a[2517]=0;
+ a[2518]=0;
+ a[2519]=0;
+ a[2520]=0;
+ a[2521]=0;
+ a[2522]=0;
+ a[2523]=0;
+ a[2524]=0;
+ a[2525]=0;
+ a[2526]=0;
+ a[2527]=0;
+ a[2528]=0;
+ a[2529]=0;
+ a[2530]=0;
+ a[2531]=0;
+ a[2532]=0;
+ a[2533]=0;
+ a[2534]=0;
+ a[2535]=0;
+ a[2536]=0;
+ a[2537]=0;
+ a[2538]=0;
+ a[2539]=0;
+ a[2540]=0;
+ a[2541]=0;
+ a[2542]=0;
+ a[2543]=0;
+ a[2544]=0;
+ a[2545]=0;
+ a[2546]=0;
+ a[2547]=0;
+ a[2548]=0;
+ a[2549]=0;
+ a[2550]=0;
+ a[2551]=0;
+ a[2552]=0;
+ a[2553]=0;
+ a[2554]=0;
+ a[2555]=0;
+ a[2556]=0;
+ a[2557]=0;
+ a[2558]=0;
+ a[2559]=0;
+ a[2560]=0;
+ a[2561]=0;
+ a[2562]=0;
+ a[2563]=0;
+ a[2564]=0;
+ a[2565]=0;
+ a[2566]=0;
+ a[2567]=0;
+ a[2568]=0;
+ a[2569]=0;
+ a[2570]=0;
+ a[2571]=0;
+ a[2572]=0;
+ a[2573]=0;
+ a[2574]=0;
+ a[2575]=0;
+ a[2576]=0;
+ a[2577]=0;
+ a[2578]=0;
+ a[2579]=0;
+ a[2580]=0;
+ a[2581]=0;
+ a[2582]=0;
+ a[2583]=0;
+ a[2584]=0;
+ a[2585]=0;
+ a[2586]=0;
+ a[2587]=0;
+ a[2588]=0;
+ a[2589]=0;
+ a[2590]=0;
+ a[2591]=0;
+ a[2592]=0;
+ a[2593]=0;
+ a[2594]=0;
+ a[2595]=0;
+ a[2596]=0;
+ a[2597]=0;
+ a[2598]=0;
+ a[2599]=0;
+ a[2600]=0;
+ a[2601]=0;
+ a[2602]=0;
+ a[2603]=0;
+ a[2604]=0;
+ a[2605]=0;
+ a[2606]=0;
+ a[2607]=0;
+ a[2608]=0;
+ a[2609]=0;
+ a[2610]=0;
+ a[2611]=0;
+ a[2612]=0;
+ a[2613]=0;
+ a[2614]=0;
+ a[2615]=0;
+ a[2616]=0;
+ a[2617]=0;
+ a[2618]=0;
+ a[2619]=0;
+ a[2620]=0;
+ a[2621]=0;
+ a[2622]=0;
+ a[2623]=0;
+ a[2624]=0;
+ a[2625]=0;
+ a[2626]=0;
+ a[2627]=0;
+ a[2628]=0;
+ a[2629]=0;
+ a[2630]=0;
+ a[2631]=0;
+ a[2632]=0;
+ a[2633]=0;
+ a[2634]=0;
+ a[2635]=0;
+ a[2636]=0;
+ a[2637]=0;
+ a[2638]=0;
+ a[2639]=0;
+ a[2640]=0;
+ a[2641]=0;
+ a[2642]=0;
+ a[2643]=0;
+ a[2644]=0;
+ a[2645]=0;
+ a[2646]=0;
+ a[2647]=0;
+ a[2648]=0;
+ a[2649]=0;
+ a[2650]=0;
+ a[2651]=0;
+ a[2652]=0;
+ a[2653]=0;
+ a[2654]=0;
+ a[2655]=0;
+ a[2656]=0;
+ a[2657]=0;
+ a[2658]=0;
+ a[2659]=0;
+ a[2660]=0;
+ a[2661]=0;
+ a[2662]=0;
+ a[2663]=0;
+ a[2664]=0;
+ a[2665]=0;
+ a[2666]=0;
+ a[2667]=0;
+ a[2668]=0;
+ a[2669]=0;
+ a[2670]=0;
+ a[2671]=0;
+ a[2672]=0;
+ a[2673]=0;
+ a[2674]=0;
+ a[2675]=0;
+ a[2676]=0;
+ a[2677]=0;
+ a[2678]=0;
+ a[2679]=0;
+ a[2680]=0;
+ a[2681]=0;
+ a[2682]=0;
+ a[2683]=0;
+ a[2684]=0;
+ a[2685]=0;
+ a[2686]=0;
+ a[2687]=0;
+ a[2688]=0;
+ a[2689]=0;
+ a[2690]=0;
+ a[2691]=0;
+ a[2692]=0;
+ a[2693]=0;
+ a[2694]=0;
+ a[2695]=0;
+ a[2696]=0;
+ a[2697]=0;
+ a[2698]=0;
+ a[2699]=0;
+ a[2700]=0;
+ a[2701]=0;
+ a[2702]=0;
+ a[2703]=0;
+ a[2704]=0;
+ a[2705]=0;
+ a[2706]=0;
+ a[2707]=0;
+ a[2708]=0;
+ a[2709]=0;
+ a[2710]=0;
+ a[2711]=0;
+ a[2712]=0;
+ a[2713]=0;
+ a[2714]=0;
+ a[2715]=0;
+ a[2716]=0;
+ a[2717]=0;
+ a[2718]=0;
+ a[2719]=0;
+ a[2720]=0;
+ a[2721]=0;
+ a[2722]=0;
+ a[2723]=0;
+ a[2724]=0;
+ a[2725]=0;
+ a[2726]=0;
+ a[2727]=0;
+ a[2728]=0;
+ a[2729]=0;
+ a[2730]=0;
+ a[2731]=0;
+ a[2732]=0;
+ a[2733]=0;
+ a[2734]=0;
+ a[2735]=0;
+ a[2736]=0;
+ a[2737]=0;
+ a[2738]=0;
+ a[2739]=0;
+ a[2740]=0;
+ a[2741]=0;
+ a[2742]=0;
+ a[2743]=0;
+ a[2744]=0;
+ a[2745]=0;
+ a[2746]=0;
+ a[2747]=0;
+ a[2748]=0;
+ a[2749]=0;
+ a[2750]=0;
+ a[2751]=0;
+ a[2752]=0;
+ a[2753]=0;
+ a[2754]=0;
+ a[2755]=0;
+ a[2756]=0;
+ a[2757]=0;
+ a[2758]=0;
+ a[2759]=0;
+ a[2760]=0;
+ a[2761]=0;
+ a[2762]=0;
+ a[2763]=0;
+ a[2764]=0;
+ a[2765]=0;
+ a[2766]=0;
+ a[2767]=0;
+ a[2768]=0;
+ a[2769]=0;
+ a[2770]=0;
+ a[2771]=0;
+ a[2772]=0;
+ a[2773]=0;
+ a[2774]=0;
+ a[2775]=0;
+ a[2776]=0;
+ a[2777]=0;
+ a[2778]=0;
+ a[2779]=0;
+ a[2780]=0;
+ a[2781]=0;
+ a[2782]=0;
+ a[2783]=0;
+ a[2784]=0;
+ a[2785]=0;
+ a[2786]=0;
+ a[2787]=0;
+ a[2788]=0;
+ a[2789]=0;
+ a[2790]=0;
+ a[2791]=0;
+ a[2792]=0;
+ a[2793]=0;
+ a[2794]=0;
+ a[2795]=0;
+ a[2796]=0;
+ a[2797]=0;
+ a[2798]=0;
+ a[2799]=0;
+ a[2800]=0;
+ a[2801]=0;
+ a[2802]=0;
+ a[2803]=0;
+ a[2804]=0;
+ a[2805]=0;
+ a[2806]=0;
+ a[2807]=0;
+ a[2808]=0;
+ a[2809]=0;
+ a[2810]=0;
+ a[2811]=0;
+ a[2812]=0;
+ a[2813]=0;
+ a[2814]=0;
+ a[2815]=0;
+ a[2816]=0;
+ a[2817]=0;
+ a[2818]=0;
+ a[2819]=0;
+ a[2820]=0;
+ a[2821]=0;
+ a[2822]=0;
+ a[2823]=0;
+ a[2824]=0;
+ a[2825]=0;
+ a[2826]=0;
+ a[2827]=0;
+ a[2828]=0;
+ a[2829]=0;
+ a[2830]=0;
+ a[2831]=0;
+ a[2832]=0;
+ a[2833]=0;
+ a[2834]=0;
+ a[2835]=0;
+ a[2836]=0;
+ a[2837]=0;
+ a[2838]=0;
+ a[2839]=0;
+ a[2840]=0;
+ a[2841]=0;
+ a[2842]=0;
+ a[2843]=0;
+ a[2844]=0;
+ a[2845]=0;
+ a[2846]=0;
+ a[2847]=0;
+ a[2848]=0;
+ a[2849]=0;
+ a[2850]=0;
+ a[2851]=0;
+ a[2852]=0;
+ a[2853]=0;
+ a[2854]=0;
+ a[2855]=0;
+ a[2856]=0;
+ a[2857]=0;
+ a[2858]=0;
+ a[2859]=0;
+ a[2860]=0;
+ a[2861]=0;
+ a[2862]=0;
+ a[2863]=0;
+ a[2864]=0;
+ a[2865]=0;
+ a[2866]=0;
+ a[2867]=0;
+ a[2868]=0;
+ a[2869]=0;
+ a[2870]=0;
+ a[2871]=0;
+ a[2872]=0;
+ a[2873]=0;
+ a[2874]=0;
+ a[2875]=0;
+ a[2876]=0;
+ a[2877]=0;
+ a[2878]=0;
+ a[2879]=0;
+ a[2880]=0;
+ a[2881]=0;
+ a[2882]=0;
+ a[2883]=0;
+ a[2884]=0;
+ a[2885]=0;
+ a[2886]=0;
+ a[2887]=0;
+ a[2888]=0;
+ a[2889]=0;
+ a[2890]=0;
+ a[2891]=0;
+ a[2892]=0;
+ a[2893]=0;
+ a[2894]=0;
+ a[2895]=0;
+ a[2896]=0;
+ a[2897]=0;
+ a[2898]=0;
+ a[2899]=0;
+ a[2900]=0;
+ a[2901]=0;
+ a[2902]=0;
+ a[2903]=0;
+ a[2904]=0;
+ a[2905]=0;
+ a[2906]=0;
+ a[2907]=0;
+ a[2908]=0;
+ a[2909]=0;
+ a[2910]=0;
+ a[2911]=0;
+ a[2912]=0;
+ a[2913]=0;
+ a[2914]=0;
+ a[2915]=0;
+ a[2916]=0;
+ a[2917]=0;
+ a[2918]=0;
+ a[2919]=0;
+ a[2920]=0;
+ a[2921]=0;
+ a[2922]=0;
+ a[2923]=0;
+ a[2924]=0;
+ a[2925]=0;
+ a[2926]=0;
+ a[2927]=0;
+ a[2928]=0;
+ a[2929]=0;
+ a[2930]=0;
+ a[2931]=0;
+ a[2932]=0;
+ a[2933]=0;
+ a[2934]=0;
+ a[2935]=0;
+ a[2936]=0;
+ a[2937]=0;
+ a[2938]=0;
+ a[2939]=0;
+ a[2940]=0;
+ a[2941]=0;
+ a[2942]=0;
+ a[2943]=0;
+ a[2944]=0;
+ a[2945]=0;
+ a[2946]=0;
+ a[2947]=0;
+ a[2948]=0;
+ a[2949]=0;
+ a[2950]=0;
+ a[2951]=0;
+ a[2952]=0;
+ a[2953]=0;
+ a[2954]=0;
+ a[2955]=0;
+ a[2956]=0;
+ a[2957]=0;
+ a[2958]=0;
+ a[2959]=0;
+ a[2960]=0;
+ a[2961]=0;
+ a[2962]=0;
+ a[2963]=0;
+ a[2964]=0;
+ a[2965]=0;
+ a[2966]=0;
+ a[2967]=0;
+ a[2968]=0;
+ a[2969]=0;
+ a[2970]=0;
+ a[2971]=0;
+ a[2972]=0;
+ a[2973]=0;
+ a[2974]=0;
+ a[2975]=0;
+ a[2976]=0;
+ a[2977]=0;
+ a[2978]=0;
+ a[2979]=0;
+ a[2980]=0;
+ a[2981]=0;
+ a[2982]=0;
+ a[2983]=0;
+ a[2984]=0;
+ a[2985]=0;
+ a[2986]=0;
+ a[2987]=0;
+ a[2988]=0;
+ a[2989]=0;
+ a[2990]=0;
+ a[2991]=0;
+ a[2992]=0;
+ a[2993]=0;
+ a[2994]=0;
+ a[2995]=0;
+ a[2996]=0;
+ a[2997]=0;
+ a[2998]=0;
+ a[2999]=0;
+ a[3000]=0;
+ a[3001]=0;
+ a[3002]=0;
+ a[3003]=0;
+ a[3004]=0;
+ a[3005]=0;
+ a[3006]=0;
+ a[3007]=0;
+ a[3008]=0;
+ a[3009]=0;
+ a[3010]=0;
+ a[3011]=0;
+ a[3012]=0;
+ a[3013]=0;
+ a[3014]=0;
+ a[3015]=0;
+ a[3016]=0;
+ a[3017]=0;
+ a[3018]=0;
+ a[3019]=0;
+ a[3020]=0;
+ a[3021]=0;
+ a[3022]=0;
+ a[3023]=0;
+ a[3024]=0;
+ a[3025]=0;
+ a[3026]=0;
+ a[3027]=0;
+ a[3028]=0;
+ a[3029]=0;
+ a[3030]=0;
+ a[3031]=0;
+ a[3032]=0;
+ a[3033]=0;
+ a[3034]=0;
+ a[3035]=0;
+ a[3036]=0;
+ a[3037]=0;
+ a[3038]=0;
+ a[3039]=0;
+ a[3040]=0;
+ a[3041]=0;
+ a[3042]=0;
+ a[3043]=0;
+ a[3044]=0;
+ a[3045]=0;
+ a[3046]=0;
+ a[3047]=0;
+ a[3048]=0;
+ a[3049]=0;
+ a[3050]=0;
+ a[3051]=0;
+ a[3052]=0;
+ a[3053]=0;
+ a[3054]=0;
+ a[3055]=0;
+ a[3056]=0;
+ a[3057]=0;
+ a[3058]=0;
+ a[3059]=0;
+ a[3060]=0;
+ a[3061]=0;
+ a[3062]=0;
+ a[3063]=0;
+ a[3064]=0;
+ a[3065]=0;
+ a[3066]=0;
+ a[3067]=0;
+ a[3068]=0;
+ a[3069]=0;
+ a[3070]=0;
+ a[3071]=0;
+ a[3072]=0;
+ a[3073]=0;
+ a[3074]=0;
+ a[3075]=0;
+ a[3076]=0;
+ a[3077]=0;
+ a[3078]=0;
+ a[3079]=0;
+ a[3080]=0;
+ a[3081]=0;
+ a[3082]=0;
+ a[3083]=0;
+ a[3084]=0;
+ a[3085]=0;
+ a[3086]=0;
+ a[3087]=0;
+ a[3088]=0;
+ a[3089]=0;
+ a[3090]=0;
+ a[3091]=0;
+ a[3092]=0;
+ a[3093]=0;
+ a[3094]=0;
+ a[3095]=0;
+ a[3096]=0;
+ a[3097]=0;
+ a[3098]=0;
+ a[3099]=0;
+ a[3100]=0;
+ a[3101]=0;
+ a[3102]=0;
+ a[3103]=0;
+ a[3104]=0;
+ a[3105]=0;
+ a[3106]=0;
+ a[3107]=0;
+ a[3108]=0;
+ a[3109]=0;
+ a[3110]=0;
+ a[3111]=0;
+ a[3112]=0;
+ a[3113]=0;
+ a[3114]=0;
+ a[3115]=0;
+ a[3116]=0;
+ a[3117]=0;
+ a[3118]=0;
+ a[3119]=0;
+ a[3120]=0;
+ a[3121]=0;
+ a[3122]=0;
+ a[3123]=0;
+ a[3124]=0;
+ a[3125]=0;
+ a[3126]=0;
+ a[3127]=0;
+ a[3128]=0;
+ a[3129]=0;
+ a[3130]=0;
+ a[3131]=0;
+ a[3132]=0;
+ a[3133]=0;
+ a[3134]=0;
+ a[3135]=0;
+ a[3136]=0;
+ a[3137]=0;
+ a[3138]=0;
+ a[3139]=0;
+ a[3140]=0;
+ a[3141]=0;
+ a[3142]=0;
+ a[3143]=0;
+ a[3144]=0;
+ a[3145]=0;
+ a[3146]=0;
+ a[3147]=0;
+ a[3148]=0;
+ a[3149]=0;
+ a[3150]=0;
+ a[3151]=0;
+ a[3152]=0;
+ a[3153]=0;
+ a[3154]=0;
+ a[3155]=0;
+ a[3156]=0;
+ a[3157]=0;
+ a[3158]=0;
+ a[3159]=0;
+ a[3160]=0;
+ a[3161]=0;
+ a[3162]=0;
+ a[3163]=0;
+ a[3164]=0;
+ a[3165]=0;
+ a[3166]=0;
+ a[3167]=0;
+ a[3168]=0;
+ a[3169]=0;
+ a[3170]=0;
+ a[3171]=0;
+ a[3172]=0;
+ a[3173]=0;
+ a[3174]=0;
+ a[3175]=0;
+ a[3176]=0;
+ a[3177]=0;
+ a[3178]=0;
+ a[3179]=0;
+ a[3180]=0;
+ a[3181]=0;
+ a[3182]=0;
+ a[3183]=0;
+ a[3184]=0;
+ a[3185]=0;
+ a[3186]=0;
+ a[3187]=0;
+ a[3188]=0;
+ a[3189]=0;
+ a[3190]=0;
+ a[3191]=0;
+ a[3192]=0;
+ a[3193]=0;
+ a[3194]=0;
+ a[3195]=0;
+ a[3196]=0;
+ a[3197]=0;
+ a[3198]=0;
+ a[3199]=0;
+ a[3200]=0;
+ a[3201]=0;
+ a[3202]=0;
+ a[3203]=0;
+ a[3204]=0;
+ a[3205]=0;
+ a[3206]=0;
+ a[3207]=0;
+ a[3208]=0;
+ a[3209]=0;
+ a[3210]=0;
+ a[3211]=0;
+ a[3212]=0;
+ a[3213]=0;
+ a[3214]=0;
+ a[3215]=0;
+ a[3216]=0;
+ a[3217]=0;
+ a[3218]=0;
+ a[3219]=0;
+ a[3220]=0;
+ a[3221]=0;
+ a[3222]=0;
+ a[3223]=0;
+ a[3224]=0;
+ a[3225]=0;
+ a[3226]=0;
+ a[3227]=0;
+ a[3228]=0;
+ a[3229]=0;
+ a[3230]=0;
+ a[3231]=0;
+ a[3232]=0;
+ a[3233]=0;
+ a[3234]=0;
+ a[3235]=0;
+ a[3236]=0;
+ a[3237]=0;
+ a[3238]=0;
+ a[3239]=0;
+ a[3240]=0;
+ a[3241]=0;
+ a[3242]=0;
+ a[3243]=0;
+ a[3244]=0;
+ a[3245]=0;
+ a[3246]=0;
+ a[3247]=0;
+ a[3248]=0;
+ a[3249]=0;
+ a[3250]=0;
+ a[3251]=0;
+ a[3252]=0;
+ a[3253]=0;
+ a[3254]=0;
+ a[3255]=0;
+ a[3256]=0;
+ a[3257]=0;
+ a[3258]=0;
+ a[3259]=0;
+ a[3260]=0;
+ a[3261]=0;
+ a[3262]=0;
+ a[3263]=0;
+ a[3264]=0;
+ a[3265]=0;
+ a[3266]=0;
+ a[3267]=0;
+ a[3268]=0;
+ a[3269]=0;
+ a[3270]=0;
+ a[3271]=0;
+ a[3272]=0;
+ a[3273]=0;
+ a[3274]=0;
+ a[3275]=0;
+ a[3276]=0;
+ a[3277]=0;
+ a[3278]=0;
+ a[3279]=0;
+ a[3280]=0;
+ a[3281]=0;
+ a[3282]=0;
+ a[3283]=0;
+ a[3284]=0;
+ a[3285]=0;
+ a[3286]=0;
+ a[3287]=0;
+ a[3288]=0;
+ a[3289]=0;
+ a[3290]=0;
+ a[3291]=0;
+ a[3292]=0;
+ a[3293]=0;
+ a[3294]=0;
+ a[3295]=0;
+ a[3296]=0;
+ a[3297]=0;
+ a[3298]=0;
+ a[3299]=0;
+ a[3300]=0;
+ a[3301]=0;
+ a[3302]=0;
+ a[3303]=0;
+ a[3304]=0;
+ a[3305]=0;
+ a[3306]=0;
+ a[3307]=0;
+ a[3308]=0;
+ a[3309]=0;
+ a[3310]=0;
+ a[3311]=0;
+ a[3312]=0;
+ a[3313]=0;
+ a[3314]=0;
+ a[3315]=0;
+ a[3316]=0;
+ a[3317]=0;
+ a[3318]=0;
+ a[3319]=0;
+ a[3320]=0;
+ a[3321]=0;
+ a[3322]=0;
+ a[3323]=0;
+ a[3324]=0;
+ a[3325]=0;
+ a[3326]=0;
+ a[3327]=0;
+ a[3328]=0;
+ a[3329]=0;
+ a[3330]=0;
+ a[3331]=0;
+ a[3332]=0;
+ a[3333]=0;
+ a[3334]=0;
+ a[3335]=0;
+ a[3336]=0;
+ a[3337]=0;
+ a[3338]=0;
+ a[3339]=0;
+ a[3340]=0;
+ a[3341]=0;
+ a[3342]=0;
+ a[3343]=0;
+ a[3344]=0;
+ a[3345]=0;
+ a[3346]=0;
+ a[3347]=0;
+ a[3348]=0;
+ a[3349]=0;
+ a[3350]=0;
+ a[3351]=0;
+ a[3352]=0;
+ a[3353]=0;
+ a[3354]=0;
+ a[3355]=0;
+ a[3356]=0;
+ a[3357]=0;
+ a[3358]=0;
+ a[3359]=0;
+ a[3360]=0;
+ a[3361]=0;
+ a[3362]=0;
+ a[3363]=0;
+ a[3364]=0;
+ a[3365]=0;
+ a[3366]=0;
+ a[3367]=0;
+ a[3368]=0;
+ a[3369]=0;
+ a[3370]=0;
+ a[3371]=0;
+ a[3372]=0;
+ a[3373]=0;
+ a[3374]=0;
+ a[3375]=0;
+ a[3376]=0;
+ a[3377]=0;
+ a[3378]=0;
+ a[3379]=0;
+ a[3380]=0;
+ a[3381]=0;
+ a[3382]=0;
+ a[3383]=0;
+ a[3384]=0;
+ a[3385]=0;
+ a[3386]=0;
+ a[3387]=0;
+ a[3388]=0;
+ a[3389]=0;
+ a[3390]=0;
+ a[3391]=0;
+ a[3392]=0;
+ a[3393]=0;
+ a[3394]=0;
+ a[3395]=0;
+ a[3396]=0;
+ a[3397]=0;
+ a[3398]=0;
+ a[3399]=0;
+ a[3400]=0;
+ a[3401]=0;
+ a[3402]=0;
+ a[3403]=0;
+ a[3404]=0;
+ a[3405]=0;
+ a[3406]=0;
+ a[3407]=0;
+ a[3408]=0;
+ a[3409]=0;
+ a[3410]=0;
+ a[3411]=0;
+ a[3412]=0;
+ a[3413]=0;
+ a[3414]=0;
+ a[3415]=0;
+ a[3416]=0;
+ a[3417]=0;
+ a[3418]=0;
+ a[3419]=0;
+ a[3420]=0;
+ a[3421]=0;
+ a[3422]=0;
+ a[3423]=0;
+ a[3424]=0;
+ a[3425]=0;
+ a[3426]=0;
+ a[3427]=0;
+ a[3428]=0;
+ a[3429]=0;
+ a[3430]=0;
+ a[3431]=0;
+ a[3432]=0;
+ a[3433]=0;
+ a[3434]=0;
+ a[3435]=0;
+ a[3436]=0;
+ a[3437]=0;
+ a[3438]=0;
+ a[3439]=0;
+ a[3440]=0;
+ a[3441]=0;
+ a[3442]=0;
+ a[3443]=0;
+ a[3444]=0;
+ a[3445]=0;
+ a[3446]=0;
+ a[3447]=0;
+ a[3448]=0;
+ a[3449]=0;
+ a[3450]=0;
+ a[3451]=0;
+ a[3452]=0;
+ a[3453]=0;
+ a[3454]=0;
+ a[3455]=0;
+ a[3456]=0;
+ a[3457]=0;
+ a[3458]=0;
+ a[3459]=0;
+ a[3460]=0;
+ a[3461]=0;
+ a[3462]=0;
+ a[3463]=0;
+ a[3464]=0;
+ a[3465]=0;
+ a[3466]=0;
+ a[3467]=0;
+ a[3468]=0;
+ a[3469]=0;
+ a[3470]=0;
+ a[3471]=0;
+ a[3472]=0;
+ a[3473]=0;
+ a[3474]=0;
+ a[3475]=0;
+ a[3476]=0;
+ a[3477]=0;
+ a[3478]=0;
+ a[3479]=0;
+ a[3480]=0;
+ a[3481]=0;
+ a[3482]=0;
+ a[3483]=0;
+ a[3484]=0;
+ a[3485]=0;
+ a[3486]=0;
+ a[3487]=0;
+ a[3488]=0;
+ a[3489]=0;
+ a[3490]=0;
+ a[3491]=0;
+ a[3492]=0;
+ a[3493]=0;
+ a[3494]=0;
+ a[3495]=0;
+ a[3496]=0;
+ a[3497]=0;
+ a[3498]=0;
+ a[3499]=0;
+ a[3500]=0;
+ a[3501]=0;
+ a[3502]=0;
+ a[3503]=0;
+ a[3504]=0;
+ a[3505]=0;
+ a[3506]=0;
+ a[3507]=0;
+ a[3508]=0;
+ a[3509]=0;
+ a[3510]=0;
+ a[3511]=0;
+ a[3512]=0;
+ a[3513]=0;
+ a[3514]=0;
+ a[3515]=0;
+ a[3516]=0;
+ a[3517]=0;
+ a[3518]=0;
+ a[3519]=0;
+ a[3520]=0;
+ a[3521]=0;
+ a[3522]=0;
+ a[3523]=0;
+ a[3524]=0;
+ a[3525]=0;
+ a[3526]=0;
+ a[3527]=0;
+ a[3528]=0;
+ a[3529]=0;
+ a[3530]=0;
+ a[3531]=0;
+ a[3532]=0;
+ a[3533]=0;
+ a[3534]=0;
+ a[3535]=0;
+ a[3536]=0;
+ a[3537]=0;
+ a[3538]=0;
+ a[3539]=0;
+ a[3540]=0;
+ a[3541]=0;
+ a[3542]=0;
+ a[3543]=0;
+ a[3544]=0;
+ a[3545]=0;
+ a[3546]=0;
+ a[3547]=0;
+ a[3548]=0;
+ a[3549]=0;
+ a[3550]=0;
+ a[3551]=0;
+ a[3552]=0;
+ a[3553]=0;
+ a[3554]=0;
+ a[3555]=0;
+ a[3556]=0;
+ a[3557]=0;
+ a[3558]=0;
+ a[3559]=0;
+ a[3560]=0;
+ a[3561]=0;
+ a[3562]=0;
+ a[3563]=0;
+ a[3564]=0;
+ a[3565]=0;
+ a[3566]=0;
+ a[3567]=0;
+ a[3568]=0;
+ a[3569]=0;
+ a[3570]=0;
+ a[3571]=0;
+ a[3572]=0;
+ a[3573]=0;
+ a[3574]=0;
+ a[3575]=0;
+ a[3576]=0;
+ a[3577]=0;
+ a[3578]=0;
+ a[3579]=0;
+ a[3580]=0;
+ a[3581]=0;
+ a[3582]=0;
+ a[3583]=0;
+ a[3584]=0;
+ a[3585]=0;
+ a[3586]=0;
+ a[3587]=0;
+ a[3588]=0;
+ a[3589]=0;
+ a[3590]=0;
+ a[3591]=0;
+ a[3592]=0;
+ a[3593]=0;
+ a[3594]=0;
+ a[3595]=0;
+ a[3596]=0;
+ a[3597]=0;
+ a[3598]=0;
+ a[3599]=0;
+ a[3600]=0;
+ a[3601]=0;
+ a[3602]=0;
+ a[3603]=0;
+ a[3604]=0;
+ a[3605]=0;
+ a[3606]=0;
+ a[3607]=0;
+ a[3608]=0;
+ a[3609]=0;
+ a[3610]=0;
+ a[3611]=0;
+ a[3612]=0;
+ a[3613]=0;
+ a[3614]=0;
+ a[3615]=0;
+ a[3616]=0;
+ a[3617]=0;
+ a[3618]=0;
+ a[3619]=0;
+ a[3620]=0;
+ a[3621]=0;
+ a[3622]=0;
+ a[3623]=0;
+ a[3624]=0;
+ a[3625]=0;
+ a[3626]=0;
+ a[3627]=0;
+ a[3628]=0;
+ a[3629]=0;
+ a[3630]=0;
+ a[3631]=0;
+ a[3632]=0;
+ a[3633]=0;
+ a[3634]=0;
+ a[3635]=0;
+ a[3636]=0;
+ a[3637]=0;
+ a[3638]=0;
+ a[3639]=0;
+ a[3640]=0;
+ a[3641]=0;
+ a[3642]=0;
+ a[3643]=0;
+ a[3644]=0;
+ a[3645]=0;
+ a[3646]=0;
+ a[3647]=0;
+ a[3648]=0;
+ a[3649]=0;
+ a[3650]=0;
+ a[3651]=0;
+ a[3652]=0;
+ a[3653]=0;
+ a[3654]=0;
+ a[3655]=0;
+ a[3656]=0;
+ a[3657]=0;
+ a[3658]=0;
+ a[3659]=0;
+ a[3660]=0;
+ a[3661]=0;
+ a[3662]=0;
+ a[3663]=0;
+ a[3664]=0;
+ a[3665]=0;
+ a[3666]=0;
+ a[3667]=0;
+ a[3668]=0;
+ a[3669]=0;
+ a[3670]=0;
+ a[3671]=0;
+ a[3672]=0;
+ a[3673]=0;
+ a[3674]=0;
+ a[3675]=0;
+ a[3676]=0;
+ a[3677]=0;
+ a[3678]=0;
+ a[3679]=0;
+ a[3680]=0;
+ a[3681]=0;
+ a[3682]=0;
+ a[3683]=0;
+ a[3684]=0;
+ a[3685]=0;
+ a[3686]=0;
+ a[3687]=0;
+ a[3688]=0;
+ a[3689]=0;
+ a[3690]=0;
+ a[3691]=0;
+ a[3692]=0;
+ a[3693]=0;
+ a[3694]=0;
+ a[3695]=0;
+ a[3696]=0;
+ a[3697]=0;
+ a[3698]=0;
+ a[3699]=0;
+ a[3700]=0;
+ a[3701]=0;
+ a[3702]=0;
+ a[3703]=0;
+ a[3704]=0;
+ a[3705]=0;
+ a[3706]=0;
+ a[3707]=0;
+ a[3708]=0;
+ a[3709]=0;
+ a[3710]=0;
+ a[3711]=0;
+ a[3712]=0;
+ a[3713]=0;
+ a[3714]=0;
+ a[3715]=0;
+ a[3716]=0;
+ a[3717]=0;
+ a[3718]=0;
+ a[3719]=0;
+ a[3720]=0;
+ a[3721]=0;
+ a[3722]=0;
+ a[3723]=0;
+ a[3724]=0;
+ a[3725]=0;
+ a[3726]=0;
+ a[3727]=0;
+ a[3728]=0;
+ a[3729]=0;
+ a[3730]=0;
+ a[3731]=0;
+ a[3732]=0;
+ a[3733]=0;
+ a[3734]=0;
+ a[3735]=0;
+ a[3736]=0;
+ a[3737]=0;
+ a[3738]=0;
+ a[3739]=0;
+ a[3740]=0;
+ a[3741]=0;
+ a[3742]=0;
+ a[3743]=0;
+ a[3744]=0;
+ a[3745]=0;
+ a[3746]=0;
+ a[3747]=0;
+ a[3748]=0;
+ a[3749]=0;
+ a[3750]=0;
+ a[3751]=0;
+ a[3752]=0;
+ a[3753]=0;
+ a[3754]=0;
+ a[3755]=0;
+ a[3756]=0;
+ a[3757]=0;
+ a[3758]=0;
+ a[3759]=0;
+ a[3760]=0;
+ a[3761]=0;
+ a[3762]=0;
+ a[3763]=0;
+ a[3764]=0;
+ a[3765]=0;
+ a[3766]=0;
+ a[3767]=0;
+ a[3768]=0;
+ a[3769]=0;
+ a[3770]=0;
+ a[3771]=0;
+ a[3772]=0;
+ a[3773]=0;
+ a[3774]=0;
+ a[3775]=0;
+ a[3776]=0;
+ a[3777]=0;
+ a[3778]=0;
+ a[3779]=0;
+ a[3780]=0;
+ a[3781]=0;
+ a[3782]=0;
+ a[3783]=0;
+ a[3784]=0;
+ a[3785]=0;
+ a[3786]=0;
+ a[3787]=0;
+ a[3788]=0;
+ a[3789]=0;
+ a[3790]=0;
+ a[3791]=0;
+ a[3792]=0;
+ a[3793]=0;
+ a[3794]=0;
+ a[3795]=0;
+ a[3796]=0;
+ a[3797]=0;
+ a[3798]=0;
+ a[3799]=0;
+ a[3800]=0;
+ a[3801]=0;
+ a[3802]=0;
+ a[3803]=0;
+ a[3804]=0;
+ a[3805]=0;
+ a[3806]=0;
+ a[3807]=0;
+ a[3808]=0;
+ a[3809]=0;
+ a[3810]=0;
+ a[3811]=0;
+ a[3812]=0;
+ a[3813]=0;
+ a[3814]=0;
+ a[3815]=0;
+ a[3816]=0;
+ a[3817]=0;
+ a[3818]=0;
+ a[3819]=0;
+ a[3820]=0;
+ a[3821]=0;
+ a[3822]=0;
+ a[3823]=0;
+ a[3824]=0;
+ a[3825]=0;
+ a[3826]=0;
+ a[3827]=0;
+ a[3828]=0;
+ a[3829]=0;
+ a[3830]=0;
+ a[3831]=0;
+ a[3832]=0;
+ a[3833]=0;
+ a[3834]=0;
+ a[3835]=0;
+ a[3836]=0;
+ a[3837]=0;
+ a[3838]=0;
+ a[3839]=0;
+ a[3840]=0;
+ a[3841]=0;
+ a[3842]=0;
+ a[3843]=0;
+ a[3844]=0;
+ a[3845]=0;
+ a[3846]=0;
+ a[3847]=0;
+ a[3848]=0;
+ a[3849]=0;
+ a[3850]=0;
+ a[3851]=0;
+ a[3852]=0;
+ a[3853]=0;
+ a[3854]=0;
+ a[3855]=0;
+ a[3856]=0;
+ a[3857]=0;
+ a[3858]=0;
+ a[3859]=0;
+ a[3860]=0;
+ a[3861]=0;
+ a[3862]=0;
+ a[3863]=0;
+ a[3864]=0;
+ a[3865]=0;
+ a[3866]=0;
+ a[3867]=0;
+ a[3868]=0;
+ a[3869]=0;
+ a[3870]=0;
+ a[3871]=0;
+ a[3872]=0;
+ a[3873]=0;
+ a[3874]=0;
+ a[3875]=0;
+ a[3876]=0;
+ a[3877]=0;
+ a[3878]=0;
+ a[3879]=0;
+ a[3880]=0;
+ a[3881]=0;
+ a[3882]=0;
+ a[3883]=0;
+ a[3884]=0;
+ a[3885]=0;
+ a[3886]=0;
+ a[3887]=0;
+ a[3888]=0;
+ a[3889]=0;
+ a[3890]=0;
+ a[3891]=0;
+ a[3892]=0;
+ a[3893]=0;
+ a[3894]=0;
+ a[3895]=0;
+ a[3896]=0;
+ a[3897]=0;
+ a[3898]=0;
+ a[3899]=0;
+ a[3900]=0;
+ a[3901]=0;
+ a[3902]=0;
+ a[3903]=0;
+ a[3904]=0;
+ a[3905]=0;
+ a[3906]=0;
+ a[3907]=0;
+ a[3908]=0;
+ a[3909]=0;
+ a[3910]=0;
+ a[3911]=0;
+ a[3912]=0;
+ a[3913]=0;
+ a[3914]=0;
+ a[3915]=0;
+ a[3916]=0;
+ a[3917]=0;
+ a[3918]=0;
+ a[3919]=0;
+ a[3920]=0;
+ a[3921]=0;
+ a[3922]=0;
+ a[3923]=0;
+ a[3924]=0;
+ a[3925]=0;
+ a[3926]=0;
+ a[3927]=0;
+ a[3928]=0;
+ a[3929]=0;
+ a[3930]=0;
+ a[3931]=0;
+ a[3932]=0;
+ a[3933]=0;
+ a[3934]=0;
+ a[3935]=0;
+ a[3936]=0;
+ a[3937]=0;
+ a[3938]=0;
+ a[3939]=0;
+ a[3940]=0;
+ a[3941]=0;
+ a[3942]=0;
+ a[3943]=0;
+ a[3944]=0;
+ a[3945]=0;
+ a[3946]=0;
+ a[3947]=0;
+ a[3948]=0;
+ a[3949]=0;
+ a[3950]=0;
+ a[3951]=0;
+ a[3952]=0;
+ a[3953]=0;
+ a[3954]=0;
+ a[3955]=0;
+ a[3956]=0;
+ a[3957]=0;
+ a[3958]=0;
+ a[3959]=0;
+ a[3960]=0;
+ a[3961]=0;
+ a[3962]=0;
+ a[3963]=0;
+ a[3964]=0;
+ a[3965]=0;
+ a[3966]=0;
+ a[3967]=0;
+ a[3968]=0;
+ a[3969]=0;
+ a[3970]=0;
+ a[3971]=0;
+ a[3972]=0;
+ a[3973]=0;
+ a[3974]=0;
+ a[3975]=0;
+ a[3976]=0;
+ a[3977]=0;
+ a[3978]=0;
+ a[3979]=0;
+ a[3980]=0;
+ a[3981]=0;
+ a[3982]=0;
+ a[3983]=0;
+ a[3984]=0;
+ a[3985]=0;
+ a[3986]=0;
+ a[3987]=0;
+ a[3988]=0;
+ a[3989]=0;
+ a[3990]=0;
+ a[3991]=0;
+ a[3992]=0;
+ a[3993]=0;
+ a[3994]=0;
+ a[3995]=0;
+ a[3996]=0;
+ a[3997]=0;
+ a[3998]=0;
+ a[3999]=0;
+ a[4000]=0;
+ a[4001]=0;
+ a[4002]=0;
+ a[4003]=0;
+ a[4004]=0;
+ a[4005]=0;
+ a[4006]=0;
+ a[4007]=0;
+ a[4008]=0;
+ a[4009]=0;
+ a[4010]=0;
+ a[4011]=0;
+ a[4012]=0;
+ a[4013]=0;
+ a[4014]=0;
+ a[4015]=0;
+ a[4016]=0;
+ a[4017]=0;
+ a[4018]=0;
+ a[4019]=0;
+ a[4020]=0;
+ a[4021]=0;
+ a[4022]=0;
+ a[4023]=0;
+ a[4024]=0;
+ a[4025]=0;
+ a[4026]=0;
+ a[4027]=0;
+ a[4028]=0;
+ a[4029]=0;
+ a[4030]=0;
+ a[4031]=0;
+ a[4032]=0;
+ a[4033]=0;
+ a[4034]=0;
+ a[4035]=0;
+ a[4036]=0;
+ a[4037]=0;
+ a[4038]=0;
+ a[4039]=0;
+ a[4040]=0;
+ a[4041]=0;
+ a[4042]=0;
+ a[4043]=0;
+ a[4044]=0;
+ a[4045]=0;
+ a[4046]=0;
+ a[4047]=0;
+ a[4048]=0;
+ a[4049]=0;
+ a[4050]=0;
+ a[4051]=0;
+ a[4052]=0;
+ a[4053]=0;
+ a[4054]=0;
+ a[4055]=0;
+ a[4056]=0;
+ a[4057]=0;
+ a[4058]=0;
+ a[4059]=0;
+ a[4060]=0;
+ a[4061]=0;
+ a[4062]=0;
+ a[4063]=0;
+ a[4064]=0;
+ a[4065]=0;
+ a[4066]=0;
+ a[4067]=0;
+ a[4068]=0;
+ a[4069]=0;
+ a[4070]=0;
+ a[4071]=0;
+ a[4072]=0;
+ a[4073]=0;
+ a[4074]=0;
+ a[4075]=0;
+ a[4076]=0;
+ a[4077]=0;
+ a[4078]=0;
+ a[4079]=0;
+ a[4080]=0;
+ a[4081]=0;
+ a[4082]=0;
+ a[4083]=0;
+ a[4084]=0;
+ a[4085]=0;
+ a[4086]=0;
+ a[4087]=0;
+ a[4088]=0;
+ a[4089]=0;
+ a[4090]=0;
+ a[4091]=0;
+ a[4092]=0;
+ a[4093]=0;
+ a[4094]=0;
+ a[4095]=0;
+ a[4096]=0;
+ a[4097]=0;
+ a[4098]=0;
+ a[4099]=0;
+ a[4100]=0;
+ a[4101]=0;
+ a[4102]=0;
+ a[4103]=0;
+ a[4104]=0;
+ a[4105]=0;
+ a[4106]=0;
+ a[4107]=0;
+ a[4108]=0;
+ a[4109]=0;
+ a[4110]=0;
+ a[4111]=0;
+ a[4112]=0;
+ a[4113]=0;
+ a[4114]=0;
+ a[4115]=0;
+ a[4116]=0;
+ a[4117]=0;
+ a[4118]=0;
+ a[4119]=0;
+ a[4120]=0;
+ a[4121]=0;
+ a[4122]=0;
+ a[4123]=0;
+ a[4124]=0;
+ a[4125]=0;
+ a[4126]=0;
+ a[4127]=0;
+ a[4128]=0;
+ a[4129]=0;
+ a[4130]=0;
+ a[4131]=0;
+ a[4132]=0;
+ a[4133]=0;
+ a[4134]=0;
+ a[4135]=0;
+ a[4136]=0;
+ a[4137]=0;
+ a[4138]=0;
+ a[4139]=0;
+ a[4140]=0;
+ a[4141]=0;
+ a[4142]=0;
+ a[4143]=0;
+ a[4144]=0;
+ a[4145]=0;
+ a[4146]=0;
+ a[4147]=0;
+ a[4148]=0;
+ a[4149]=0;
+ a[4150]=0;
+ a[4151]=0;
+ a[4152]=0;
+ a[4153]=0;
+ a[4154]=0;
+ a[4155]=0;
+ a[4156]=0;
+ a[4157]=0;
+ a[4158]=0;
+ a[4159]=0;
+ a[4160]=0;
+ a[4161]=0;
+ a[4162]=0;
+ a[4163]=0;
+ a[4164]=0;
+ a[4165]=0;
+ a[4166]=0;
+ a[4167]=0;
+ a[4168]=0;
+ a[4169]=0;
+ a[4170]=0;
+ a[4171]=0;
+ a[4172]=0;
+ a[4173]=0;
+ a[4174]=0;
+ a[4175]=0;
+ a[4176]=0;
+ a[4177]=0;
+ a[4178]=0;
+ a[4179]=0;
+ a[4180]=0;
+ a[4181]=0;
+ a[4182]=0;
+ a[4183]=0;
+ a[4184]=0;
+ a[4185]=0;
+ a[4186]=0;
+ a[4187]=0;
+ a[4188]=0;
+ a[4189]=0;
+ a[4190]=0;
+ a[4191]=0;
+ a[4192]=0;
+ a[4193]=0;
+ a[4194]=0;
+ a[4195]=0;
+ a[4196]=0;
+ a[4197]=0;
+ a[4198]=0;
+ a[4199]=0;
+ a[4200]=0;
+ a[4201]=0;
+ a[4202]=0;
+ a[4203]=0;
+ a[4204]=0;
+ a[4205]=0;
+ a[4206]=0;
+ a[4207]=0;
+ a[4208]=0;
+ a[4209]=0;
+ a[4210]=0;
+ a[4211]=0;
+ a[4212]=0;
+ a[4213]=0;
+ a[4214]=0;
+ a[4215]=0;
+ a[4216]=0;
+ a[4217]=0;
+ a[4218]=0;
+ a[4219]=0;
+ a[4220]=0;
+ a[4221]=0;
+ a[4222]=0;
+ a[4223]=0;
+ a[4224]=0;
+ a[4225]=0;
+ a[4226]=0;
+ a[4227]=0;
+ a[4228]=0;
+ a[4229]=0;
+ a[4230]=0;
+ a[4231]=0;
+ a[4232]=0;
+ a[4233]=0;
+ a[4234]=0;
+ a[4235]=0;
+ a[4236]=0;
+ a[4237]=0;
+ a[4238]=0;
+ a[4239]=0;
+ a[4240]=0;
+ a[4241]=0;
+ a[4242]=0;
+ a[4243]=0;
+ a[4244]=0;
+ a[4245]=0;
+ a[4246]=0;
+ a[4247]=0;
+ a[4248]=0;
+ a[4249]=0;
+ a[4250]=0;
+ a[4251]=0;
+ a[4252]=0;
+ a[4253]=0;
+ a[4254]=0;
+ a[4255]=0;
+ a[4256]=0;
+ a[4257]=0;
+ a[4258]=0;
+ a[4259]=0;
+ a[4260]=0;
+ a[4261]=0;
+ a[4262]=0;
+ a[4263]=0;
+ a[4264]=0;
+ a[4265]=0;
+ a[4266]=0;
+ a[4267]=0;
+ a[4268]=0;
+ a[4269]=0;
+ a[4270]=0;
+ a[4271]=0;
+ a[4272]=0;
+ a[4273]=0;
+ a[4274]=0;
+ a[4275]=0;
+ a[4276]=0;
+ a[4277]=0;
+ a[4278]=0;
+ a[4279]=0;
+ a[4280]=0;
+ a[4281]=0;
+ a[4282]=0;
+ a[4283]=0;
+ a[4284]=0;
+ a[4285]=0;
+ a[4286]=0;
+ a[4287]=0;
+ a[4288]=0;
+ a[4289]=0;
+ a[4290]=0;
+ a[4291]=0;
+ a[4292]=0;
+ a[4293]=0;
+ a[4294]=0;
+ a[4295]=0;
+ a[4296]=0;
+ a[4297]=0;
+ a[4298]=0;
+ a[4299]=0;
+ a[4300]=0;
+ a[4301]=0;
+ a[4302]=0;
+ a[4303]=0;
+ a[4304]=0;
+ a[4305]=0;
+ a[4306]=0;
+ a[4307]=0;
+ a[4308]=0;
+ a[4309]=0;
+ a[4310]=0;
+ a[4311]=0;
+ a[4312]=0;
+ a[4313]=0;
+ a[4314]=0;
+ a[4315]=0;
+ a[4316]=0;
+ a[4317]=0;
+ a[4318]=0;
+ a[4319]=0;
+ a[4320]=0;
+ a[4321]=0;
+ a[4322]=0;
+ a[4323]=0;
+ a[4324]=0;
+ a[4325]=0;
+ a[4326]=0;
+ a[4327]=0;
+ a[4328]=0;
+ a[4329]=0;
+ a[4330]=0;
+ a[4331]=0;
+ a[4332]=0;
+ a[4333]=0;
+ a[4334]=0;
+ a[4335]=0;
+ a[4336]=0;
+ a[4337]=0;
+ a[4338]=0;
+ a[4339]=0;
+ a[4340]=0;
+ a[4341]=0;
+ a[4342]=0;
+ a[4343]=0;
+ a[4344]=0;
+ a[4345]=0;
+ a[4346]=0;
+ a[4347]=0;
+ a[4348]=0;
+ a[4349]=0;
+ a[4350]=0;
+ a[4351]=0;
+ a[4352]=0;
+ a[4353]=0;
+ a[4354]=0;
+ a[4355]=0;
+ a[4356]=0;
+ a[4357]=0;
+ a[4358]=0;
+ a[4359]=0;
+ a[4360]=0;
+ a[4361]=0;
+ a[4362]=0;
+ a[4363]=0;
+ a[4364]=0;
+ a[4365]=0;
+ a[4366]=0;
+ a[4367]=0;
+ a[4368]=0;
+ a[4369]=0;
+ a[4370]=0;
+ a[4371]=0;
+ a[4372]=0;
+ a[4373]=0;
+ a[4374]=0;
+ a[4375]=0;
+ a[4376]=0;
+ a[4377]=0;
+ a[4378]=0;
+ a[4379]=0;
+ a[4380]=0;
+ a[4381]=0;
+ a[4382]=0;
+ a[4383]=0;
+ a[4384]=0;
+ a[4385]=0;
+ a[4386]=0;
+ a[4387]=0;
+ a[4388]=0;
+ a[4389]=0;
+ a[4390]=0;
+ a[4391]=0;
+ a[4392]=0;
+ a[4393]=0;
+ a[4394]=0;
+ a[4395]=0;
+ a[4396]=0;
+ a[4397]=0;
+ a[4398]=0;
+ a[4399]=0;
+ a[4400]=0;
+ a[4401]=0;
+ a[4402]=0;
+ a[4403]=0;
+ a[4404]=0;
+ a[4405]=0;
+ a[4406]=0;
+ a[4407]=0;
+ a[4408]=0;
+ a[4409]=0;
+ a[4410]=0;
+ a[4411]=0;
+ a[4412]=0;
+ a[4413]=0;
+ a[4414]=0;
+ a[4415]=0;
+ a[4416]=0;
+ a[4417]=0;
+ a[4418]=0;
+ a[4419]=0;
+ a[4420]=0;
+ a[4421]=0;
+ a[4422]=0;
+ a[4423]=0;
+ a[4424]=0;
+ a[4425]=0;
+ a[4426]=0;
+ a[4427]=0;
+ a[4428]=0;
+ a[4429]=0;
+ a[4430]=0;
+ a[4431]=0;
+ a[4432]=0;
+ a[4433]=0;
+ a[4434]=0;
+ a[4435]=0;
+ a[4436]=0;
+ a[4437]=0;
+ a[4438]=0;
+ a[4439]=0;
+ a[4440]=0;
+ a[4441]=0;
+ a[4442]=0;
+ a[4443]=0;
+ a[4444]=0;
+ a[4445]=0;
+ a[4446]=0;
+ a[4447]=0;
+ a[4448]=0;
+ a[4449]=0;
+ a[4450]=0;
+ a[4451]=0;
+ a[4452]=0;
+ a[4453]=0;
+ a[4454]=0;
+ a[4455]=0;
+ a[4456]=0;
+ a[4457]=0;
+ a[4458]=0;
+ a[4459]=0;
+ a[4460]=0;
+ a[4461]=0;
+ a[4462]=0;
+ a[4463]=0;
+ a[4464]=0;
+ a[4465]=0;
+ a[4466]=0;
+ a[4467]=0;
+ a[4468]=0;
+ a[4469]=0;
+ a[4470]=0;
+ a[4471]=0;
+ a[4472]=0;
+ a[4473]=0;
+ a[4474]=0;
+ a[4475]=0;
+ a[4476]=0;
+ a[4477]=0;
+ a[4478]=0;
+ a[4479]=0;
+ a[4480]=0;
+ a[4481]=0;
+ a[4482]=0;
+ a[4483]=0;
+ a[4484]=0;
+ a[4485]=0;
+ a[4486]=0;
+ a[4487]=0;
+ a[4488]=0;
+ a[4489]=0;
+ a[4490]=0;
+ a[4491]=0;
+ a[4492]=0;
+ a[4493]=0;
+ a[4494]=0;
+ a[4495]=0;
+ a[4496]=0;
+ a[4497]=0;
+ a[4498]=0;
+ a[4499]=0;
+ a[4500]=0;
+ a[4501]=0;
+ a[4502]=0;
+ a[4503]=0;
+ a[4504]=0;
+ a[4505]=0;
+ a[4506]=0;
+ a[4507]=0;
+ a[4508]=0;
+ a[4509]=0;
+ a[4510]=0;
+ a[4511]=0;
+ a[4512]=0;
+ a[4513]=0;
+ a[4514]=0;
+ a[4515]=0;
+ a[4516]=0;
+ a[4517]=0;
+ a[4518]=0;
+ a[4519]=0;
+ a[4520]=0;
+ a[4521]=0;
+ a[4522]=0;
+ a[4523]=0;
+ a[4524]=0;
+ a[4525]=0;
+ a[4526]=0;
+ a[4527]=0;
+ a[4528]=0;
+ a[4529]=0;
+ a[4530]=0;
+ a[4531]=0;
+ a[4532]=0;
+ a[4533]=0;
+ a[4534]=0;
+ a[4535]=0;
+ a[4536]=0;
+ a[4537]=0;
+ a[4538]=0;
+ a[4539]=0;
+ a[4540]=0;
+ a[4541]=0;
+ a[4542]=0;
+ a[4543]=0;
+ a[4544]=0;
+ a[4545]=0;
+ a[4546]=0;
+ a[4547]=0;
+ a[4548]=0;
+ a[4549]=0;
+ a[4550]=0;
+ a[4551]=0;
+ a[4552]=0;
+ a[4553]=0;
+ a[4554]=0;
+ a[4555]=0;
+ a[4556]=0;
+ a[4557]=0;
+ a[4558]=0;
+ a[4559]=0;
+ a[4560]=0;
+ a[4561]=0;
+ a[4562]=0;
+ a[4563]=0;
+ a[4564]=0;
+ a[4565]=0;
+ a[4566]=0;
+ a[4567]=0;
+ a[4568]=0;
+ a[4569]=0;
+ a[4570]=0;
+ a[4571]=0;
+ a[4572]=0;
+ a[4573]=0;
+ a[4574]=0;
+ a[4575]=0;
+ a[4576]=0;
+ a[4577]=0;
+ a[4578]=0;
+ a[4579]=0;
+ a[4580]=0;
+ a[4581]=0;
+ a[4582]=0;
+ a[4583]=0;
+ a[4584]=0;
+ a[4585]=0;
+ a[4586]=0;
+ a[4587]=0;
+ a[4588]=0;
+ a[4589]=0;
+ a[4590]=0;
+ a[4591]=0;
+ a[4592]=0;
+ a[4593]=0;
+ a[4594]=0;
+ a[4595]=0;
+ a[4596]=0;
+ a[4597]=0;
+ a[4598]=0;
+ a[4599]=0;
+ a[4600]=0;
+ a[4601]=0;
+ a[4602]=0;
+ a[4603]=0;
+ a[4604]=0;
+ a[4605]=0;
+ a[4606]=0;
+ a[4607]=0;
+ a[4608]=0;
+ a[4609]=0;
+ a[4610]=0;
+ a[4611]=0;
+ a[4612]=0;
+ a[4613]=0;
+ a[4614]=0;
+ a[4615]=0;
+ a[4616]=0;
+ a[4617]=0;
+ a[4618]=0;
+ a[4619]=0;
+ a[4620]=0;
+ a[4621]=0;
+ a[4622]=0;
+ a[4623]=0;
+ a[4624]=0;
+ a[4625]=0;
+ a[4626]=0;
+ a[4627]=0;
+ a[4628]=0;
+ a[4629]=0;
+ a[4630]=0;
+ a[4631]=0;
+ a[4632]=0;
+ a[4633]=0;
+ a[4634]=0;
+ a[4635]=0;
+ a[4636]=0;
+ a[4637]=0;
+ a[4638]=0;
+ a[4639]=0;
+ a[4640]=0;
+ a[4641]=0;
+ a[4642]=0;
+ a[4643]=0;
+ a[4644]=0;
+ a[4645]=0;
+ a[4646]=0;
+ a[4647]=0;
+ a[4648]=0;
+ a[4649]=0;
+ a[4650]=0;
+ a[4651]=0;
+ a[4652]=0;
+ a[4653]=0;
+ a[4654]=0;
+ a[4655]=0;
+ a[4656]=0;
+ a[4657]=0;
+ a[4658]=0;
+ a[4659]=0;
+ a[4660]=0;
+ a[4661]=0;
+ a[4662]=0;
+ a[4663]=0;
+ a[4664]=0;
+ a[4665]=0;
+ a[4666]=0;
+ a[4667]=0;
+ a[4668]=0;
+ a[4669]=0;
+ a[4670]=0;
+ a[4671]=0;
+ a[4672]=0;
+ a[4673]=0;
+ a[4674]=0;
+ a[4675]=0;
+ a[4676]=0;
+ a[4677]=0;
+ a[4678]=0;
+ a[4679]=0;
+ a[4680]=0;
+ a[4681]=0;
+ a[4682]=0;
+ a[4683]=0;
+ a[4684]=0;
+ a[4685]=0;
+ a[4686]=0;
+ a[4687]=0;
+ a[4688]=0;
+ a[4689]=0;
+ a[4690]=0;
+ a[4691]=0;
+ a[4692]=0;
+ a[4693]=0;
+ a[4694]=0;
+ a[4695]=0;
+ a[4696]=0;
+ a[4697]=0;
+ a[4698]=0;
+ a[4699]=0;
+ a[4700]=0;
+ a[4701]=0;
+ a[4702]=0;
+ a[4703]=0;
+ a[4704]=0;
+ a[4705]=0;
+ a[4706]=0;
+ a[4707]=0;
+ a[4708]=0;
+ a[4709]=0;
+ a[4710]=0;
+ a[4711]=0;
+ a[4712]=0;
+ a[4713]=0;
+ a[4714]=0;
+ a[4715]=0;
+ a[4716]=0;
+ a[4717]=0;
+ a[4718]=0;
+ a[4719]=0;
+ a[4720]=0;
+ a[4721]=0;
+ a[4722]=0;
+ a[4723]=0;
+ a[4724]=0;
+ a[4725]=0;
+ a[4726]=0;
+ a[4727]=0;
+ a[4728]=0;
+ a[4729]=0;
+ a[4730]=0;
+ a[4731]=0;
+ a[4732]=0;
+ a[4733]=0;
+ a[4734]=0;
+ a[4735]=0;
+ a[4736]=0;
+ a[4737]=0;
+ a[4738]=0;
+ a[4739]=0;
+ a[4740]=0;
+ a[4741]=0;
+ a[4742]=0;
+ a[4743]=0;
+ a[4744]=0;
+ a[4745]=0;
+ a[4746]=0;
+ a[4747]=0;
+ a[4748]=0;
+ a[4749]=0;
+ a[4750]=0;
+ a[4751]=0;
+ a[4752]=0;
+ a[4753]=0;
+ a[4754]=0;
+ a[4755]=0;
+ a[4756]=0;
+ a[4757]=0;
+ a[4758]=0;
+ a[4759]=0;
+ a[4760]=0;
+ a[4761]=0;
+ a[4762]=0;
+ a[4763]=0;
+ a[4764]=0;
+ a[4765]=0;
+ a[4766]=0;
+ a[4767]=0;
+ a[4768]=0;
+ a[4769]=0;
+ a[4770]=0;
+ a[4771]=0;
+ a[4772]=0;
+ a[4773]=0;
+ a[4774]=0;
+ a[4775]=0;
+ a[4776]=0;
+ a[4777]=0;
+ a[4778]=0;
+ a[4779]=0;
+ a[4780]=0;
+ a[4781]=0;
+ a[4782]=0;
+ a[4783]=0;
+ a[4784]=0;
+ a[4785]=0;
+ a[4786]=0;
+ a[4787]=0;
+ a[4788]=0;
+ a[4789]=0;
+ a[4790]=0;
+ a[4791]=0;
+ a[4792]=0;
+ a[4793]=0;
+ a[4794]=0;
+ a[4795]=0;
+ a[4796]=0;
+ a[4797]=0;
+ a[4798]=0;
+ a[4799]=0;
+ a[4800]=0;
+ a[4801]=0;
+ a[4802]=0;
+ a[4803]=0;
+ a[4804]=0;
+ a[4805]=0;
+ a[4806]=0;
+ a[4807]=0;
+ a[4808]=0;
+ a[4809]=0;
+ a[4810]=0;
+ a[4811]=0;
+ a[4812]=0;
+ a[4813]=0;
+ a[4814]=0;
+ a[4815]=0;
+ a[4816]=0;
+ a[4817]=0;
+ a[4818]=0;
+ a[4819]=0;
+ a[4820]=0;
+ a[4821]=0;
+ a[4822]=0;
+ a[4823]=0;
+ a[4824]=0;
+ a[4825]=0;
+ a[4826]=0;
+ a[4827]=0;
+ a[4828]=0;
+ a[4829]=0;
+ a[4830]=0;
+ a[4831]=0;
+ a[4832]=0;
+ a[4833]=0;
+ a[4834]=0;
+ a[4835]=0;
+ a[4836]=0;
+ a[4837]=0;
+ a[4838]=0;
+ a[4839]=0;
+ a[4840]=0;
+ a[4841]=0;
+ a[4842]=0;
+ a[4843]=0;
+ a[4844]=0;
+ a[4845]=0;
+ a[4846]=0;
+ a[4847]=0;
+ a[4848]=0;
+ a[4849]=0;
+ a[4850]=0;
+ a[4851]=0;
+ a[4852]=0;
+ a[4853]=0;
+ a[4854]=0;
+ a[4855]=0;
+ a[4856]=0;
+ a[4857]=0;
+ a[4858]=0;
+ a[4859]=0;
+ a[4860]=0;
+ a[4861]=0;
+ a[4862]=0;
+ a[4863]=0;
+ a[4864]=0;
+ a[4865]=0;
+ a[4866]=0;
+ a[4867]=0;
+ a[4868]=0;
+ a[4869]=0;
+ a[4870]=0;
+ a[4871]=0;
+ a[4872]=0;
+ a[4873]=0;
+ a[4874]=0;
+ a[4875]=0;
+ a[4876]=0;
+ a[4877]=0;
+ a[4878]=0;
+ a[4879]=0;
+ a[4880]=0;
+ a[4881]=0;
+ a[4882]=0;
+ a[4883]=0;
+ a[4884]=0;
+ a[4885]=0;
+ a[4886]=0;
+ a[4887]=0;
+ a[4888]=0;
+ a[4889]=0;
+ a[4890]=0;
+ a[4891]=0;
+ a[4892]=0;
+ a[4893]=0;
+ a[4894]=0;
+ a[4895]=0;
+ a[4896]=0;
+ a[4897]=0;
+ a[4898]=0;
+ a[4899]=0;
+ a[4900]=0;
+ a[4901]=0;
+ a[4902]=0;
+ a[4903]=0;
+ a[4904]=0;
+ a[4905]=0;
+ a[4906]=0;
+ a[4907]=0;
+ a[4908]=0;
+ a[4909]=0;
+ a[4910]=0;
+ a[4911]=0;
+ a[4912]=0;
+ a[4913]=0;
+ a[4914]=0;
+ a[4915]=0;
+ a[4916]=0;
+ a[4917]=0;
+ a[4918]=0;
+ a[4919]=0;
+ a[4920]=0;
+ a[4921]=0;
+ a[4922]=0;
+ a[4923]=0;
+ a[4924]=0;
+ a[4925]=0;
+ a[4926]=0;
+ a[4927]=0;
+ a[4928]=0;
+ a[4929]=0;
+ a[4930]=0;
+ a[4931]=0;
+ a[4932]=0;
+ a[4933]=0;
+ a[4934]=0;
+ a[4935]=0;
+ a[4936]=0;
+ a[4937]=0;
+ a[4938]=0;
+ a[4939]=0;
+ a[4940]=0;
+ a[4941]=0;
+ a[4942]=0;
+ a[4943]=0;
+ a[4944]=0;
+ a[4945]=0;
+ a[4946]=0;
+ a[4947]=0;
+ a[4948]=0;
+ a[4949]=0;
+ a[4950]=0;
+ a[4951]=0;
+ a[4952]=0;
+ a[4953]=0;
+ a[4954]=0;
+ a[4955]=0;
+ a[4956]=0;
+ a[4957]=0;
+ a[4958]=0;
+ a[4959]=0;
+ a[4960]=0;
+ a[4961]=0;
+ a[4962]=0;
+ a[4963]=0;
+ a[4964]=0;
+ a[4965]=0;
+ a[4966]=0;
+ a[4967]=0;
+ a[4968]=0;
+ a[4969]=0;
+ a[4970]=0;
+ a[4971]=0;
+ a[4972]=0;
+ a[4973]=0;
+ a[4974]=0;
+ a[4975]=0;
+ a[4976]=0;
+ a[4977]=0;
+ a[4978]=0;
+ a[4979]=0;
+ a[4980]=0;
+ a[4981]=0;
+ a[4982]=0;
+ a[4983]=0;
+ a[4984]=0;
+ a[4985]=0;
+ a[4986]=0;
+ a[4987]=0;
+ a[4988]=0;
+ a[4989]=0;
+ a[4990]=0;
+ a[4991]=0;
+ a[4992]=0;
+ a[4993]=0;
+ a[4994]=0;
+ a[4995]=0;
+ a[4996]=0;
+ a[4997]=0;
+ a[4998]=0;
+ a[4999]=0;
+ return a;
+}
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1973.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1973.js
new file mode 100644
index 0000000..8708bf1
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1973.js
@@ -0,0 +1,52 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that getters and setters pass unwrapped this values in strict mode
+// and wrapped this values in non-strict mode.
+
+function TestAccessorWrapping(primitive) {
+ var prototype = Object.getPrototypeOf(Object(primitive))
+ // Check that strict mode passes unwrapped this value.
+ var strict_type = typeof primitive;
+ Object.defineProperty(prototype, "strict", {
+ get: function() { "use strict"; assertSame(strict_type, typeof this); },
+ set: function() { "use strict"; assertSame(strict_type, typeof this); }
+ });
+ primitive.strict = primitive.strict;
+ // Check that non-strict mode passes wrapped this value.
+ var sloppy_type = typeof Object(primitive);
+ Object.defineProperty(prototype, "sloppy", {
+ get: function() { assertSame(sloppy_type, typeof this); },
+ set: function() { assertSame(sloppy_type, typeof this); }
+ });
+ primitive.sloppy = primitive.sloppy;
+}
+
+TestAccessorWrapping(true);
+TestAccessorWrapping(0);
+TestAccessorWrapping({});
+TestAccessorWrapping("");
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-1980.js b/src/3rdparty/v8/test/mjsunit/regress/regress-1980.js
new file mode 100644
index 0000000..49dfd06
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-1980.js
@@ -0,0 +1,40 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// See: http://code.google.com/p/v8/issues/detail?id=1980
+
+var invalid_this = [ "invalid", 23, undefined, null ];
+for (var i = 0; i < invalid_this.length; i++) {
+ var exception = false;
+ try {
+ Error.prototype.toString.call(invalid_this[i]);
+ } catch (e) {
+ exception = true;
+ assertTrue("called_on_non_object" == e.type);
+ }
+ assertTrue(exception);
+}
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2027.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2027.js
new file mode 100644
index 0000000..00ed03f
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2027.js
@@ -0,0 +1,48 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var d = new Date(2010, 1, 1);
+
+function Check(time) {
+ assertEquals(d.getTime(), time);
+}
+
+Check(d.setMilliseconds(10));
+Check(d.setSeconds(10));
+Check(d.setMinutes(10));
+Check(d.setHours(10));
+Check(d.setDate(10));
+Check(d.setMonth(10));
+Check(d.setFullYear(2010));
+Check(d.setUTCMilliseconds(10));
+Check(d.setUTCSeconds(10));
+Check(d.setUTCMinutes(10));
+Check(d.setUTCHours(10));
+Check(d.setUTCDate(10));
+Check(d.setUTCMonth(10));
+Check(d.setUTCFullYear(2010));
+
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2030.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2030.js
new file mode 100644
index 0000000..fb5a3d0
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2030.js
@@ -0,0 +1,53 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function a() {
+ this.x = 1;
+}
+var aa = new a();
+%DebugPrint(aa);
+
+function b() {
+ this.z = 23;
+ this.x = 2;
+}
+var bb = new b();
+%DebugPrint(bb);
+
+function f(o) {
+ return o.x;
+}
+
+assertSame(1, f(aa));
+assertSame(1, f(aa));
+assertSame(2, f(bb));
+assertSame(2, f(bb));
+%OptimizeFunctionOnNextCall(f);
+assertSame(1, f(aa));
+assertSame(2, f(bb));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2032.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2032.js
new file mode 100644
index 0000000..ad6408d
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2032.js
@@ -0,0 +1,64 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// See: http://code.google.com/p/v8/issues/detail?id=2032
+
+// Case independent regexp that ends on the first character in a block.
+assertTrue(/[@-A]/i.test("a"));
+assertTrue(/[@-A]/i.test("A"));
+assertTrue(/[@-A]/i.test("@"));
+
+assertFalse(/[@-A]/.test("a"));
+assertTrue(/[@-A]/.test("A"));
+assertTrue(/[@-A]/.test("@"));
+
+assertFalse(/[¿-À]/i.test('¾'));
+assertTrue(/[¿-À]/i.test('¿'));
+assertTrue(/[¿-À]/i.test('À'));
+assertTrue(/[¿-À]/i.test('à'));
+assertFalse(/[¿-À]/i.test('á'));
+assertFalse(/[¿-À]/i.test('Á'));
+
+assertFalse(/[¿-À]/.test('¾'));
+assertTrue(/[¿-À]/.test('¿'));
+assertTrue(/[¿-À]/.test('À'));
+assertFalse(/[¿-À]/.test('à'));
+assertFalse(/[¿-À]/.test('á'));
+assertFalse(/[¿-À]/.test('á'));
+assertFalse(/[¿-À]/i.test('Á'));
+
+assertFalse(/[Ö-×]/i.test('Õ'));
+assertTrue(/[Ö-×]/i.test('Ö'));
+assertTrue(/[Ö-×]/i.test('ö'));
+assertTrue(/[Ö-×]/i.test('×'));
+assertFalse(/[Ö-×]/i.test('Ø'));
+
+assertFalse(/[Ö-×]/.test('Õ'));
+assertTrue(/[Ö-×]/.test('Ö'));
+assertFalse(/[Ö-×]/.test('ö'));
+assertTrue(/[Ö-×]/.test('×'));
+assertFalse(/[Ö-×]/.test('Ø'));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2034.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2034.js
new file mode 100644
index 0000000..c510f97
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2034.js
@@ -0,0 +1,46 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-collections
+
+var key = {};
+var map = new WeakMap;
+Object.preventExtensions(key);
+
+// Try querying using frozen key.
+assertFalse(map.has(key));
+assertSame(undefined, map.get(key));
+
+// Try adding using frozen key.
+map.set(key, 1);
+assertTrue(map.has(key));
+assertSame(1, map.get(key));
+
+// Try deleting using frozen key.
+map.delete(key, 1);
+assertFalse(map.has(key));
+assertSame(undefined, map.get(key));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2045.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2045.js
new file mode 100644
index 0000000..822ee1f
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2045.js
@@ -0,0 +1,49 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+function foo() {
+ assertEquals(2, arguments.length);
+}
+
+function bar() {
+ G.x;
+ return foo.apply(this, arguments);
+}
+
+function baz() {
+ return bar(1, 2);
+}
+
+G = {x: 0};
+baz();
+baz();
+%OptimizeFunctionOnNextCall(baz);
+baz();
+delete G.x;
+baz();
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2054.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2054.js
new file mode 100644
index 0000000..97b989c
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2054.js
@@ -0,0 +1,34 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that we can correctly optimize top level code that contains a
+// throw (or return) as its last statement.
+
+var N = 1e5; // Number of iterations that trigger optimization.
+for (var i = 0; i < N; i++) {
+ if (i > N) throw new Error;
+}
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2055.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2055.js
new file mode 100644
index 0000000..1eaf62c
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2055.js
@@ -0,0 +1,48 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that array literal boilerplate objects can be transitioned while
+// existing un-transitioned clones are still being populated.
+
+function test1(depth) {
+ if (--depth < 0) {
+ return [];
+ } else {
+ return [ 0, test1(depth) ];
+ }
+}
+assertEquals([0,[0,[]]], test1(2));
+
+function test2(depth) {
+ if (--depth < 0) {
+ return [];
+ } else {
+ var o = [ 0, test2(depth) ];
+ return (depth == 0) ? 0.5 : o;
+ }
+}
+assertEquals([0,0.5], test2(2));
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2056.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2056.js
new file mode 100644
index 0000000..d34a750
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2056.js
@@ -0,0 +1,66 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+var cases = [
+ [0.0, 0.0, 0.0, 0.0],
+ [undefined, 0.0, NaN, NaN],
+ [0.0, undefined, NaN, NaN],
+ [NaN, 0.0, NaN, NaN],
+ [0.0, NaN, NaN, NaN],
+ [-NaN, 0.0, NaN, NaN],
+ [0.0, -NaN, NaN, NaN],
+ [Infinity, 0.0, Infinity, 0.0],
+ [0.0, Infinity, Infinity, 0.0],
+ [-Infinity, 0.0, 0.0, -Infinity],
+ [0.0, -Infinity, 0.0, -Infinity]
+];
+
+function do_min(a, b) {
+ return Math.min(a, b);
+}
+
+function do_max(a, b) {
+ return Math.max(a, b);
+}
+
+// Make sure that non-crankshaft results match expectations.
+for (i = 0; i < cases.length; ++i) {
+ var c = cases[i];
+ assertEquals(c[3], do_min(c[0], c[1]));
+ assertEquals(c[2], do_max(c[0], c[1]));
+}
+
+// Make sure that crankshaft results match expectations.
+for (i = 0; i < cases.length; ++i) {
+ var c = cases[i];
+ %OptimizeFunctionOnNextCall(do_min);
+ %OptimizeFunctionOnNextCall(do_max);
+ assertEquals(c[3], do_min(c[0], c[1]));
+ assertEquals(c[2], do_max(c[0], c[1]));
+}
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-2058.js b/src/3rdparty/v8/test/mjsunit/regress/regress-2058.js
new file mode 100644
index 0000000..9a69ea1
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-2058.js
@@ -0,0 +1,37 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+// See http://code.google.com/p/v8/issues/detail?id=2058
+
+// A match after a replace with a function argument needs to reset
+// the flag that determines whether we are using indices or substrings
+// to indicate the last match.
+"Now is the".replace(/Now (\w+) the/g, function() {
+ "foo bar".match(/( )/);
+ assertEquals(RegExp.$1, " ");
+})
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-397.js b/src/3rdparty/v8/test/mjsunit/regress/regress-397.js
index 111f4a6..0e4143d 100644
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-397.js
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-397.js
@@ -25,10 +25,19 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// Flags: --allow-natives-syntax
// See http://code.google.com/p/v8/issues/detail?id=397
-assertEquals("Infinity", String(Math.pow(Infinity, 0.5)));
-assertEquals(0, Math.pow(Infinity, -0.5));
-assertEquals("Infinity", String(Math.pow(-Infinity, 0.5)));
-assertEquals(0, Math.pow(-Infinity, -0.5));
+function test() {
+ assertEquals("Infinity", String(Math.pow(Infinity, 0.5)));
+ assertEquals(0, Math.pow(Infinity, -0.5));
+
+ assertEquals("Infinity", String(Math.pow(-Infinity, 0.5)));
+ assertEquals(0, Math.pow(-Infinity, -0.5));
+}
+
+test();
+test();
+%OptimizeFunctionOnNextCall(test);
+test();
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-91517.js b/src/3rdparty/v8/test/mjsunit/regress/regress-91517.js
deleted file mode 100644
index 68a768c..0000000
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-91517.js
+++ /dev/null
@@ -1,112 +0,0 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Getting property names of an object with a prototype chain that
-// triggers dictionary elements in GetLocalPropertyNames() shouldn't
-// crash the runtime
-
-// Flags: --allow-natives-syntax
-
-function Object1() {
- this.foo = 1;
-}
-
-function Object2() {
- this.fuz = 2;
- this.objects = new Object();
- this.fuz1 = 2;
- this.fuz2 = 2;
- this.fuz3 = 2;
- this.fuz4 = 2;
- this.fuz5 = 2;
- this.fuz6 = 2;
- this.fuz7 = 2;
- this.fuz8 = 2;
- this.fuz9 = 2;
- this.fuz10 = 2;
- this.fuz11 = 2;
- this.fuz12 = 2;
- this.fuz13 = 2;
- this.fuz14 = 2;
- this.fuz15 = 2;
- this.fuz16 = 2;
- this.fuz17 = 2;
- // Force dictionary-based properties
- for (x=1;x<1000;x++) {
- this["sdf" + x] = 2;
- }
-}
-
-function Object3() {
- this.boo = 3;
-}
-
-function Object4() {
- this.baz = 4;
-}
-
-obj1 = new Object1();
-obj2 = new Object2();
-obj3 = new Object3();
-obj4 = new Object4();
-
-%SetHiddenPrototype(obj4, obj3);
-%SetHiddenPrototype(obj3, obj2);
-%SetHiddenPrototype(obj2, obj1);
-
-function contains(a, obj) {
- for(var i = 0; i < a.length; i++) {
- if(a[i] === obj){
- return true;
- }
- }
- return false;
-}
-names = %GetLocalPropertyNames(obj4);
-assertEquals(1021, names.length);
-assertTrue(contains(names, "baz"));
-assertTrue(contains(names, "boo"));
-assertTrue(contains(names, "foo"));
-assertTrue(contains(names, "fuz"));
-assertTrue(contains(names, "fuz1"));
-assertTrue(contains(names, "fuz2"));
-assertTrue(contains(names, "fuz3"));
-assertTrue(contains(names, "fuz4"));
-assertTrue(contains(names, "fuz5"));
-assertTrue(contains(names, "fuz6"));
-assertTrue(contains(names, "fuz7"));
-assertTrue(contains(names, "fuz8"));
-assertTrue(contains(names, "fuz9"));
-assertTrue(contains(names, "fuz10"));
-assertTrue(contains(names, "fuz11"));
-assertTrue(contains(names, "fuz12"));
-assertTrue(contains(names, "fuz13"));
-assertTrue(contains(names, "fuz14"));
-assertTrue(contains(names, "fuz15"));
-assertTrue(contains(names, "fuz16"));
-assertTrue(contains(names, "fuz17"));
-assertFalse(names[1020] == undefined);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-95113.js b/src/3rdparty/v8/test/mjsunit/regress/regress-95113.js
index f01b270..468bff8 100644
--- a/src/3rdparty/v8/test/mjsunit/regress/regress-95113.js
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-95113.js
@@ -32,7 +32,7 @@ function get_double_array() {
var i = 0;
while (!%HasFastDoubleElements(a)) {
a[i] = i;
- i++;
+ i += 0.5;
}
assertTrue(%HasFastDoubleElements(a));
a.length = 1;
diff --git a/src/3rdparty/v8/src/extensions/experimental/collator.h b/src/3rdparty/v8/test/mjsunit/regress/regress-97116.js
index ca7e4dc..b858ca5 100644
--- a/src/3rdparty/v8/src/extensions/experimental/collator.h
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-97116.js
@@ -25,44 +25,26 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#ifndef V8_EXTENSIONS_EXPERIMENTAL_COLLATOR_H
-#define V8_EXTENSIONS_EXPERIMENTAL_COLLATOR_H_
+// Flags: --expose-gc --allow-natives-syntax
-#include "include/v8.h"
+// Check that we are not flushing code for inlined functions that
+// have a pending lazy deoptimization on the stack.
-#include "unicode/uversion.h"
-
-namespace U_ICU_NAMESPACE {
-class Collator;
-class UnicodeString;
+function deopt() {
+ try { } catch (e) { } // Avoid inlining.
+ %DeoptimizeFunction(outer);
+ for (var i = 0; i < 10; i++) gc(); // Force code flushing.
}
-namespace v8 {
-namespace internal {
-
-class Collator {
- public:
- static v8::Handle<v8::Value> JSCollator(const v8::Arguments& args);
-
- // Helper methods for various bindings.
-
- // Unpacks collator object from corresponding JavaScript object.
- static icu::Collator* UnpackCollator(v8::Handle<v8::Object> obj);
-
- // Release memory we allocated for the Collator once the JS object that
- // holds the pointer gets garbage collected.
- static void DeleteCollator(v8::Persistent<v8::Value> object, void* param);
-
- // Compare two strings and returns -1, 0 and 1 depending on
- // whether string1 is smaller than, equal to or larger than string2.
- static v8::Handle<v8::Value> CollatorCompare(const v8::Arguments& args);
-
- private:
- Collator() {}
-
- static v8::Persistent<v8::FunctionTemplate> collator_template_;
-};
+function outer(should_deopt) {
+ inner(should_deopt);
+}
-} } // namespace v8::internal
+function inner(should_deopt) {
+ if (should_deopt) deopt();
+}
-#endif // V8_EXTENSIONS_EXPERIMENTAL_COLLATOR
+outer(false);
+outer(false);
+%OptimizeFunctionOnNextCall(outer);
+outer(true);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-100859.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-100859.js
new file mode 100644
index 0000000..6824426
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-100859.js
@@ -0,0 +1,39 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// This used to trigger a crash because of an unhandled stack overflow.
+function setx() {
+ setx(typeof new Uint16Array('x') === 'object');
+}
+var exception = false;
+try {
+ setx();
+} catch (ex) {
+ assertTrue(ex instanceof RangeError);
+ exception = true;
+}
+assertTrue(exception);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-107996.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-107996.js
new file mode 100644
index 0000000..dfe07e5
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-107996.js
@@ -0,0 +1,64 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+
+Debug = debug.Debug;
+
+Debug.setListener(listener);
+
+var fourteen;
+var four_in_debugger = [];
+
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.Break) {
+ for (var i = 0; i < exec_state.frameCount(); i++) {
+ var frame = exec_state.frame(i);
+ four_in_debugger[i] = frame.evaluate("four", false).value();
+ }
+ }
+}
+
+function f1() {
+ var three = 3;
+ var four = 4;
+ (function f2() {
+ var seven = 7;
+ (function f3() {
+ debugger;
+ fourteen = three + four + seven;
+ })();
+ })();
+}
+
+f1();
+assertEquals(14, fourteen);
+assertEquals(4, four_in_debugger[0]);
+assertEquals(4, four_in_debugger[1]);
+assertEquals(4, four_in_debugger[2]);
+
+Debug.setListener(null);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-119926.js b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-119926.js
new file mode 100644
index 0000000..26b84fa
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-crbug-119926.js
@@ -0,0 +1,33 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that array elements don't break upon garbage collection.
+
+var a = new Array(500);
+for (var i = 0; i < 500000; i++) {
+ a[i] = new Object();
+}
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-debug-code-recompilation.js b/src/3rdparty/v8/test/mjsunit/regress/regress-debug-code-recompilation.js
new file mode 100644
index 0000000..1a608b1
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-debug-code-recompilation.js
@@ -0,0 +1,47 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --hydrogen-filter=Debug.setBreakPoint --expose-debug-as debug
+
+Debug = debug.Debug
+
+function f() {a=1;b=2};
+function g() {
+ a=1;
+ b=2;
+}
+
+bp = Debug.setBreakPoint(f, 0, 0);
+Debug.clearBreakPoint(bp);
+%OptimizeFunctionOnNextCall(Debug.setBreakPoint);
+bp = Debug.setBreakPoint(f, 0, 0);
+Debug.clearBreakPoint(bp);
+bp = Debug.setBreakPoint(f, 0, 0);
+Debug.clearBreakPoint(bp);
+%OptimizeFunctionOnNextCall(Debug.setBreakPoint);
+bp = Debug.setBreakPoint(f, 0, 0);
+Debug.clearBreakPoint(bp);
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-inlining-function-literal-context.js b/src/3rdparty/v8/test/mjsunit/regress/regress-inlining-function-literal-context.js
new file mode 100644
index 0000000..9b7f7ac
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-inlining-function-literal-context.js
@@ -0,0 +1,53 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --expose-gc
+
+function mkbaz(x) {
+ function baz() {
+ return function () {
+ return [x];
+ }
+ }
+ return baz;
+}
+
+var baz = mkbaz(1);
+
+function foo() {
+ var f = baz();
+ return f();
+}
+
+// Tenure.
+gc();
+gc();
+
+assertArrayEquals([1], foo());
+assertArrayEquals([1], foo());
+%OptimizeFunctionOnNextCall(foo);
+assertArrayEquals([1], foo());
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-smi-only-concat.js b/src/3rdparty/v8/test/mjsunit/regress/regress-smi-only-concat.js
new file mode 100644
index 0000000..a9a6d89
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-smi-only-concat.js
@@ -0,0 +1,37 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// This tests that concatenating a fast smi-only array and a fast object array
+// results in a fast object array.
+
+var fast_array = ['a', 'b'];
+var array = fast_array.concat(fast_array);
+
+assertTrue(%HasFastElements(fast_array));
+assertTrue(%HasFastElements(array)); \ No newline at end of file
diff --git a/src/3rdparty/v8/test/mjsunit/regress/regress-sqrt.js b/src/3rdparty/v8/test/mjsunit/regress/regress-sqrt.js
new file mode 100644
index 0000000..f2a7e55
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/regress/regress-sqrt.js
@@ -0,0 +1,47 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// Check that Math.sqrt returns the same value regardless of being
+// optimized or not.
+
+function f(x) {
+ return Math.sqrt(x);
+}
+
+var x = 7.0506280066499245e-233;
+
+var a = f(x);
+
+f(0.1);
+f(0.2);
+%OptimizeFunctionOnNextCall(f);
+
+var b = f(x);
+
+assertEquals(a, b);
diff --git a/src/3rdparty/v8/test/mjsunit/string-external-cached.js b/src/3rdparty/v8/test/mjsunit/string-external-cached.js
new file mode 100644
index 0000000..6e24285
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/string-external-cached.js
@@ -0,0 +1,121 @@
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-externalize-string --expose-gc
+// Test data pointer caching of external strings.
+
+function test() {
+ // Test string.charAt.
+ var charat_str = new Array(5);
+ charat_str[0] = "0123456789ABCDEF0123456789ABCDEF\
+0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF\
+0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF\
+0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF\
+0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF";
+ charat_str[1] = "0123456789ABCDEF";
+ for (var i = 0; i < 6; i++) charat_str[1] += charat_str[1];
+ try { // String can only be externalized once
+ externalizeString(charat_str[0], false);
+ externalizeString(charat_str[1], true);
+ } catch (ex) { }
+ charat_str[2] = charat_str[0].slice(0, -1);
+ charat_str[3] = charat_str[1].slice(0, -1);
+ charat_str[4] = charat_str[0] + charat_str[0];
+
+ for (var i = 0; i < 5; i++) {
+ assertEquals('B', charat_str[i].charAt(6*16 + 11));
+ assertEquals('C', charat_str[i].charAt(6*16 + 12));
+ assertEquals('A', charat_str[i].charAt(3*16 + 10));
+ assertEquals('B', charat_str[i].charAt(3*16 + 11));
+ }
+
+ charat_short = "012";
+ try { // String can only be externalized once
+ externalizeString(charat_short, true);
+ } catch (ex) { }
+ assertEquals("1", charat_short.charAt(1));
+
+ // Test regexp and short substring.
+ var re = /(A|B)/;
+ var rere = /(T.{1,2}B)/;
+ var ascii = "ABCDEFGHIJKLMNOPQRST";
+ var twobyte = "_ABCDEFGHIJKLMNOPQRST";
+ try {
+ externalizeString(ascii, false);
+ externalizeString(twobyte, true);
+ } catch (ex) { }
+ assertTrue(isAsciiString(ascii));
+ assertFalse(isAsciiString(twobyte));
+ var ascii_slice = ascii.slice(1,-1);
+ var twobyte_slice = twobyte.slice(2,-1);
+ var ascii_cons = ascii + ascii;
+ var twobyte_cons = twobyte + twobyte;
+ for (var i = 0; i < 2; i++) {
+ assertEquals(["A", "A"], re.exec(ascii));
+ assertEquals(["B", "B"], re.exec(ascii_slice));
+ assertEquals(["TAB", "TAB"], rere.exec(ascii_cons));
+ assertEquals(["A", "A"], re.exec(twobyte));
+ assertEquals(["B", "B"], re.exec(twobyte_slice));
+ assertEquals(["T_AB", "T_AB"], rere.exec(twobyte_cons));
+ assertEquals("DEFG", ascii_slice.substr(2, 4));
+ assertEquals("DEFG", twobyte_slice.substr(2, 4));
+ assertEquals("DEFG", ascii_cons.substr(3, 4));
+ assertEquals("DEFG", twobyte_cons.substr(4, 4));
+ }
+
+ // Test adding external strings
+ var short_ascii = "E=";
+ var long_ascii = "MCsquared";
+ var short_twobyte = "E\u1234";
+ var long_twobyte = "MCsquare\u1234";
+ try { // String can only be externalized once
+ externalizeString(short_ascii, false);
+ externalizeString(long_ascii, false);
+ externalizeString(short_twobyte, true);
+ externalizeString(long_twobyte, true);
+ assertTrue(isAsciiString(short_asii) && isAsciiString(long_ascii));
+ assertFalse(isAsciiString(short_twobyte) || isAsciiString(long_twobyte));
+ } catch (ex) { }
+ assertEquals("E=MCsquared", short_ascii + long_ascii);
+ assertTrue(isAsciiString(short_ascii + long_ascii));
+ assertEquals("MCsquaredE=", long_ascii + short_ascii);
+ assertEquals("E\u1234MCsquare\u1234", short_twobyte + long_twobyte);
+ assertFalse(isAsciiString(short_twobyte + long_twobyte));
+ assertEquals("E=MCsquared", "E=" + long_ascii);
+ assertEquals("E\u1234MCsquared", short_twobyte + "MCsquared");
+ assertEquals("E\u1234MCsquared", short_twobyte + long_ascii);
+ assertFalse(isAsciiString(short_twobyte + long_ascii));
+}
+
+// Run the test many times to ensure IC-s don't break things.
+for (var i = 0; i < 10; i++) {
+ test();
+}
+
+// Clean up string to make Valgrind happy.
+gc();
+gc();
diff --git a/src/3rdparty/v8/test/mjsunit/string-externalize.js b/src/3rdparty/v8/test/mjsunit/string-externalize.js
index da89786..d52a7e2 100644
--- a/src/3rdparty/v8/test/mjsunit/string-externalize.js
+++ b/src/3rdparty/v8/test/mjsunit/string-externalize.js
@@ -44,7 +44,7 @@ function test() {
assertFalse(isAsciiString(twoByteExternalWithAsciiData));
var realTwoByteExternalString =
- "\u1234\u1234" + (function() { return "\u1234"; })();
+ "\u1234\u1234\u1234\u1234" + (function() { return "\u1234"; })();
externalizeString(realTwoByteExternalString);
assertFalse(isAsciiString(realTwoByteExternalString));
diff --git a/src/3rdparty/v8/test/mjsunit/string-replace-one-char.js b/src/3rdparty/v8/test/mjsunit/string-replace-one-char.js
new file mode 100644
index 0000000..cb4167b
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/string-replace-one-char.js
@@ -0,0 +1,92 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Make sure the strings are long enough to trigger the one-char string replace.
+var prefix1024 = "0123456789ABCDEF";
+for (var i = 0; i < 6; i++) prefix1024 += prefix1024;
+
+function test_replace(result, expected, search, replace) {
+ assertEquals(expected, result.replace(search, replace));
+}
+
+// '$' in the replace string.
+test_replace(prefix1024 + "abcdefghijklmnopqrstuvwxyz",
+ prefix1024 + "abcdefghijk#l#mnopqrstuvwxyz",
+ "l", "#$&#");
+
+test_replace(prefix1024 + "abcdefghijklmnopqrstuvwxyz\u1234",
+ prefix1024 + "abcdefghijk\u2012l\u2012mnopqrstuvwxyz\u1234",
+ "l", "\u2012$&\u2012");
+
+test_replace(prefix1024 + "abcdefghijklmnopqrstuvwxyz",
+ prefix1024 + "abcdefghijk$mnopqrstuvwxyz",
+ "l", "$$");
+
+test_replace(prefix1024 + "abcdefghijklmnopqrstuvwxyz\u1234",
+ prefix1024 + "abcdefghijk$mnopqrstuvwxyz\u1234",
+ "l", "$$");
+
+// Zero length replace string.
+test_replace(prefix1024 + "abcdefghijklmnopqrstuvwxyz",
+ prefix1024 + "abcdefghijklmnopqstuvwxyz",
+ "r", "");
+
+test_replace(prefix1024 + "abcdefghijklmnopq\u1234stuvwxyz",
+ prefix1024 + "abcdefghijklmnopqstuvwxyz",
+ "\u1234", "");
+
+// Search char not found.
+var not_found_1 = prefix1024 + "abcdefghijklmnopqrstuvwxyz";
+test_replace(not_found_1, not_found_1, "+", "-");
+
+var not_found_2 = prefix1024 + "abcdefghijklm\u1234nopqrstuvwxyz";
+test_replace(not_found_2, not_found_2, "+", "---");
+
+var not_found_3 = prefix1024 + "abcdefghijklmnopqrstuvwxyz";
+test_replace(not_found_3, not_found_3, "\u1234", "ZZZ");
+
+// Deep cons tree.
+var nested_1 = "";
+for (var i = 0; i < 100000; i++) nested_1 += "y";
+var nested_1_result = prefix1024 + nested_1 + "aa";
+nested_1 = prefix1024 + nested_1 + "z";
+test_replace(nested_1, nested_1_result, "z", "aa");
+
+var nested_2 = "\u2244";
+for (var i = 0; i < 100000; i++) nested_2 += "y";
+var nested_2_result = prefix1024 + nested_2 + "aa";
+nested_2 = prefix1024 + nested_2 + "\u2012";
+test_replace(nested_2, nested_2_result, "\u2012", "aa");
+
+// Sliced string as input. A cons string is always flattened before sliced.
+var slice_1 = ("ab" + prefix1024 + "cdefghijklmnopqrstuvwxyz").slice(1, -1);
+var slice_1_result = "b" + prefix1024 + "cdefghijklmnopqrstuvwxQ";
+test_replace(slice_1, slice_1_result, "y", "Q");
+
+var slice_2 = (prefix1024 + "abcdefghijklmno\u1234\u1234p").slice(1, -1);
+var slice_2_result = prefix1024.substr(1) + "abcdefghijklmnoQ\u1234";
+test_replace(slice_2, slice_2_result, "\u1234", "Q");
diff --git a/src/3rdparty/v8/test/mjsunit/string-slices-regexp.js b/src/3rdparty/v8/test/mjsunit/string-slices-regexp.js
index df01574..98b8ef9 100644
--- a/src/3rdparty/v8/test/mjsunit/string-slices-regexp.js
+++ b/src/3rdparty/v8/test/mjsunit/string-slices-regexp.js
@@ -24,11 +24,6 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// Flags: --string-slices
-
-//assertEquals('345"12345 6"1234567"123',
-// '12345""12345 6""1234567""1234'.slice(2,-1).replace(/""/g, '"'));
var foo = "lsdfj sldkfj sdklfj læsdfjl sdkfjlsdk fjsdl fjsdljskdj flsj flsdkj flskd regexp: /foobar/\nldkfj sdlkfj sdkl";
for(var i = 0; i < 1000; i++) {
diff --git a/src/3rdparty/v8/test/mjsunit/string-slices.js b/src/3rdparty/v8/test/mjsunit/string-slices.js
index 7c40229..5b1dc36 100755
--- a/src/3rdparty/v8/test/mjsunit/string-slices.js
+++ b/src/3rdparty/v8/test/mjsunit/string-slices.js
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --string-slices --expose-externalize-string
+// Flags: --expose-externalize-string --allow-natives-syntax
var s = 'abcdefghijklmn';
assertEquals(s, s.substr());
@@ -100,14 +100,7 @@ for (var i = 5; i < 25; i++) {
// Keep creating strings to to force allocation failure on substring creation.
var x = "0123456789ABCDEF";
-x += x; // 2^5
-x += x;
-x += x;
-x += x;
-x += x;
-x += x; // 2^10
-x += x;
-x += x;
+for (var i = 0; i < 8; i++) x += x;
var xl = x.length;
var cache = [];
for (var i = 0; i < 1000; i++) {
@@ -119,14 +112,7 @@ for (var i = 0; i < 1000; i++) {
// Same with two-byte strings
var x = "\u2028123456789ABCDEF";
-x += x; // 2^5
-x += x;
-x += x;
-x += x;
-x += x;
-x += x; // 2^10
-x += x;
-x += x;
+for (var i = 0; i < 8; i++) x += x;
var xl = x.length;
var cache = [];
for (var i = 0; i < 1000; i++) {
@@ -174,6 +160,23 @@ for ( var i = 0; i < 1000; i++) {
f(flat, cons, slice, i);
}
+// Short substrings.
+flat = "abcdefghijkl12345";
+cons = flat + flat.toUpperCase();
+/x/.exec(cons); // Flatten cons
+slice = "abcdefghijklmn12345".slice(1, -1);
+assertEquals("cdefg", flat.substr(2, 5));
+assertEquals("cdefg", cons.substr(2, 5));
+assertEquals("cdefg", slice.substr(1, 5));
+
+flat = "abc\u1234defghijkl12345";
+cons = flat + flat.toUpperCase();
+/x/.exec(cons); // Flatten cons
+slice = "abc\u1234defghijklmn12345".slice(1, -1);
+assertEquals("c\u1234def", flat.substr(2, 5));
+assertEquals("c\u1234def", cons.substr(2, 5));
+assertEquals("c\u1234def", slice.substr(1, 5));
+
// Concatenate substrings.
var ascii = 'abcdefghijklmnop';
var utf = '\u03B1\u03B2\u03B3\u03B4\u03B5\u03B6\u03B7\u03B8\u03B9\u03BA\u03BB';
@@ -202,3 +205,21 @@ assertEquals(a.slice(1,-1), b);
assertTrue(/3456789qwe/.test(a));
assertEquals(5, a.indexOf("678"));
assertEquals("12345", a.split("6")[0]);
+
+// Create a slice with an external string as parent string.
+var c = a.slice(1,-1);
+
+function test_crankshaft() {
+ for (var i = 0; i < 20; i++) {
+ assertEquals(b.charAt(i), a.charAt(i + 1));
+ assertEquals(b.charAt(i), c.charAt(i));
+ assertEquals(b.charAt(4), c.charAt(4));
+ assertTrue(/3456789qwe/.test(c));
+ assertEquals(4, c.indexOf("678"));
+ assertEquals("2345", c.split("6")[0]);
+ }
+}
+
+test_crankshaft();
+%OptimizeFunctionOnNextCall(test_crankshaft);
+test_crankshaft(); \ No newline at end of file
diff --git a/src/3rdparty/v8/test/mjsunit/switch.js b/src/3rdparty/v8/test/mjsunit/switch.js
index 180f994..6a61fe5 100644
--- a/src/3rdparty/v8/test/mjsunit/switch.js
+++ b/src/3rdparty/v8/test/mjsunit/switch.js
@@ -25,6 +25,8 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// Flags: --allow-natives-syntax
+
function f0() {
switch (0) {
// switch deliberately left empty
@@ -126,6 +128,42 @@ assertEquals(3, f4(1), "fallthrough-switch.1");
assertEquals(3, f4(2), "fallthrough-switch.2");
assertEquals(5, f4(3), "fallthrough-switch.3");
+function f4_string(tag, x) {
+ switch(tag) {
+ case 'zero':
+ x++;
+ case 'two':
+ x++;
+ }
+ return x;
+}
+
+// Symbols
+assertEquals(2, f4_string('zero', 0), "fallthrough-string-switch.0");
+assertEquals(1, f4_string('one', 1), "fallthrough-string-switch.1");
+assertEquals(3, f4_string('two', 2), "fallthrough-string-switch.2");
+
+// Strings
+assertEquals(2, f4_string('_zero'.slice(1), 0), "fallthrough-string-switch.3");
+assertEquals(1, f4_string('_one'.slice(1), 1), "fallthrough-string-switch.4");
+assertEquals(3, f4_string('_two'.slice(1), 2), "fallthrough-string-switch.5");
+
+// Oddball
+assertEquals(3, f4_string(null, 3), "fallthrough-string-switch.6");
+
+// Test for regression
+function regress_string(value) {
+ var json = 1;
+ switch (typeof value) {
+ case 'object':
+ break;
+
+ default:
+
+ }
+ return json;
+};
+assertEquals(1, regress_string('object'), 'regression-string');
function f5(x) {
switch(x) {
@@ -287,3 +325,138 @@ var verylong_size = 1000;
var verylong = makeVeryLong(verylong_size);
assertEquals(verylong_size * 2 + 1, verylong());
+
+//
+// Test suite below aims to cover all possible combinations of following:
+//
+// clauses | tags | type feedback | optimization
+// =========================================================
+// strings | symbol | all | on
+// smis | string | target | off
+// mixed | oddball | non-target |
+// | smis | none |
+// | heapnum | |
+// =========================================================
+
+// Function-with-switch generator
+var test_id = 0,
+ clause_values = {
+ string: ['abc', 'def', 'ghi', 'jkl'],
+ smi: [1, 2, 3, 4],
+ mixed: ['abc', 1, 'def', 2, 'ghi', 3, 'jkl', 4]
+ };
+
+function switch_gen(clause_type, feedback, optimize) {
+ var values = clause_values[clause_type];
+
+ function opt(fn) {
+ if (feedback === 'all') {
+ values.forEach(fn);
+ } else if (Array.isArray(feedback)) {
+ // Non-target
+ values.filter(function(v) {
+ return feedback.indexOf(v) === -1;
+ }).forEach(fn);
+ } else if (feedback !== undefined) {
+ // Target
+ fn(feedback);
+ } else {
+ // None
+ }
+
+ if (optimize) %OptimizeFunctionOnNextCall(fn);
+
+ return fn;
+ };
+
+ return opt(new Function(
+ 'tag',
+ '"' + (test_id++) + '";' +
+ 'switch(tag) {' +
+ values.map(function(value) {
+ return 'case ' + JSON.stringify(value) + ': return' +
+ JSON.stringify('ok-' + value);
+ }).join(';') +
+ '}'
+ ));
+};
+
+function test_switch(clause_type, test_type, feedback, optimize) {
+ var pairs = [],
+ fn = switch_gen(clause_type, feedback, optimize);
+
+ if (Array.isArray(test_type)) {
+ pairs = test_type.map(function(v) {
+ return {
+ value: v,
+ expected: 'ok-' + v
+ };
+ });
+ } else if (test_type === 'symbols') {
+ pairs = clause_values.string.map(function(v) {
+ return {
+ value: v,
+ expected: clause_type !== 'smi' ? 'ok-' + v : undefined
+ };
+ });
+ } else if (test_type === 'strings') {
+ pairs = clause_values.string.map(function(v) {
+ return {
+ value: ('%%' + v).slice(2),
+ expected: clause_type !== 'smi' ? 'ok-' + v : undefined
+ };
+ });
+ } else if (test_type === 'oddball') {
+ pairs = [
+ { value: null, expected: undefined },
+ { value: NaN, expected: undefined },
+ { value: undefined, expected: undefined }
+ ];
+ } else if (test_type === 'smi') {
+ pairs = clause_values.smi.map(function(v) {
+ return {
+ value: v,
+ expected: clause_type !== 'string' ? 'ok-' + v : undefined
+ };
+ });
+ } else if (test_type === 'heapnum') {
+ pairs = clause_values.smi.map(function(v) {
+ return {
+ value: ((v * 17)/16) - ((v*17)%16/16),
+ expected: clause_type !== 'string' ? 'ok-' + v : undefined
+ };
+ });
+ }
+
+ pairs.forEach(function(pair) {
+ assertEquals(fn(pair.value), pair.expected);
+ });
+};
+
+// test_switch(clause_type, test_type, feedback, optimize);
+
+function test_switches(opt) {
+ var test_types = ['symbols', 'strings', 'oddball', 'smi', 'heapnum'];
+
+ function test(clause_type) {
+ var values = clause_values[clause_type];
+
+ test_types.forEach(function(test_type) {
+ test_switch(clause_type, test_type, 'all', opt);
+ test_switch(clause_type, test_type, 'none', opt);
+
+ // Targeting specific clause feedback
+ values.forEach(function(value) {
+ test_switch(clause_type, test_type, [value], value, opt);
+ test_switch(clause_type, test_type, value, value, opt);
+ });
+ });
+ };
+
+ test('string');
+ test('smi');
+ test('mixed');
+};
+
+test_switches(false);
+test_switches(true);
diff --git a/src/3rdparty/v8/test/mjsunit/tools/tickprocessor.js b/src/3rdparty/v8/test/mjsunit/tools/tickprocessor.js
index 30b0ec2..c48d9f3 100644
--- a/src/3rdparty/v8/test/mjsunit/tools/tickprocessor.js
+++ b/src/3rdparty/v8/test/mjsunit/tools/tickprocessor.js
@@ -376,8 +376,11 @@ function driveTickProcessorTest(
}
assertTrue(pathLen != -1);
var testsPath = TEST_FILE_NAME.substr(0, pathLen + 1);
- var tp = new TickProcessor(
- new CppEntriesProviderMock(), separateIc, ignoreUnknown, stateFilter);
+ var tp = new TickProcessor(new CppEntriesProviderMock(),
+ separateIc,
+ TickProcessor.CALL_GRAPH_SIZE,
+ ignoreUnknown,
+ stateFilter);
var pm = new PrintMonitor(testsPath + refOutput);
tp.processLogFileInTest(testsPath + logInput);
tp.printStatistics();
diff --git a/src/3rdparty/v8/test/mjsunit/undeletable-functions.js b/src/3rdparty/v8/test/mjsunit/undeletable-functions.js
index bbb798f..635ea6f 100644
--- a/src/3rdparty/v8/test/mjsunit/undeletable-functions.js
+++ b/src/3rdparty/v8/test/mjsunit/undeletable-functions.js
@@ -25,11 +25,8 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Test that we match JSC in making some functions undeletable.
-// See http://code.google.com/p/chromium/issues/detail?id=1717
-// The functions on these prototypes are not just undeletable. It is
-// possible to override them with new definitions, then get the old
-// version back by deleting the new definition.
+// Test that we match ECMAScript in making most builtin functions
+// deletable and only specific ones undeletable or read-only.
var array;
@@ -37,7 +34,7 @@ array = [
"toString", "toLocaleString", "join", "pop", "push", "concat", "reverse",
"shift", "unshift", "slice", "splice", "sort", "filter", "forEach", "some",
"every", "map", "indexOf", "lastIndexOf", "reduce", "reduceRight"];
-CheckJSCSemantics(Array.prototype, array, "Array prototype");
+CheckEcmaSemantics(Array.prototype, array, "Array prototype");
var old_Array_prototype = Array.prototype;
var new_Array_prototype = {};
@@ -57,12 +54,12 @@ array = [
"setUTCMinutes", "setHours", "setUTCHours", "setDate", "setUTCDate",
"setMonth", "setUTCMonth", "setFullYear", "setUTCFullYear", "toGMTString",
"toUTCString", "getYear", "setYear", "toISOString", "toJSON"];
-CheckJSCSemantics(Date.prototype, array, "Date prototype");
+CheckEcmaSemantics(Date.prototype, array, "Date prototype");
array = [
"random", "abs", "acos", "asin", "atan", "ceil", "cos", "exp", "floor", "log",
"round", "sin", "sqrt", "tan", "atan2", "pow", "max", "min"];
-CheckJSCSemantics(Math, array, "Math1");
+CheckEcmaSemantics(Math, array, "Math1");
CheckEcmaSemantics(Date, ["UTC", "parse", "now"], "Date");
@@ -114,7 +111,7 @@ array = [
"toUpperCase", "toLocaleUpperCase", "link", "anchor", "fontcolor", "fontsize",
"big", "blink", "bold", "fixed", "italics", "small", "strike", "sub", "sup",
"toJSON", "toString", "valueOf"];
-CheckJSCSemantics(String.prototype, array, "String prototype");
+CheckEcmaSemantics(String.prototype, array, "String prototype");
CheckEcmaSemantics(String, ["fromCharCode"], "String");
@@ -126,14 +123,6 @@ function CheckEcmaSemantics(type, props, name) {
}
-function CheckJSCSemantics(type, props, name) {
- print(name);
- for (var i = 0; i < props.length; i++) {
- CheckNotDeletable(type, props[i]);
- }
-}
-
-
function CheckDontDelete(type, props, name) {
print(name);
for (var i = 0; i < props.length; i++) {
@@ -156,21 +145,6 @@ function CheckDeletable(type, prop) {
}
-function CheckNotDeletable(type, prop) {
- var old = type[prop];
- if (!type[prop]) return;
- assertTrue(type.hasOwnProperty(prop), "inherited: " + prop);
- var deleted = delete type[prop];
- assertTrue(deleted, "delete operator returned false: " + prop);
- assertTrue(type.hasOwnProperty(prop), "not there after delete: " + prop);
- type[prop] = "foo";
- assertEquals("foo", type[prop], "not overwritable: " + prop);
- deleted = delete type[prop];
- assertTrue(deleted, "delete operator returned false 2nd time: " + prop);
- assertEquals(old.toString(), type[prop].toString(), "delete didn't restore the old value: " + prop);
-}
-
-
function CheckDontDeleteAttr(type, prop) {
var old = type[prop];
if (!type[prop]) return;
diff --git a/src/3rdparty/v8/test/mjsunit/unicodelctest-no-optimization.js b/src/3rdparty/v8/test/mjsunit/unicodelctest-no-optimization.js
new file mode 100644
index 0000000..3bcb5bf
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/unicodelctest-no-optimization.js
@@ -0,0 +1,4914 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// Flags: --noregexp-optimization
+
+// This regexp should pick up all lower case characters. The non-BMP
+// characters are coded using explicit surrogate pairs.
+var re = /^([a-zªµºß-öø-ÿāăąćĉċčďđēĕėęěĝğġģĥħĩīĭįıijĵķ-ĸĺļľŀłńņň-ʼnŋōŏőœŕŗřśŝşšţťŧũūŭůűųŵŷźżž-ƀƃƅƈƌ-ƍƒƕƙ-ƛƞơƣƥƨƪ-ƫƭưƴƶƹ-ƺƽ-ƿdžljnjǎǐǒǔǖǘǚǜ-ǝǟǡǣǥǧǩǫǭǯ-ǰdzǵǹǻǽǿȁȃȅȇȉȋȍȏȑȓȕȗșțȝȟȡȣȥȧȩȫȭȯȱȳ-ȹȼȿ-ɀɂɇɉɋɍɏ-ʓʕ-ʯͱͳͷͻ-ͽΐά-ώϐ-ϑϕ-ϗϙϛϝϟϡϣϥϧϩϫϭϯ-ϳϵϸϻ-ϼа-џѡѣѥѧѩѫѭѯѱѳѵѷѹѻѽѿҁҋҍҏґғҕҗҙқҝҟҡңҥҧҩҫҭүұҳҵҷҹһҽҿӂӄӆӈӊӌӎ-ӏӑӓӕӗәӛӝӟӡӣӥӧөӫӭӯӱӳӵӷӹӻӽӿԁԃԅԇԉԋԍԏԑԓԕԗԙԛԝԟԡԣա-ևᴀ-ᴫᵢ-ᵷᵹ-ᶚḁḃḅḇḉḋḍḏḑḓḕḗḙḛḝḟḡḣḥḧḩḫḭḯḱḳḵḷḹḻḽḿṁṃṅṇṉṋṍṏṑṓṕṗṙṛṝṟṡṣṥṧṩṫṭṯṱṳṵṷṹṻṽṿẁẃẅẇẉẋẍẏẑẓẕ-ẝẟạảấầẩẫậắằẳẵặẹẻẽếềểễệỉịọỏốồổỗộớờởỡợụủứừửữựỳỵỷỹỻỽỿ-ἇἐ-ἕἠ-ἧἰ-ἷὀ-ὅὐ-ὗὠ-ὧὰ-ώᾀ-ᾇᾐ-ᾗᾠ-ᾧᾰ-ᾴᾶ-ᾷιῂ-ῄῆ-ῇῐ-ΐῖ-ῗῠ-ῧῲ-ῴῶ-ῷⁱⁿℊℎ-ℏℓℯℴℹℼ-ℽⅆ-ⅉⅎↄⰰ-ⱞⱡⱥ-ⱦⱨⱪⱬⱱⱳ-ⱴⱶ-ⱼⲁⲃⲅⲇⲉⲋⲍⲏⲑⲓⲕⲗⲙⲛⲝⲟⲡⲣⲥⲧⲩⲫⲭⲯⲱⲳⲵⲷⲹⲻⲽⲿⳁⳃⳅⳇⳉⳋⳍⳏⳑⳓⳕⳗⳙⳛⳝⳟⳡⳣ-ⳤⴀ-ⴥꙁꙃꙅꙇꙉꙋꙍꙏꙑꙓꙕꙗꙙꙛꙝꙟꙣꙥꙧꙩꙫꙭꚁꚃꚅꚇꚉꚋꚍꚏꚑꚓꚕꚗꜣꜥꜧꜩꜫꜭꜯ-ꜱꜳꜵꜷꜹꜻꜽꜿꝁꝃꝅꝇꝉꝋꝍꝏꝑꝓꝕꝗꝙꝛꝝꝟꝡꝣꝥꝧꝩꝫꝭꝯꝱ-ꝸꝺꝼꝿꞁꞃꞅꞇꞌff-stﬓ-ﬗa-z]|\ud801[\udc28-\udc4f]|\ud835[\udc1a-\udc33\udc4e-\udc54\udc56-\udc67\udc82-\udc9b\udcb6-\udcb9\udcbb\udcbd-\udcc3\udcc5-\udccf\udcea-\udd03\udd1e-\udd37\udd52-\udd6b\udd86-\udd9f\uddba-\uddd3\uddee-\ude07\ude22-\ude3b\ude56-\ude6f\ude8a-\udea5\udec2-\udeda\udedc-\udee1\udefc-\udf14\udf16-\udf1b\udf36-\udf4e\udf50-\udf55\udf70-\udf88\udf8a-\udf8f\udfaa-\udfc2\udfc4-\udfc9\udfcb])$/;
+
+
+var answer = get_answer();
+var fuzz_answer = get_fuzz_answer();
+
+
+for (var i = 0; i < 0x10000; i++) {
+ var s = String.fromCharCode(i);
+ assertTrue(!!re.test(s) == !!answer[i]);
+}
+
+
+function BuildSurrogatePair(c) {
+ return String.fromCharCode(+0xd800 + (c >> 10)) +
+ String.fromCharCode(+0xdc00 + (c & 0x3ff));
+}
+
+fuzz_index = 0;
+fuzz();
+
+for (var i = 0x10000; i < 0x110000 && i < answer.length + 256; i++) {
+ var c = i - 0x10000;
+ assertTrue(!!re.test(BuildSurrogatePair(c)) == !!answer[i]);
+}
+
+var seed = 49734321;
+
+function rand() {
+ // To make the test results predictable, we use a 100% deterministic
+ // alternative.
+ // Robert Jenkins' 32 bit integer hash function.
+ seed = ((seed + 0x7ed55d16) + (seed << 12)) & 0xffffffff;
+ seed = ((seed ^ 0xc761c23c) ^ (seed >>> 19)) & 0xffffffff;
+ seed = ((seed + 0x165667b1) + (seed << 5)) & 0xffffffff;
+ seed = ((seed + 0xd3a2646c) ^ (seed << 9)) & 0xffffffff;
+ seed = ((seed + 0xfd7046c5) + (seed << 3)) & 0xffffffff;
+ seed = ((seed ^ 0xb55a4f09) ^ (seed >>> 16)) & 0xffffffff;
+ return (seed & 0xffff)
+}
+
+
+// Random character.
+function rc(last) {
+ var c = rand();
+ // Increase the concentration of problematic values around the page
+ // edges.
+ if (rand() & 1) {
+ c = (c & 0xff80) + (c & 3) - 2;
+ }
+ // Increase the concentration of problematic values around the ends.
+ if (rand() & 31 == 0) c = 0xfff8 + (rand() & 7)
+ if (rand() & 31 == 0) c = (rand() & 7)
+
+ // Increase the concentration of values near each other.
+ if (rand() & 1) c = last + (rand() & 15) - 8;
+ return c & 0xffff; // Only code unit values.
+}
+
+
+function fuzz() {
+ fuzz_index = 0;
+ seed = 49734321;
+ for (var i = 0; i < 1000; i++) {
+ print(i);
+ var len = rand() & 0x1f;
+ var ranges = new Array(len);
+ var last = rand();
+ for (var j = 0; j < len; j++) {
+ ranges.push(last);
+ last = rc(last);
+ }
+ ranges.sort(function (a, b) { return a - b });
+ var cc = "";
+ for (var j = 0; j < len; j++) {
+ var ch = String.fromCharCode(ranges[j]);
+ if (ch == '\\' || ch == ']') ch = '\\' + ch;
+ cc += ch;
+ if (j < len - 1 && rand() & 1) cc += '-';
+ }
+ var negated = (last & 2) != 0;
+ var prefix = negated ? "[^" : "[";
+ var re = new RegExp(prefix + cc + "]");
+ for (var j = 0; j < len; j++) {
+ retest(re, (ranges[j] - 1), negated);
+ retest(re, (ranges[j]), negated);
+ retest(re, (ranges[j] + 1), negated);
+ }
+ }
+}
+
+
+function retest(re, code, negated) {
+ var s = String.fromCharCode(code >>> 0);
+ assertTrue(negated != (!!re.test(s) == !!fuzz_answer[fuzz_index++]));
+}
+
+
+function get_fuzz_answer() {
+ // Test data generated with V8 version 3.7.
+return [
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,
+
+
+ 0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+
+ 0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+];
+}
+
+
+function get_answer() {
+ // Test data generated with V8 version 3.7.
+return [
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1, , , , , , , , , , ,1, , , , ,1, , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1,1,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1, ,1, ,1, ,1, ,
+ 1, ,1, ,1, ,1, ,1,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, , ,1, ,1, ,1,1,
+ 1, , ,1, ,1, , ,1, , , ,1,1, , , , ,1, , ,1, , , ,1,1,1, , ,1, ,
+ ,1, ,1, ,1, , ,1, ,1,1, ,1, , ,1, , , ,1, ,1, , ,1,1, , ,1,1,1,
+ , , , , , ,1, , ,1, , ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1, , ,1, ,1, , , ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1,1,1,1,1, , ,1, , ,1,
+ 1, ,1, , , , ,1, ,1, ,1, ,1, ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , ,1, ,1, , , ,1, , , ,1,1,1, , ,
+ , , , , , , , , , , , , , , , ,1, , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1, , , ,1,1,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1,1,1, ,1, , ,1, , ,1,1, , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, , , , , , , , , ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ , ,1, ,1, ,1, ,1, ,1, ,1, ,1,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1,1,1,1,1,1,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1,1, , , , , , , , , , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1,1,1,1, , , , , , , , ,
+ 1,1,1,1,1,1, , , , , , , , , , ,1,1,1,1,1,1,1,1, , , , , , , , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1,1,1,1, , , , , , , , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1, ,1,1, , , , , , ,1, ,
+ , ,1,1,1, ,1,1, , , , , , , , ,1,1,1,1, , ,1,1, , , , , , , , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , , , ,1,1,1, ,1,1, , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , ,1, , , , , , , , , , , , , ,1,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1, , , ,1,1, , , ,1, , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , ,1, , , , ,1, , , , ,1, , ,1,1, , ,
+ , , , , , ,1,1,1,1, , , , ,1, , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , ,1, , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,
+ ,1, , , ,1,1, ,1, ,1, ,1, , , , ,1, ,1,1, ,1,1,1,1,1,1,1, , , ,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1,1, , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ , , ,1, ,1, ,1, ,1, ,1, ,1, , , , , , , , , , , , , , , , , , ,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1,1,1,1,1,1, ,1, ,1, , ,1,
+ ,1, ,1, ,1, ,1, , , , ,1, , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ 1,1,1,1,1,1,1, , , , , , , , , , , , ,1,1,1,1,1, , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , ,
+ , , , , , , , , , , , , , ,1,1,1,1,1,1,1, ,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , ,
+ , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , ,
+ , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1, ,1, ,1,1,1,
+ 1,1,1,1, ,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , , , ,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , ,
+ , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , ,
+ , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ , , , , , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , ,
+ , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , ,
+ , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , ,
+ , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,
+ 1,1, , , , , , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1, , , , ,
+ , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1, , , , , , , , , , ,
+ , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1, ,1,1,1,1,1,1, ,1];
+}
diff --git a/src/3rdparty/v8/test/mjsunit/unicodelctest.js b/src/3rdparty/v8/test/mjsunit/unicodelctest.js
new file mode 100644
index 0000000..2caaabd
--- /dev/null
+++ b/src/3rdparty/v8/test/mjsunit/unicodelctest.js
@@ -0,0 +1,4912 @@
+// Copyright 2012 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
// This regexp should pick up all lower case characters. The non-BMP
// characters are coded using explicit surrogate pairs.
var re = /^([a-zªµºß-öø-ÿāăąćĉċčďđēĕėęěĝğġģĥħĩīĭįıijĵķ-ĸĺļľŀłńņň-ʼnŋōŏőœŕŗřśŝşšţťŧũūŭůűųŵŷźżž-ƀƃƅƈƌ-ƍƒƕƙ-ƛƞơƣƥƨƪ-ƫƭưƴƶƹ-ƺƽ-ƿdžljnjǎǐǒǔǖǘǚǜ-ǝǟǡǣǥǧǩǫǭǯ-ǰdzǵǹǻǽǿȁȃȅȇȉȋȍȏȑȓȕȗșțȝȟȡȣȥȧȩȫȭȯȱȳ-ȹȼȿ-ɀɂɇɉɋɍɏ-ʓʕ-ʯͱͳͷͻ-ͽΐά-ώϐ-ϑϕ-ϗϙϛϝϟϡϣϥϧϩϫϭϯ-ϳϵϸϻ-ϼа-џѡѣѥѧѩѫѭѯѱѳѵѷѹѻѽѿҁҋҍҏґғҕҗҙқҝҟҡңҥҧҩҫҭүұҳҵҷҹһҽҿӂӄӆӈӊӌӎ-ӏӑӓӕӗәӛӝӟӡӣӥӧөӫӭӯӱӳӵӷӹӻӽӿԁԃԅԇԉԋԍԏԑԓԕԗԙԛԝԟԡԣա-ևᴀ-ᴫᵢ-ᵷᵹ-ᶚḁḃḅḇḉḋḍḏḑḓḕḗḙḛḝḟḡḣḥḧḩḫḭḯḱḳḵḷḹḻḽḿṁṃṅṇṉṋṍṏṑṓṕṗṙṛṝṟṡṣṥṧṩṫṭṯṱṳṵṷṹṻṽṿẁẃẅẇẉẋẍẏẑẓẕ-ẝẟạảấầẩẫậắằẳẵặẹẻẽếềểễệỉịọỏốồổỗộớờởỡợụủứừửữựỳỵỷỹỻỽỿ-ἇἐ-ἕἠ-ἧἰ-ἷὀ-ὅὐ-ὗὠ-ὧὰ-ώᾀ-ᾇᾐ-ᾗᾠ-ᾧᾰ-ᾴᾶ-ᾷιῂ-ῄῆ-ῇῐ-ΐῖ-ῗῠ-ῧῲ-ῴῶ-ῷⁱⁿℊℎ-ℏℓℯℴℹℼ-ℽⅆ-ⅉⅎↄⰰ-ⱞⱡⱥ-ⱦⱨⱪⱬⱱⱳ-ⱴⱶ-ⱼⲁⲃⲅⲇⲉⲋⲍⲏⲑⲓⲕⲗⲙⲛⲝⲟⲡⲣⲥⲧⲩⲫⲭⲯⲱⲳⲵⲷⲹⲻⲽⲿⳁⳃⳅⳇⳉⳋⳍⳏⳑⳓⳕⳗⳙⳛⳝⳟⳡⳣ-ⳤⴀ-ⴥꙁꙃꙅꙇꙉꙋꙍꙏꙑꙓꙕꙗꙙꙛꙝꙟꙣꙥꙧꙩꙫꙭꚁꚃꚅꚇꚉꚋꚍꚏꚑꚓꚕꚗꜣꜥꜧꜩꜫꜭꜯ-ꜱꜳꜵꜷꜹꜻꜽꜿꝁꝃꝅꝇꝉꝋꝍꝏꝑꝓꝕꝗꝙꝛꝝꝟꝡꝣꝥꝧꝩꝫꝭꝯꝱ-ꝸꝺꝼꝿꞁꞃꞅꞇꞌff-stﬓ-ﬗa-z]|\ud801[\udc28-\udc4f]|\ud835[\udc1a-\udc33\udc4e-\udc54\udc56-\udc67\udc82-\udc9b\udcb6-\udcb9\udcbb\udcbd-\udcc3\udcc5-\udccf\udcea-\udd03\udd1e-\udd37\udd52-\udd6b\udd86-\udd9f\uddba-\uddd3\uddee-\ude07\ude22-\ude3b\ude56-\ude6f\ude8a-\udea5\udec2-\udeda\udedc-\udee1\udefc-\udf14\udf16-\udf1b\udf36-\udf4e\udf50-\udf55\udf70-\udf88\udf8a-\udf8f\udfaa-\udfc2\udfc4-\udfc9\udfcb])$/;

// Precomputed expected results.  get_answer() (defined earlier in this
// file) has one entry per code point; get_fuzz_answer() (defined at the
// bottom) has one entry per retest() call made by fuzz().
var answer = get_answer();
var fuzz_answer = get_fuzz_answer();


// Check every BMP code unit against the precomputed answer table.
for (var i = 0; i < 0x10000; i++) {
  var s = String.fromCharCode(i);
  assertTrue(!!re.test(s) == !!answer[i]);
}
+
// Encode a code-point offset (code point minus 0x10000) as an explicit
// UTF-16 surrogate pair: lead gets the high 10 bits, trail the low 10.
function BuildSurrogatePair(c) {
  var lead = 0xd800 + (c >> 10);
  var trail = 0xdc00 + (c & 0x3ff);
  return String.fromCharCode(lead) + String.fromCharCode(trail);
}
+
// Run the deterministic fuzz pass over randomly generated character
// classes.  fuzz_index tracks the position in the recorded
// fuzz_answer table (note: assigned without var, so it is a global).
fuzz_index = 0;
fuzz();

// Check astral-plane (non-BMP) characters via explicit surrogate
// pairs, as far as the answer table reaches plus a little slack past
// its end (entries past the end read as undefined, i.e. "no match").
for (var i = 0x10000; i < 0x110000 && i < answer.length + 256; i++) {
  var c = i - 0x10000;
  assertTrue(!!re.test(BuildSurrogatePair(c)) == !!answer[i]);
}
+
var seed = 49734321;

// Robert Jenkins' 32 bit integer hash function, used as a 100%
// deterministic stand-in for Math.random() so the test results are
// predictable.  Advances the global `seed` and returns its low 16 bits.
function rand() {
  var s = seed;
  s = ((s + 0x7ed55d16) + (s << 12)) & 0xffffffff;
  s = ((s ^ 0xc761c23c) ^ (s >>> 19)) & 0xffffffff;
  s = ((s + 0x165667b1) + (s << 5)) & 0xffffffff;
  s = ((s + 0xd3a2646c) ^ (s << 9)) & 0xffffffff;
  s = ((s + 0xfd7046c5) + (s << 3)) & 0xffffffff;
  s = ((s ^ 0xb55a4f09) ^ (s >>> 16)) & 0xffffffff;
  seed = s;
  return seed & 0xffff;
}
+
+
// Random character.  `last` is the previously generated code unit;
// some of the time the new value is picked close to it so that
// adjacent/overlapping ranges are exercised.
function rc(last) {
  var c = rand();
  // Increase the concentration of problematic values around the page
  // edges.
  if (rand() & 1) {
    c = (c & 0xff80) + (c & 3) - 2;
  }
  // Increase the concentration of problematic values around the ends.
  // NOTE(review): `==` binds tighter than `&`, so each condition parses
  // as `rand() & (31 == 0)` == `rand() & 0` == 0 -- the two branch
  // bodies below never execute.  The rand() in each condition IS still
  // called, though, so the PRNG sequence advances.  The recorded
  // fuzz_answer table was generated with exactly this behavior, so
  // "fixing" the precedence would make the test fail; left as-is.
  if (rand() & 31 == 0) c = 0xfff8 + (rand() & 7)
  if (rand() & 31 == 0) c = (rand() & 7)

  // Increase the concentration of values near each other.
  if (rand() & 1) c = last + (rand() & 15) - 8;
  return c & 0xffff; // Only code unit values.
}
+
+
// Build 1000 pseudo-random character classes and probe each one just
// below, at, and just above every range endpoint, comparing the match
// results against the recorded fuzz_answer table (via retest()).
function fuzz() {
  fuzz_index = 0;
  seed = 49734321;  // Reset the PRNG so the run is reproducible.
  for (var i = 0; i < 1000; i++) {
    var len = rand() & 0x1f;
    // NOTE(review): new Array(len) creates len holes and push() then
    // appends AFTER them, so `ranges` ends up with 2 * len elements.
    // sort() sorts the real values and moves undefined/holes to the
    // end, so ranges[0..len-1] are the sorted code units afterwards.
    // The recorded fuzz_answer data depends on this exact behavior.
    var ranges = new Array(len);
    var last = rand();
    for (var j = 0; j < len; j++) {
      ranges.push(last);
      last = rc(last);
    }
    ranges.sort(function (a, b) { return a - b });
    // Assemble the character-class body, escaping the two characters
    // that are special inside a class, and randomly inserting '-' to
    // turn adjacent entries into ranges.
    var cc = "";
    for (var j = 0; j < len; j++) {
      var ch = String.fromCharCode(ranges[j]);
      if (ch == '\\' || ch == ']') ch = '\\' + ch;
      cc += ch;
      if (j < len - 1 && rand() & 1) cc += '-';
    }
    // Bit 1 of the last generated code unit decides negation.
    var negated = (last & 2) != 0;
    var prefix = negated ? "[^" : "[";
    var re = new RegExp(prefix + cc + "]");
    // Probe each endpoint and its immediate neighbours.
    for (var j = 0; j < len; j++) {
      retest(re, (ranges[j] - 1), negated);
      retest(re, (ranges[j]), negated);
      retest(re, (ranges[j] + 1), negated);
    }
  }
}
+
+
// Probe `re` with the single code unit `code` (coerced to uint32, then
// truncated to 16 bits by String.fromCharCode) and compare the match
// outcome against the next entry of the recorded fuzz_answer table.
// For a negated class the expected relationship is inverted.
function retest(re, code, negated) {
  var ch = String.fromCharCode(code >>> 0);
  var matched = !!re.test(ch);
  var expected = !!fuzz_answer[fuzz_index++];
  assertTrue(negated != (matched == expected));
}
+
+
+function get_fuzz_answer() {
+ // Test data generated with V8 version 3.7.
+return [
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,
+
+
+ 0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+
+ 0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,0,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,
+ 0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,
+ 0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,
+
+ 0,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,1,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,0,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,1,1,0,0,1,0,0,1,0,
+ 0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,
+ 0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,0,
+ 0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,1,1,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,1,1,0,0,1,1,1,1,0,0,1,0,0,1,1,1,1,0,
+];
+}
+
+
+function get_answer() {
+ // Test data generated with V8 version 3.7.
+return [
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1, , , , , , , , , , ,1, , , , ,1, , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1,1,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1, ,1, ,1, ,1, ,
+ 1, ,1, ,1, ,1, ,1,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, , ,1, ,1, ,1,1,
+ 1, , ,1, ,1, , ,1, , , ,1,1, , , , ,1, , ,1, , , ,1,1,1, , ,1, ,
+ ,1, ,1, ,1, , ,1, ,1,1, ,1, , ,1, , , ,1, ,1, , ,1,1, , ,1,1,1,
+ , , , , , ,1, , ,1, , ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1, , ,1, ,1, , , ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1,1,1,1,1, , ,1, , ,1,
+ 1, ,1, , , , ,1, ,1, ,1, ,1, ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , ,1, ,1, , , ,1, , , ,1,1,1, , ,
+ , , , , , , , , , , , , , , , ,1, , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1, , , ,1,1,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1,1,1, ,1, , ,1, , ,1,1, , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, , , , , , , , , ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ , ,1, ,1, ,1, ,1, ,1, ,1, ,1,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1,1,1,1,1,1,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1,1, , , , , , , , , , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1,1,1,1, , , , , , , , ,
+ 1,1,1,1,1,1, , , , , , , , , , ,1,1,1,1,1,1,1,1, , , , , , , , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1,1,1,1, , , , , , , , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , ,1,1,1,1,1, ,1,1, , , , , , ,1, ,
+ , ,1,1,1, ,1,1, , , , , , , , ,1,1,1,1, , ,1,1, , , , , , , , ,
+ 1,1,1,1,1,1,1,1, , , , , , , , , , ,1,1,1, ,1,1, , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , ,1, , , , , , , , , , , , , ,1,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1, , , ,1,1, , , ,1, , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , ,1, , , , ,1, , , , ,1, , ,1,1, , ,
+ , , , , , ,1,1,1,1, , , , ,1, , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , ,1, , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,
+ ,1, , , ,1,1, ,1, ,1, ,1, , , , ,1, ,1,1, ,1,1,1,1,1,1,1, , , ,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1,1, , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ , , ,1, ,1, ,1, ,1, ,1, ,1, , , , , , , , , , , , , , , , , , ,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,
+ ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1, ,1,1,1,1,1,1,1,1, ,1, ,1, , ,1,
+ ,1, ,1, ,1, ,1, , , , ,1, , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ 1,1,1,1,1,1,1, , , , , , , , , , , , ,1,1,1,1,1, , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , , , , , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , ,
+ , , , , , , , , , , , , , ,1,1,1,1,1,1,1, ,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , ,
+ , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , ,
+ , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1, ,1, ,1,1,1,
+ 1,1,1,1, ,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , , , ,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , ,
+ , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , ,
+ , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ , , , , , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , ,
+ , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , ,
+ , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , ,
+ , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1, , , , , , , , , , , , , , , , , , , , , , , , , , ,
+ , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,
+ 1,1, , , , , , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1, , , , ,
+ , , , , , , , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1, , , , , , , , , , ,
+ , , , , , , , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1,1,1,1,1,1,1, ,1,1,1,1,1,1, , , , , , , , , , , , , , , , ,
+ , , , , , , , , , ,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
+ 1,1,1, ,1,1,1,1,1,1, ,1];
+}
diff --git a/src/3rdparty/v8/test/mozilla/mozilla.status b/src/3rdparty/v8/test/mozilla/mozilla.status
index e31a630..e64959a 100644
--- a/src/3rdparty/v8/test/mozilla/mozilla.status
+++ b/src/3rdparty/v8/test/mozilla/mozilla.status
@@ -69,6 +69,9 @@ js1_5/Array/regress-465980-02: SKIP
ecma_3/Date/15.9.3.2-1: SKIP
js1_2/function/Number: SKIP
+# TODO(2018): Temporarily allow timeout in debug mode.
+js1_5/GC/regress-203278-2: PASS || TIMEOUT if $mode == debug
+
##################### SLOW TESTS #####################
# This takes a long time to run (~100 seconds). It should only be run
@@ -122,14 +125,14 @@ ecma/Date/15.9.2.2-4: PASS || FAIL
ecma/Date/15.9.2.2-5: PASS || FAIL
ecma/Date/15.9.2.2-6: PASS || FAIL
-# 1026139: These date tests fail on arm
-ecma/Date/15.9.5.29-1: PASS || ($ARM && FAIL)
-ecma/Date/15.9.5.34-1: PASS || ($ARM && FAIL)
-ecma/Date/15.9.5.28-1: PASS || ($ARM && FAIL)
+# 1026139: These date tests fail on arm and mips
+ecma/Date/15.9.5.29-1: PASS || (($ARM || $MIPS) && FAIL)
+ecma/Date/15.9.5.34-1: PASS || (($ARM || $MIPS) && FAIL)
+ecma/Date/15.9.5.28-1: PASS || (($ARM || $MIPS) && FAIL)
-# 1050186: Arm vm is broken; probably unrelated to dates
-ecma/Array/15.4.4.5-3: PASS || ($ARM && FAIL)
-ecma/Date/15.9.5.22-2: PASS || ($ARM && FAIL)
+# 1050186: Arm/MIPS vm is broken; probably unrelated to dates
+ecma/Array/15.4.4.5-3: PASS || (($ARM || $MIPS) && FAIL)
+ecma/Date/15.9.5.22-2: PASS || (($ARM || $MIPS) && FAIL)
# Flaky test that fails due to what appears to be a bug in the test.
# Occurs depending on current time
@@ -213,6 +216,10 @@ js1_5/Array/regress-101964: PASS || FAIL if $mode == debug
# builtin to have undefined as the receiver.
ecma/String/15.5.4.6-2: FAIL_OK
+# Fail because it expects String.prototype.split to distinguish whether
+# separator was undefined or not passed at all.
+ecma/String/15.5.4.8-2: FAIL_OK
+
# Fail because of toLowerCase and toUpperCase conversion.
ecma/String/15.5.4.11-2: FAIL_OK
ecma/String/15.5.4.11-5: FAIL_OK
@@ -863,3 +870,59 @@ ecma/Expressions/11.7.3: SKIP
ecma/Expressions/11.10-3: SKIP
ecma/Expressions/11.7.1: SKIP
ecma_3/RegExp/regress-209067: SKIP
+
+[ $arch == mips ]
+
+# Times out and print so much output that we need to skip it to not
+# hang the builder.
+js1_5/extensions/regress-342960: SKIP
+
+# BUG(3251229): Times out when running new crankshaft test script.
+ecma_3/RegExp/regress-311414: SKIP
+ecma/Date/15.9.5.8: SKIP
+ecma/Date/15.9.5.10-2: SKIP
+ecma/Date/15.9.5.11-2: SKIP
+ecma/Date/15.9.5.12-2: SKIP
+js1_5/Array/regress-99120-02: SKIP
+js1_5/extensions/regress-371636: SKIP
+js1_5/Regress/regress-203278-1: SKIP
+js1_5/Regress/regress-404755: SKIP
+js1_5/Regress/regress-451322: SKIP
+
+
+# BUG(1040): Allow this test to timeout.
+js1_5/GC/regress-203278-2: PASS || TIMEOUT
+
+
+[ $fast == yes && $arch == mips ]
+
+# In fast mode on mips we try to skip all tests that would time out,
+# since running the tests takes so long in the first place.
+
+js1_5/Regress/regress-280769-2: SKIP
+js1_5/Regress/regress-280769-3: SKIP
+js1_5/Regress/regress-244470: SKIP
+js1_5/Regress/regress-203278-1: SKIP
+js1_5/Regress/regress-290575: SKIP
+js1_5/Regress/regress-159334: SKIP
+js1_5/Regress/regress-321971: SKIP
+js1_5/Regress/regress-347306-01: SKIP
+js1_5/Regress/regress-280769-1: SKIP
+js1_5/Regress/regress-280769-5: SKIP
+js1_5/GC/regress-306788: SKIP
+js1_5/GC/regress-278725: SKIP
+js1_5/GC/regress-203278-3: SKIP
+js1_5/GC/regress-311497: SKIP
+js1_5/Array/regress-99120-02: SKIP
+ecma/Date/15.9.5.22-1: SKIP
+ecma/Date/15.9.5.20: SKIP
+ecma/Date/15.9.5.12-2: SKIP
+ecma/Date/15.9.5.8: SKIP
+ecma/Date/15.9.5.9: SKIP
+ecma/Date/15.9.5.11-2: SKIP
+ecma/Expressions/11.7.2: SKIP
+ecma/Expressions/11.10-2: SKIP
+ecma/Expressions/11.7.3: SKIP
+ecma/Expressions/11.10-3: SKIP
+ecma/Expressions/11.7.1: SKIP
+ecma_3/RegExp/regress-209067: SKIP
diff --git a/src/3rdparty/v8/test/preparser/preparser.status b/src/3rdparty/v8/test/preparser/preparser.status
index db17778..6f15fed 100644
--- a/src/3rdparty/v8/test/preparser/preparser.status
+++ b/src/3rdparty/v8/test/preparser/preparser.status
@@ -31,9 +31,3 @@ prefix preparser
# escapes (we need to parse to distinguish octal escapes from valid
# back-references).
strict-octal-regexp: FAIL
-
-##############################################################################
-[ $arch == mips ]
-
-# Skip all tests on MIPS.
-*: SKIP
diff --git a/src/3rdparty/v8/test/sputnik/sputnik.status b/src/3rdparty/v8/test/sputnik/sputnik.status
index 135540e..a4c7d57 100644
--- a/src/3rdparty/v8/test/sputnik/sputnik.status
+++ b/src/3rdparty/v8/test/sputnik/sputnik.status
@@ -125,6 +125,13 @@ S15.5.2_D2: PASS || FAIL_OK
S15.5.4.11_D1.1_T1: PASS || FAIL_OK
S15.5.4.11_D1.1_T3: PASS || FAIL_OK
S12.6.4_D1: PASS || FAIL_OK
+S15.5.4.14_A1_T6: FAIL_OK
+S15.5.4.14_A1_T7: FAIL_OK
+S15.5.4.14_A1_T8: FAIL_OK
+S15.5.4.14_A1_T9: FAIL_OK
+S15.5.4.14_A2_T7: FAIL_OK
+S15.10.2.12_A1_T1: FAIL_OK
+S15.10.2.12_A2_T1: FAIL_OK
# We allow function declarations within statements
S12.6.2_A13_T1: FAIL_OK
@@ -189,7 +196,6 @@ S15.3.4.3_A6_T4: FAIL_OK
S15.4.4.2_A2_T1: FAIL_OK
S15.4.4.3_A2_T1: FAIL_OK
-
##################### SKIPPED TESTS #####################
# These tests take a looong time to run in debug mode.
@@ -208,53 +214,6 @@ S15.3_A3_T1: FAIL
# Invalid test case (recent change adding var changes semantics)
S15.3_A3_T3: FAIL
-# These tests fail because we had to add bugs to be compatible with JSC. See
-# http://code.google.com/p/chromium/issues/detail?id=1717
-S15.5.4.1_A1_T2: FAIL_OK
-S15.5.4_A1: FAIL_OK
-S15.5.4_A3: FAIL_OK
-S15.9.5.10_A1_T2: FAIL_OK
-S15.9.5.11_A1_T2: FAIL_OK
-S15.9.5.12_A1_T2: FAIL_OK
-S15.9.5.13_A1_T2: FAIL_OK
-S15.9.5.14_A1_T2: FAIL_OK
-S15.9.5.15_A1_T2: FAIL_OK
-S15.9.5.16_A1_T2: FAIL_OK
-S15.9.5.17_A1_T2: FAIL_OK
-S15.9.5.18_A1_T2: FAIL_OK
-S15.9.5.19_A1_T2: FAIL_OK
-S15.9.5.20_A1_T2: FAIL_OK
-S15.9.5.21_A1_T2: FAIL_OK
-S15.9.5.22_A1_T2: FAIL_OK
-S15.9.5.23_A1_T2: FAIL_OK
-S15.9.5.24_A1_T2: FAIL_OK
-S15.9.5.25_A1_T2: FAIL_OK
-S15.9.5.26_A1_T2: FAIL_OK
-S15.9.5.27_A1_T2: FAIL_OK
-S15.9.5.28_A1_T2: FAIL_OK
-S15.9.5.29_A1_T2: FAIL_OK
-S15.9.5.2_A1_T2: FAIL_OK
-S15.9.5.30_A1_T2: FAIL_OK
-S15.9.5.31_A1_T2: FAIL_OK
-S15.9.5.32_A1_T2: FAIL_OK
-S15.9.5.33_A1_T2: FAIL_OK
-S15.9.5.34_A1_T2: FAIL_OK
-S15.9.5.35_A1_T2: FAIL_OK
-S15.9.5.36_A1_T2: FAIL_OK
-S15.9.5.37_A1_T2: FAIL_OK
-S15.9.5.38_A1_T2: FAIL_OK
-S15.9.5.39_A1_T2: FAIL_OK
-S15.9.5.3_A1_T2: FAIL_OK
-S15.9.5.40_A1_T2: FAIL_OK
-S15.9.5.41_A1_T2: FAIL_OK
-S15.9.5.42_A1_T2: FAIL_OK
-S15.9.5.4_A1_T2: FAIL_OK
-S15.9.5.5_A1_T2: FAIL_OK
-S15.9.5.6_A1_T2: FAIL_OK
-S15.9.5.7_A1_T2: FAIL_OK
-S15.9.5.8_A1_T2: FAIL_OK
-S15.9.5.9_A1_T2: FAIL_OK
-
[ $arch == arm ]
# BUG(3251225): Tests that timeout with --nocrankshaft.
@@ -271,5 +230,14 @@ S15.1.3.2_A2.5_T1: SKIP
[ $arch == mips ]
-# Skip all tests on MIPS.
-*: SKIP
+# BUG(3251225): Tests that timeout with --nocrankshaft.
+S15.1.3.1_A2.5_T1: SKIP
+S15.1.3.2_A2.5_T1: SKIP
+S15.1.3.1_A2.4_T1: SKIP
+S15.1.3.1_A2.5_T1: SKIP
+S15.1.3.2_A2.4_T1: SKIP
+S15.1.3.2_A2.5_T1: SKIP
+S15.1.3.3_A2.3_T1: SKIP
+S15.1.3.4_A2.3_T1: SKIP
+S15.1.3.1_A2.5_T1: SKIP
+S15.1.3.2_A2.5_T1: SKIP
diff --git a/src/3rdparty/v8/test/test262/README b/src/3rdparty/v8/test/test262/README
index 094356f..dae1843 100644
--- a/src/3rdparty/v8/test/test262/README
+++ b/src/3rdparty/v8/test/test262/README
@@ -4,11 +4,11 @@ tests from
http://hg.ecmascript.org/tests/test262
-at revision 271 as 'data' in this directory. Using later version
+at revision 309 as 'data' in this directory. Using later version
may be possible but the tests are only known to pass (and indeed run)
with that revision.
-hg clone -r 271 http://hg.ecmascript.org/tests/test262 data
+hg clone -r 309 http://hg.ecmascript.org/tests/test262 data
If you do update to a newer revision you may have to change the test
harness adapter code since it uses internal functionality from the
diff --git a/src/3rdparty/v8/test/test262/test262.status b/src/3rdparty/v8/test/test262/test262.status
index 4bdd6c1..3f395bd 100644
--- a/src/3rdparty/v8/test/test262/test262.status
+++ b/src/3rdparty/v8/test/test262/test262.status
@@ -33,115 +33,11 @@ def FAIL_OK = FAIL, OKAY
# '__proto__' should be treated as a normal property in JSON.
S15.12.2_A1: FAIL
-# V8 Bug: http://code.google.com/p/v8/issues/detail?id=1196
-S8.7_A5_T2: FAIL
-
-# V8 Bug: http://code.google.com/p/v8/issues/detail?id=1624
-S10.4.2.1_A1: FAIL
-
-# V8 Bug: http://code.google.com/p/v8/issues/detail?id=1530
-S15.3.3.1_A4: FAIL
-
-# V8 Bug: http://code.google.com/p/v8/issues/detail?id=1756
-15.2.3.6-4-116: FAIL
-15.2.3.6-4-117: FAIL
-15.2.3.6-4-126: FAIL
-15.2.3.6-4-127: FAIL
-15.2.3.6-4-128: FAIL
-15.2.3.6-4-129: FAIL
-15.2.3.6-4-130: FAIL
-15.2.3.6-4-131: FAIL
-15.2.3.6-4-132: FAIL
-15.2.3.6-4-137: FAIL
-15.2.3.6-4-142: FAIL
-15.2.3.6-4-143: FAIL
-15.2.3.6-4-144: FAIL
-15.2.3.6-4-146: FAIL
-15.2.3.6-4-147: FAIL
-15.2.3.6-4-148: FAIL
-15.2.3.6-4-149: FAIL
-15.2.3.6-4-151: FAIL
-15.2.3.6-4-154: FAIL
-15.2.3.6-4-155: FAIL
-15.2.3.6-4-159: FAIL
-15.2.3.6-4-161: FAIL
-15.2.3.6-4-165: FAIL
-15.2.3.6-4-166: FAIL
-15.2.3.6-4-167: FAIL
-15.2.3.6-4-168: FAIL
-15.2.3.6-4-169: FAIL
-15.2.3.6-4-170: FAIL
-15.2.3.6-4-171: FAIL
-15.2.3.6-4-172: FAIL
-15.2.3.6-4-173: FAIL
-15.2.3.6-4-174: FAIL
-15.2.3.6-4-175: FAIL
-15.2.3.6-4-176: FAIL
-15.2.3.6-4-177: FAIL
-15.2.3.6-4-178: FAIL
-15.2.3.6-4-179-1: FAIL
-15.2.3.6-4-181: FAIL
-15.2.3.7-6-a-112: FAIL
-15.2.3.7-6-a-113: FAIL
-15.2.3.7-6-a-122: FAIL
-15.2.3.7-6-a-123: FAIL
-15.2.3.7-6-a-124: FAIL
-15.2.3.7-6-a-125: FAIL
-15.2.3.7-6-a-126: FAIL
-15.2.3.7-6-a-127: FAIL
-15.2.3.7-6-a-128: FAIL
-15.2.3.7-6-a-133: FAIL
-15.2.3.7-6-a-138: FAIL
-15.2.3.7-6-a-139: FAIL
-15.2.3.7-6-a-140: FAIL
-15.2.3.7-6-a-142: FAIL
-15.2.3.7-6-a-143: FAIL
-15.2.3.7-6-a-144: FAIL
-15.2.3.7-6-a-145: FAIL
-15.2.3.7-6-a-147: FAIL
-15.2.3.7-6-a-150: FAIL
-15.2.3.7-6-a-151: FAIL
-15.2.3.7-6-a-155: FAIL
-15.2.3.7-6-a-157: FAIL
-15.2.3.7-6-a-161: FAIL
-15.2.3.7-6-a-162: FAIL
-15.2.3.7-6-a-163: FAIL
-15.2.3.7-6-a-164: FAIL
-15.2.3.7-6-a-165: FAIL
-15.2.3.7-6-a-166: FAIL
-15.2.3.7-6-a-167: FAIL
-15.2.3.7-6-a-168: FAIL
-15.2.3.7-6-a-169: FAIL
-15.2.3.7-6-a-170: FAIL
-15.2.3.7-6-a-171: FAIL
-15.2.3.7-6-a-172: FAIL
-15.2.3.7-6-a-173: FAIL
-15.2.3.7-6-a-174: FAIL
-15.2.3.7-6-a-175: FAIL
-15.2.3.7-6-a-176: FAIL
-15.2.3.7-6-a-177: FAIL
-
-# V8 Bug: http://code.google.com/p/v8/issues/detail?id=1772
-15.2.3.6-4-292-1: FAIL
-15.2.3.6-4-293-2: FAIL
-15.2.3.6-4-293-3: FAIL
-15.2.3.6-4-294-1: FAIL
-15.2.3.6-4-295-1: FAIL
-15.2.3.6-4-296-1: FAIL
-15.2.3.6-4-333-11: FAIL
-15.2.3.7-6-a-281: FAIL
-15.2.3.7-6-a-282: FAIL
-15.2.3.7-6-a-283: FAIL
-15.2.3.7-6-a-284: FAIL
-15.2.3.7-6-a-285: FAIL
-
-# V8 Bug: http://code.google.com/p/v8/issues/detail?id=1790
-15.4.4.22-9-9: FAIL
-
-# Invalid test cases (recent change adding var changes semantics)
-S8.3_A1_T1: FAIL
-S15.3_A3_T1: FAIL
-S15.3_A3_T3: FAIL
+# V8 Bug: http://code.google.com/p/v8/issues/detail?id=1475
+15.2.3.6-4-405: FAIL
+15.2.3.6-4-410: FAIL
+15.2.3.6-4-415: FAIL
+15.2.3.6-4-420: FAIL
##################### DELIBERATE INCOMPATIBILITIES #####################
@@ -149,32 +45,18 @@ S15.3_A3_T3: FAIL
# octal numbers in order to not break the web.
S15.1.2.2_A5.1_T1: FAIL_OK
-# This tests precision of trignometric functions. We're slightly off
-# from the implementation in libc (~ 1e-17) but it's not clear if we
-# or they are closer to the right answer, or if it even matters.
+# This tests precision of Math.tan and Math.sin. The implementation for those
+# trigonometric functions are platform/compiler dependent. Furthermore, the
+# expectation values by far deviates from the actual result given by an
+# arbitrary-precision calculator, making those tests partly bogus.
S15.8.2.16_A7: PASS || FAIL_OK
S15.8.2.18_A7: PASS || FAIL_OK
-S15.8.2.13_A23: PASS || FAIL_OK
-
-# Sputnik tests (r97) assume RegExp.prototype is an Object, not a RegExp.
-S15.10.6_A2: FAIL_OK
-
-# We are silent in some regexp cases where the spec wants us to give
-# errors, for compatibility.
-S15.10.2.11_A1_T2: FAIL
-S15.10.2.11_A1_T3: FAIL
# We are more lenient in which string character escapes we allow than
# the spec (7.8.4 p. 19) wants us to be. This is for compatibility.
-S7.8.4_A4.3_T3: FAIL_OK
-S7.8.4_A4.3_T4: FAIL_OK
-S7.8.4_A4.3_T5: FAIL_OK
-S7.8.4_A4.3_T6: FAIL_OK
S7.8.4_A6.1_T4: FAIL_OK
S7.8.4_A6.2_T1: FAIL_OK
S7.8.4_A6.2_T2: FAIL_OK
-S7.8.4_A6.4_T1: FAIL_OK
-S7.8.4_A6.4_T2: FAIL_OK
S7.8.4_A7.1_T4: FAIL_OK
S7.8.4_A7.2_T1: FAIL_OK
S7.8.4_A7.2_T2: FAIL_OK
@@ -182,237 +64,26 @@ S7.8.4_A7.2_T3: FAIL_OK
S7.8.4_A7.2_T4: FAIL_OK
S7.8.4_A7.2_T5: FAIL_OK
S7.8.4_A7.2_T6: FAIL_OK
-S7.8.4_A7.4_T1: FAIL_OK
-S7.8.4_A7.4_T2: FAIL_OK
-
-# Sputnik expects unicode escape sequences in RegExp flags to be interpreted.
-# The specification requires them to be passed uninterpreted to the RegExp
-# constructor. We now implement that.
-S7.8.5_A3.1_T7: FAIL_OK
-S7.8.5_A3.1_T8: FAIL_OK
-S7.8.5_A3.1_T9: FAIL_OK
-
-# We allow some keywords to be used as identifiers.
-S7.5.3_A1.15: FAIL_OK
-S7.5.3_A1.18: FAIL_OK
-S7.5.3_A1.21: FAIL_OK
-S7.5.3_A1.22: FAIL_OK
-S7.5.3_A1.23: FAIL_OK
-S7.5.3_A1.24: FAIL_OK
-S7.5.3_A1.26: FAIL_OK
-
-# This checks for non-262 behavior
-S7.6_D1: PASS || FAIL_OK
-S7.6_D2: PASS || FAIL_OK
-S8.4_D1.1: PASS || FAIL_OK
-S8.4_D2.1: PASS || FAIL_OK
-S8.4_D2.2: PASS || FAIL_OK
-S8.4_D2.3: PASS || FAIL_OK
-S8.4_D2.4: PASS || FAIL_OK
-S8.4_D2.5: PASS || FAIL_OK
-S8.4_D2.6: PASS || FAIL_OK
-S8.4_D2.7: PASS || FAIL_OK
-S11.4.3_D1.2: PASS || FAIL_OK
-S12.6.4_A14_T1: PASS || FAIL_OK
-S12.6.4_D1: PASS || FAIL_OK
-S12.6.4_R1: PASS || FAIL_OK
-S12.6.4_R2: PASS || FAIL_OK
-S13.2_D1.2: PASS || FAIL_OK
-S13_D1_T1: PASS || FAIL_OK
-S14_D4_T3: PASS || FAIL_OK
-S14_D7: PASS || FAIL_OK
-S15.1.2.2_D1.2: PASS || FAIL_OK
-S15.5.2_D2: PASS || FAIL_OK
-S15.5.4.11_D1.1_T1: PASS || FAIL_OK
-S15.5.4.11_D1.1_T2: PASS || FAIL_OK
-S15.5.4.11_D1.1_T3: PASS || FAIL_OK
-S15.5.4.11_D1.1_T4: PASS || FAIL_OK
-# We allow function declarations within statements
-S12.6.2_A13_T1: FAIL_OK
-S12.6.2_A13_T2: FAIL_OK
-S12.6.4_A13_T1: FAIL_OK
-S12.6.4_A13_T2: FAIL_OK
-S15.3.4.2_A1_T1: FAIL_OK
-
-# Linux and Mac defaults to extended 80 bit floating point format in the FPU.
+# Linux for ia32 (and therefore simulators) default to extended 80 bit floating
+# point formats, so these tests checking 64-bit FP precision fail. The other
+# platforms/arch's pass these tests.
# We follow the other major JS engines by keeping this default.
-S8.5_A2.2: PASS, FAIL if $system == linux, FAIL if $system == macos
-S8.5_A2.1: PASS, FAIL if $system == linux, FAIL if $system == macos
-
-# These tests fail because we had to add bugs to be compatible with JSC. See
-# http://code.google.com/p/chromium/issues/detail?id=1717
-S15.5.4.1_A1_T2: FAIL_OK
-S15.5.4_A1: FAIL_OK
-S15.5.4_A3: FAIL_OK
-S15.9.5.10_A1_T2: FAIL_OK
-S15.9.5.11_A1_T2: FAIL_OK
-S15.9.5.12_A1_T2: FAIL_OK
-S15.9.5.13_A1_T2: FAIL_OK
-S15.9.5.14_A1_T2: FAIL_OK
-S15.9.5.15_A1_T2: FAIL_OK
-S15.9.5.16_A1_T2: FAIL_OK
-S15.9.5.17_A1_T2: FAIL_OK
-S15.9.5.18_A1_T2: FAIL_OK
-S15.9.5.19_A1_T2: FAIL_OK
-S15.9.5.20_A1_T2: FAIL_OK
-S15.9.5.21_A1_T2: FAIL_OK
-S15.9.5.22_A1_T2: FAIL_OK
-S15.9.5.23_A1_T2: FAIL_OK
-S15.9.5.24_A1_T2: FAIL_OK
-S15.9.5.25_A1_T2: FAIL_OK
-S15.9.5.26_A1_T2: FAIL_OK
-S15.9.5.27_A1_T2: FAIL_OK
-S15.9.5.28_A1_T2: FAIL_OK
-S15.9.5.29_A1_T2: FAIL_OK
-S15.9.5.2_A1_T2: FAIL_OK
-S15.9.5.30_A1_T2: FAIL_OK
-S15.9.5.31_A1_T2: FAIL_OK
-S15.9.5.32_A1_T2: FAIL_OK
-S15.9.5.33_A1_T2: FAIL_OK
-S15.9.5.34_A1_T2: FAIL_OK
-S15.9.5.35_A1_T2: FAIL_OK
-S15.9.5.36_A1_T2: FAIL_OK
-S15.9.5.37_A1_T2: FAIL_OK
-S15.9.5.38_A1_T2: FAIL_OK
-S15.9.5.39_A1_T2: FAIL_OK
-S15.9.5.3_A1_T2: FAIL_OK
-S15.9.5.40_A1_T2: FAIL_OK
-S15.9.5.41_A1_T2: FAIL_OK
-S15.9.5.42_A1_T2: FAIL_OK
-S15.9.5.4_A1_T2: FAIL_OK
-S15.9.5.5_A1_T2: FAIL_OK
-S15.9.5.6_A1_T2: FAIL_OK
-S15.9.5.7_A1_T2: FAIL_OK
-S15.9.5.8_A1_T2: FAIL_OK
-S15.9.5.9_A1_T2: FAIL_OK
-
-############################# ES3 TESTS ################################
-# These tests check for ES3 semantics, and differ from ES5.
-# When we follow ES5 semantics, it's ok to fail the test.
-
-# Allow keywords as names of properties in object initialisers and
-# in dot-notation property access.
-S11.1.5_A4.1: FAIL_OK
-S11.1.5_A4.2: FAIL_OK
-
-# Calls builtins without an explicit receiver which means that
-# undefined is passed to the builtin. The tests expect the global
-# object to be passed which was true in ES3 but not in ES5.
-S11.1.1_A2: FAIL_OK
-S15.5.4.4_A1_T3: FAIL_OK
-S15.5.4.5_A1_T3: FAIL_OK
-S15.5.4.6_A1_T3: FAIL_OK
-S15.5.4.7_A1_T3: FAIL_OK
-S15.5.4.8_A1_T3: FAIL_OK
-S15.5.4.9_A1_T3: FAIL_OK
-S15.5.4.10_A1_T3: FAIL_OK
-S15.5.4.11_A1_T3: FAIL_OK
-S15.5.4.12_A1_T3: FAIL_OK
-S15.5.4.13_A1_T3: FAIL_OK
-S15.5.4.14_A1_T3: FAIL_OK
-S15.5.4.15_A1_T3: FAIL_OK
-
-# NaN, Infinity and undefined are read-only according to ES5.
-S15.1.1.1_A2_T1: FAIL_OK # NaN
-S15.1.1.1_A2_T2: FAIL_OK # NaN
-S15.1.1.2_A2_T1: FAIL_OK # Infinity
-# S15.1.1.2_A2_T2 would fail if it weren't bogus in r97. sputnik bug #45.
-S15.1.1.3_A2_T1: FAIL_OK # undefined
-S15.1.1.3_A2_T2: FAIL_OK # undefined
-
-# Array.prototype.to[Locale]String is generic in ES5.
-S15.4.4.2_A2_T1: FAIL_OK
-S15.4.4.3_A2_T1: FAIL_OK
-
-######################### UNANALYZED FAILURES ##########################
-
-# Bug? ES5 Attributes - Updating indexed data property 'P' whose attributes are
-# [[Writable]]: false, [[Enumerable]]: true, [[Configurable]]: true to an
-# accessor property, 'A' is an Array object (8.12.9 - step 9.b.i)
-15.2.3.6-4-360-1: FAIL
-# Bug? ES5 Attributes - Updating indexed data property 'P' whose attributes are
-# [[Writable]]: false, [[Enumerable]]: true, [[Configurable]]: true to an
-# accessor property, 'O' is an Arguments object (8.12.9 - step 9.b.i)
-15.2.3.6-4-360-6: FAIL
-# Bug? ES5 Attributes - Updating indexed data property 'P' whose attributes are
-# [[Writable]]: false, [[Enumerable]]: true, [[Configurable]]: true to an
-# accessor property, 'O' is the global object (8.12.9 - step 9.b.i)
-15.2.3.6-4-360-7: FAIL
-# Bug? ES5 Attributes - Failed to add a property to an object when the object's
-# object has a property with same name and [[Writable]] attribute is set to
-# false (Number instance)
-15.2.3.6-4-405: FAIL
-# Bug? ES5 Attributes - Failed to add a property to an object when the object's
-# prototype has a property with the same name and [[Writable]] set to false
-# (JSON)
-15.2.3.6-4-410: FAIL
-# Bug? ES5 Attributes - Failed to add properties to an object when the object's
-# prototype has properties with the same name and [[Writable]] set to false
-# (Object.create)
-15.2.3.6-4-415: FAIL
-# Bug? ES5 Attributes - Failed to add a property to an object when the object's
-# prototype has a property with the same name and [[Writable]] set to
-# false(Function.prototype.bind)
-15.2.3.6-4-420: FAIL
-# Bug? ES5 Attributes - all attributes in Array.prototype.indexOf are correct
-15.2.3.6-4-612: FAIL
-# Bug? ES5 Attributes - all attributes in Object.lastIndexOf are correct
-15.2.3.6-4-613: FAIL
-# Bug? ES5 Attributes - all attributes in Array.prototype.every are correct
-15.2.3.6-4-614: FAIL
-# Bug? ES5 Attributes - all attributes in Array.prototype.some are correct
-15.2.3.6-4-615: FAIL
-# Bug? ES5 Attributes - all attributes in Array.prototype.forEach are correct
-15.2.3.6-4-616: FAIL
-# Bug? ES5 Attributes - all attributes in Array.prototype.map are correct
-15.2.3.6-4-617: FAIL
-# Bug? ES5 Attributes - all attributes in Array.prototype.filter are correct
-15.2.3.6-4-618: FAIL
-# Bug? ES5 Attributes - all attributes in Array.prototype.reduce are correct
-15.2.3.6-4-619: FAIL
-# Bug? ES5 Attributes - all attributes in Array.prototype.reduceRight are
-# correct
-15.2.3.6-4-620: FAIL
-# Bug? ES5 Attributes - all attributes in String.prototype.trim are correct
-15.2.3.6-4-621: FAIL
-# Bug? ES5 Attributes - all attributes in Date.prototype.toISOString are correct
-15.2.3.6-4-623: FAIL
-# Bug? ES5 Attributes - all attributes in Date.prototype.toJSON are correct
-15.2.3.6-4-624: FAIL
-# Bug? Array.prototype.indexOf - decreasing length of array does not delete
-# non-configurable properties
-15.4.4.14-9-a-19: FAIL
-# Bug? Array.prototype.lastIndexOf - decreasing length of array does not delete
-# non-configurable properties
-15.4.4.15-8-a-19: FAIL
-# Bug? Array.prototype.every - decreasing length of array does not delete
-# non-configurable properties
-15.4.4.16-7-b-16: FAIL
-# Bug? Array.prototype.some - decreasing length of array does not delete
-# non-configurable properties
-15.4.4.17-7-b-16: FAIL
-# Bug? Array.prototype.forEach - decreasing length of array does not delete
-# non-configurable properties
-15.4.4.18-7-b-16: FAIL
-# Bug? Array.prototype.map - decreasing length of array does not delete
-# non-configurable properties
-15.4.4.19-8-b-16: FAIL
-# Bug? Array.prototype.filter - decreasing length of array does not delete
-# non-configurable properties
-15.4.4.20-9-b-16: FAIL
-# Bug? Array.prototype.reduce - decreasing length of array in step 8 does not
-# delete non-configurable properties
-15.4.4.21-9-b-16: FAIL
-# Bug? Array.prototype.reduce - decreasing length of array does not delete
-# non-configurable properties
-15.4.4.21-9-b-29: FAIL
-# Bug? Array.prototype.reduceRight - decreasing length of array in step 8 does
-# not delete non-configurable properties
-15.4.4.22-9-b-16: FAIL
-# Bug? Array.prototype.reduceRight - decreasing length of array does not delete
-# non-configurable properties
-15.4.4.22-9-b-29: FAIL
+S8.5_A2.1: PASS || FAIL_OK
+S8.5_A2.2: PASS || FAIL_OK
+
+############################ INVALID TESTS #############################
+
+# The reference value calculated by Test262 is incorrect if you run these tests
+# in PST/PDT between first Sunday in March and first Sunday in April. The DST
+# switch was moved in 2007 whereas Test262 bases the reference value on 2000.
+# Test262 Bug: https://bugs.ecmascript.org/show_bug.cgi?id=293
+S15.9.3.1_A5_T1: PASS || FAIL_OK
+S15.9.3.1_A5_T2: PASS || FAIL_OK
+S15.9.3.1_A5_T3: PASS || FAIL_OK
+S15.9.3.1_A5_T4: PASS || FAIL_OK
+S15.9.3.1_A5_T5: PASS || FAIL_OK
+S15.9.3.1_A5_T6: PASS || FAIL_OK
############################ SKIPPED TESTS #############################
@@ -436,5 +107,14 @@ S15.1.3.2_A2.5_T1: SKIP
[ $arch == mips ]
-# Skip all tests on MIPS.
-*: SKIP
+# BUG(3251225): Tests that timeout with --nocrankshaft.
+S15.1.3.1_A2.5_T1: SKIP
+S15.1.3.2_A2.5_T1: SKIP
+S15.1.3.1_A2.4_T1: SKIP
+S15.1.3.1_A2.5_T1: SKIP
+S15.1.3.2_A2.4_T1: SKIP
+S15.1.3.2_A2.5_T1: SKIP
+S15.1.3.3_A2.3_T1: SKIP
+S15.1.3.4_A2.3_T1: SKIP
+S15.1.3.1_A2.5_T1: SKIP
+S15.1.3.2_A2.5_T1: SKIP
diff --git a/src/3rdparty/v8/test/test262/testcfg.py b/src/3rdparty/v8/test/test262/testcfg.py
index 52127cd..e4a3bde 100644
--- a/src/3rdparty/v8/test/test262/testcfg.py
+++ b/src/3rdparty/v8/test/test262/testcfg.py
@@ -1,4 +1,4 @@
-# Copyright 2011 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -29,8 +29,14 @@
import test
import os
from os.path import join, exists
+import urllib
+import hashlib
+import tarfile
+TEST_262_ARCHIVE_REVISION = '3a890174343c' # This is the r309 revision.
+TEST_262_ARCHIVE_MD5 = 'be5d4cfbe69cef70430907b8f3a92b50'
+TEST_262_URL = 'http://hg.ecmascript.org/tests/test262/archive/%s.tar.bz2'
TEST_262_HARNESS = ['sta.js']
@@ -55,7 +61,6 @@ class Test262TestCase(test.TestCase):
def GetCommand(self):
result = self.context.GetVmCommand(self, self.mode)
- result += ['-e', 'var window = this']
result += self.framework
result.append(self.filename)
return result
@@ -94,6 +99,26 @@ class Test262TestConfiguration(test.TestConfiguration):
tests.append(test)
return tests
+ def DownloadData(self):
+ revision = TEST_262_ARCHIVE_REVISION
+ archive_url = TEST_262_URL % revision
+ archive_name = join(self.root, 'test262-%s.tar.bz2' % revision)
+ directory_name = join(self.root, 'data')
+ if not exists(archive_name):
+ print "Downloading test data from %s ..." % archive_url
+ urllib.urlretrieve(archive_url, archive_name)
+ if not exists(directory_name):
+ print "Extracting test262-%s.tar.bz2 ..." % revision
+ md5 = hashlib.md5()
+ with open(archive_name,'rb') as f:
+ for chunk in iter(lambda: f.read(8192), ''):
+ md5.update(chunk)
+ if md5.hexdigest() != TEST_262_ARCHIVE_MD5:
+ raise Exception("Hash mismatch of test data file")
+ archive = tarfile.open(archive_name, 'r:bz2')
+ archive.extractall(join(self.root))
+ os.rename(join(self.root, 'test262-%s' % revision), directory_name)
+
def GetBuildRequirements(self):
return ['d8']
diff --git a/src/3rdparty/v8/tools/bash-completion.sh b/src/3rdparty/v8/tools/bash-completion.sh
new file mode 100644
index 0000000..9f65c67
--- /dev/null
+++ b/src/3rdparty/v8/tools/bash-completion.sh
@@ -0,0 +1,55 @@
+#!/bin/bash
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Inspired by and based on:
+# http://src.chromium.org/viewvc/chrome/trunk/src/tools/bash-completion
+
+# Flag completion rule for bash.
+# To load in your shell, "source path/to/this/file".
+
+v8_source=$(readlink -f $(dirname $BASH_SOURCE)/..)
+
+_v8_flag() {
+ local cur defines targets
+ cur="${COMP_WORDS[COMP_CWORD]}"
+ defines=$(cat src/flag-definitions.h \
+ | grep "^DEFINE" \
+ | grep -v "DEFINE_implication" \
+ | sed -e 's/_/-/g')
+ targets=$(echo "$defines" \
+ | sed -ne 's/^DEFINE-[^(]*(\([^,]*\).*/--\1/p'; \
+ echo "$defines" \
+ | sed -ne 's/^DEFINE-bool(\([^,]*\).*/--no\1/p'; \
+ cat src/d8.cc \
+ | grep "strcmp(argv\[i\]" \
+ | sed -ne 's/^[^"]*"--\([^"]*\)".*/--\1/p')
+ COMPREPLY=($(compgen -W "$targets" -- "$cur"))
+ return 0
+}
+
+complete -F _v8_flag -f d8
diff --git a/src/3rdparty/v8/tools/check-static-initializers.sh b/src/3rdparty/v8/tools/check-static-initializers.sh
new file mode 100644
index 0000000..e6da828
--- /dev/null
+++ b/src/3rdparty/v8/tools/check-static-initializers.sh
@@ -0,0 +1,55 @@
+#!/bin/bash
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Checks that the number of compilation units having at least one static
+# initializer in d8 matches the one defined below.
+# Note that the project must be built with SCons before running this script.
+
+# Allow:
+# - _GLOBAL__I__ZN2v810LineEditor6first_E
+# - _GLOBAL__I__ZN2v88internal32AtomicOps_Internalx86CPUFeaturesE
+# - _GLOBAL__I__ZN2v88internal8ThreadId18highest_thread_id_E
+expected_static_init_count=3
+
+v8_root=$(readlink -f $(dirname $BASH_SOURCE)/../)
+d8="${v8_root}/d8"
+
+if [ ! -f "$d8" ]; then
+ echo "Please build the project with SCons."
+ exit 1
+fi
+
+static_inits=$(nm "$d8" | grep _GLOBAL__I | awk '{ print $NF; }')
+
+static_init_count=$(echo "$static_inits" | wc -l)
+
+if [ $static_init_count -gt $expected_static_init_count ]; then
+ echo "Too many static initializers."
+ echo "$static_inits"
+ exit 1
+fi
diff --git a/src/3rdparty/v8/tools/common-includes.sh b/src/3rdparty/v8/tools/common-includes.sh
new file mode 100644
index 0000000..d1b8b01
--- /dev/null
+++ b/src/3rdparty/v8/tools/common-includes.sh
@@ -0,0 +1,204 @@
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# This file contains common function definitions for various other shell
+# scripts in this directory. It is not meant to be executed by itself.
+
+# Important: before including this file, the following variables must be set:
+# - BRANCHNAME
+# - PERSISTFILE_BASENAME
+
+TEMP_BRANCH=$BRANCHNAME-temporary-branch-created-by-script
+VERSION_FILE="src/version.cc"
+CHANGELOG_ENTRY_FILE="$PERSISTFILE_BASENAME-changelog-entry"
+PATCH_FILE="$PERSISTFILE_BASENAME-patch"
+PATCH_OUTPUT_FILE="$PERSISTFILE_BASENAME-patch-output"
+COMMITMSG_FILE="$PERSISTFILE_BASENAME-commitmsg"
+TOUCHED_FILES_FILE="$PERSISTFILE_BASENAME-touched-files"
+TRUNK_REVISION_FILE="$PERSISTFILE_BASENAME-trunkrevision"
+START_STEP=0
+CURRENT_STEP=0
+
+die() {
+ [[ -n "$1" ]] && echo "Error: $1"
+ echo "Exiting."
+ exit 1
+}
+
+confirm() {
+ echo -n "$1 [Y/n] "
+ read ANSWER
+ if [[ -z "$ANSWER" || "$ANSWER" == "Y" || "$ANSWER" == "y" ]] ; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+delete_branch() {
+ local MATCH=$(git branch | grep "$1" | awk '{print $NF}' | grep -x $1)
+ if [ "$MATCH" == "$1" ] ; then
+ confirm "Branch $1 exists, do you want to delete it?"
+ if [ $? -eq 0 ] ; then
+ git branch -D $1 || die "Deleting branch '$1' failed."
+ echo "Branch $1 deleted."
+ else
+ die "Can't continue. Please delete branch $1 and try again."
+ fi
+ fi
+}
+
+# Persist and restore variables to support canceling/resuming execution
+# of this script.
+persist() {
+ local VARNAME=$1
+ local FILE="$PERSISTFILE_BASENAME-$VARNAME"
+ local VALUE="${!VARNAME}"
+ if [ -z "$VALUE" ] ; then
+ VALUE="__EMPTY__"
+ fi
+ echo "$VALUE" > $FILE
+}
+
+restore() {
+ local VARNAME=$1
+ local FILE="$PERSISTFILE_BASENAME-$VARNAME"
+ local VALUE="$(cat $FILE)"
+ [[ -z "$VALUE" ]] && die "Variable '$VARNAME' could not be restored."
+ if [ "$VALUE" == "__EMPTY__" ] ; then
+ VALUE=""
+ fi
+ eval "$VARNAME=\"$VALUE\""
+}
+
+restore_if_unset() {
+ local VARNAME=$1
+ [[ -z "${!VARNAME}" ]] && restore "$VARNAME"
+}
+
+initial_environment_checks() {
+ # Cancel if this is not a git checkout.
+ [[ -d .git ]] \
+ || die "This is not a git checkout, this script won't work for you."
+
+ # Cancel if EDITOR is unset or not executable.
+ [[ -n "$EDITOR" && -x "$(which $EDITOR)" ]] \
+ || die "Please set your EDITOR environment variable, you'll need it."
+}
+
+common_prepare() {
+ # Check for a clean workdir.
+ [[ -z "$(git status -s -uno)" ]] \
+ || die "Workspace is not clean. Please commit or undo your changes."
+
+ # Persist current branch.
+ CURRENT_BRANCH=$(git status -s -b -uno | grep "^##" | awk '{print $2}')
+ persist "CURRENT_BRANCH"
+
+ # Fetch unfetched revisions.
+ git svn fetch || die "'git svn fetch' failed."
+
+ # Get ahold of a safe temporary branch and check it out.
+ if [ "$CURRENT_BRANCH" != "$TEMP_BRANCH" ] ; then
+ delete_branch $TEMP_BRANCH
+ git checkout -b $TEMP_BRANCH
+ fi
+
+ # Delete the branch that will be created later if it exists already.
+ delete_branch $BRANCHNAME
+}
+
+common_cleanup() {
+ restore_if_unset "CURRENT_BRANCH"
+ git checkout -f $CURRENT_BRANCH
+ [[ "$TEMP_BRANCH" != "$CURRENT_BRANCH" ]] && git branch -D $TEMP_BRANCH
+ [[ "$BRANCHNAME" != "$CURRENT_BRANCH" ]] && git branch -D $BRANCHNAME
+ # Clean up all temporary files.
+ rm -f "$PERSISTFILE_BASENAME"*
+}
+
+# These two functions take a prefix for the variable names as first argument.
+read_and_persist_version() {
+ for v in MAJOR_VERSION MINOR_VERSION BUILD_NUMBER PATCH_LEVEL; do
+ VARNAME="$1${v%%_*}"
+ VALUE=$(grep "#define $v" "$VERSION_FILE" | awk '{print $NF}')
+ eval "$VARNAME=\"$VALUE\""
+ persist "$VARNAME"
+ done
+}
+restore_version_if_unset() {
+ for v in MAJOR MINOR BUILD PATCH; do
+ restore_if_unset "$1$v"
+ done
+}
+
+upload_step() {
+ let CURRENT_STEP+=1
+ if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Upload for code review."
+ echo -n "Please enter the email address of a V8 reviewer for your patch: "
+ read REVIEWER
+ git cl upload -r "$REVIEWER" --send-mail \
+ || die "'git cl upload' failed, please try again."
+ fi
+}
+
+wait_for_lgtm() {
+ echo "Please wait for an LGTM, then type \"LGTM<Return>\" to commit your \
+change. (If you need to iterate on the patch or double check that it's \
+sane, do so in another shell, but remember to not change the headline of \
+the uploaded CL."
+ unset ANSWER
+ while [ "$ANSWER" != "LGTM" ] ; do
+ [[ -n "$ANSWER" ]] && echo "That was not 'LGTM'."
+ echo -n "> "
+ read ANSWER
+ done
+}
+
+# Takes a file containing the patch to apply as first argument.
+apply_patch() {
+ patch $REVERSE_PATCH -p1 < "$1" > "$PATCH_OUTPUT_FILE" || \
+ { cat "$PATCH_OUTPUT_FILE" && die "Applying the patch failed."; }
+ tee < "$PATCH_OUTPUT_FILE" >(awk '{print $NF}' >> "$TOUCHED_FILES_FILE")
+ rm "$PATCH_OUTPUT_FILE"
+}
+
+stage_files() {
+ # Stage added and modified files.
+ TOUCHED_FILES=$(cat "$TOUCHED_FILES_FILE")
+ for FILE in $TOUCHED_FILES ; do
+ git add "$FILE"
+ done
+ # Stage deleted files.
+ DELETED_FILES=$(git status -s -uno --porcelain | grep "^ D" \
+ | awk '{print $NF}')
+ for FILE in $DELETED_FILES ; do
+ git rm "$FILE"
+ done
+ rm -f "$TOUCHED_FILES_FILE"
+}
diff --git a/src/3rdparty/v8/tools/disasm.py b/src/3rdparty/v8/tools/disasm.py
index c326382..681b425 100644
--- a/src/3rdparty/v8/tools/disasm.py
+++ b/src/3rdparty/v8/tools/disasm.py
@@ -48,7 +48,8 @@ _DISASM_LINE_RE = re.compile(r"\s*([a-f0-9]+):\s*(\S.*)")
_ARCH_MAP = {
"ia32": "-m i386",
"x64": "-m i386 -M x86-64",
- "arm": "-m arm" # Not supported by our objdump build.
+ "arm": "-m arm", # Not supported by our objdump build.
+ "mips": "-m mips" # Not supported by our objdump build.
}
diff --git a/src/3rdparty/v8/tools/gc-nvp-trace-processor.py b/src/3rdparty/v8/tools/gc-nvp-trace-processor.py
index de3dc90..fe5a7f3 100755
--- a/src/3rdparty/v8/tools/gc-nvp-trace-processor.py
+++ b/src/3rdparty/v8/tools/gc-nvp-trace-processor.py
@@ -219,7 +219,7 @@ def other_scope(r):
if r['gc'] == 's':
# there is no 'other' scope for scavenging collections.
return 0
- return r['pause'] - r['mark'] - r['sweep'] - r['compact'] - r['external']
+ return r['pause'] - r['mark'] - r['sweep'] - r['external']
def scavenge_scope(r):
if r['gc'] == 's':
@@ -238,7 +238,6 @@ plots = [
Plot(Item('Scavenge', scavenge_scope, lc = 'green'),
Item('Marking', 'mark', lc = 'purple'),
Item('Sweep', 'sweep', lc = 'blue'),
- Item('Compaction', 'compact', lc = 'red'),
Item('External', 'external', lc = '#489D43'),
Item('Other', other_scope, lc = 'grey'),
Item('IGC Steps', 'stepstook', lc = '#FF6347'))
@@ -250,7 +249,6 @@ plots = [
Plot(Item('Scavenge', scavenge_scope, lc = 'green'),
Item('Marking', 'mark', lc = 'purple'),
Item('Sweep', 'sweep', lc = 'blue'),
- Item('Compaction', 'compact', lc = 'red'),
Item('External', 'external', lc = '#489D43'),
Item('Other', other_scope, lc = '#ADD8E6'),
Item('External', 'external', lc = '#D3D3D3'))
@@ -309,7 +307,6 @@ def process_trace(filename):
trace = parse_gc_trace(filename)
marksweeps = filter(lambda r: r['gc'] == 'ms', trace)
- markcompacts = filter(lambda r: r['gc'] == 'mc', trace)
scavenges = filter(lambda r: r['gc'] == 's', trace)
globalgcs = filter(lambda r: r['gc'] != 's', trace)
@@ -368,10 +365,8 @@ def process_trace(filename):
stats(out, 'Total in GC', trace, 'pause')
stats(out, 'Scavenge', scavenges, 'pause')
stats(out, 'MarkSweep', marksweeps, 'pause')
- stats(out, 'MarkCompact', markcompacts, 'pause')
stats(out, 'Mark', filter(lambda r: r['mark'] != 0, trace), 'mark')
stats(out, 'Sweep', filter(lambda r: r['sweep'] != 0, trace), 'sweep')
- stats(out, 'Compact', filter(lambda r: r['compact'] != 0, trace), 'compact')
stats(out,
'External',
filter(lambda r: r['external'] != 0, trace),
@@ -379,7 +374,6 @@ def process_trace(filename):
out.write('</table>')
throughput('TOTAL', trace)
throughput('MS', marksweeps)
- throughput('MC', markcompacts)
throughput('OLDSPACE', globalgcs)
out.write('<br/>')
for chart in charts:
diff --git a/src/3rdparty/v8/tools/gcmole/gcmole.cc b/src/3rdparty/v8/tools/gcmole/gcmole.cc
index 71ba24a..38ee6e0 100644
--- a/src/3rdparty/v8/tools/gcmole/gcmole.cc
+++ b/src/3rdparty/v8/tools/gcmole/gcmole.cc
@@ -69,6 +69,21 @@ static bool InV8Namespace(const clang::NamedDecl* decl) {
}
+static std::string EXTERNAL("EXTERNAL");
+static std::string STATE_TAG("enum v8::internal::StateTag");
+
+static bool IsExternalVMState(const clang::ValueDecl* var) {
+ const clang::EnumConstantDecl* enum_constant =
+ dyn_cast<clang::EnumConstantDecl>(var);
+ if (enum_constant != NULL && enum_constant->getNameAsString() == EXTERNAL) {
+ clang::QualType type = enum_constant->getType();
+ return (type.getAsString() == STATE_TAG);
+ }
+
+ return false;
+}
+
+
struct Resolver {
explicit Resolver(clang::ASTContext& ctx)
: ctx_(ctx), decl_ctx_(ctx.getTranslationUnitDecl()) {
@@ -121,6 +136,13 @@ class CalleesPrinter : public clang::RecursiveASTVisitor<CalleesPrinter> {
return true;
}
+ virtual bool VisitDeclRefExpr(clang::DeclRefExpr* expr) {
+ // If function mentions EXTERNAL VMState add artificial garbage collection
+ // mark.
+ if (IsExternalVMState(expr->getDecl())) AddCallee("CollectGarbage");
+ return true;
+ }
+
void AnalyzeFunction(const clang::FunctionDecl* f) {
MangledName name;
if (InV8Namespace(f) && GetMangledName(ctx_, f, &name)) {
@@ -278,6 +300,10 @@ class ExprEffect {
return reinterpret_cast<Environment*>(effect_ & ~kAllEffects);
}
+ static ExprEffect GC() {
+ return ExprEffect(kCausesGC, NULL);
+ }
+
private:
ExprEffect(int effect, Environment* env)
: effect_((effect & kAllEffects) |
@@ -790,6 +816,9 @@ class FunctionAnalyzer {
ExprEffect Use(const clang::Expr* parent,
const clang::ValueDecl* var,
const Environment& env) {
+ if (IsExternalVMState(var)) {
+ return ExprEffect::GC();
+ }
return Use(parent, var->getType(), var->getNameAsString(), env);
}
diff --git a/src/3rdparty/v8/tools/gcmole/gcmole.lua b/src/3rdparty/v8/tools/gcmole/gcmole.lua
index f8d3b62..09db547 100644
--- a/src/3rdparty/v8/tools/gcmole/gcmole.lua
+++ b/src/3rdparty/v8/tools/gcmole/gcmole.lua
@@ -106,7 +106,6 @@ function InvokeClangPluginForEachFile(filenames, cfg, func)
cfg.plugin_args,
cfg.triple,
cfg.arch_define)
-
for _, filename in ipairs(filenames) do
log("-- %s", filename)
local action = cmd_line .. " src/" .. filename .. " 2>&1"
@@ -218,7 +217,13 @@ local WHITELIST = {
-- Callsites of such functions are safe as long as they are properly
-- check return value and propagate the Failure to the caller.
-- It should be possible to extend GCMole to understand this.
- "Heap.*AllocateFunctionPrototype"
+ "Heap.*AllocateFunctionPrototype",
+
+ -- Ignore all StateTag methods.
+ "StateTag",
+
+ -- Ignore printing of elements transition.
+ "PrintElementsTransition"
};
local function AddCause(name, cause)
diff --git a/src/3rdparty/v8/tools/gen-postmortem-metadata.py b/src/3rdparty/v8/tools/gen-postmortem-metadata.py
new file mode 100644
index 0000000..b9b1625
--- /dev/null
+++ b/src/3rdparty/v8/tools/gen-postmortem-metadata.py
@@ -0,0 +1,481 @@
+#!/usr/bin/env python
+
+#
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+#
+# Emits a C++ file to be compiled and linked into libv8 to support postmortem
+# debugging tools. Most importantly, this tool emits constants describing V8
+# internals:
+#
+# v8dbg_type_CLASS__TYPE = VALUE Describes class type values
+# v8dbg_class_CLASS__FIELD__TYPE = OFFSET Describes class fields
+# v8dbg_parent_CLASS__PARENT Describes class hierarchy
+# v8dbg_frametype_NAME = VALUE Describes stack frame values
+# v8dbg_off_fp_NAME = OFFSET Frame pointer offsets
+# v8dbg_prop_NAME = OFFSET Object property offsets
+# v8dbg_NAME = VALUE Miscellaneous values
+#
+# These constants are declared as global integers so that they'll be present in
+# the generated libv8 binary.
+#
+
+import re
+import sys
+
+#
+# Miscellaneous constants, tags, and masks used for object identification.
+#
+consts_misc = [
+ { 'name': 'FirstNonstringType', 'value': 'FIRST_NONSTRING_TYPE' },
+
+ { 'name': 'IsNotStringMask', 'value': 'kIsNotStringMask' },
+ { 'name': 'StringTag', 'value': 'kStringTag' },
+ { 'name': 'NotStringTag', 'value': 'kNotStringTag' },
+
+ { 'name': 'StringEncodingMask', 'value': 'kStringEncodingMask' },
+ { 'name': 'TwoByteStringTag', 'value': 'kTwoByteStringTag' },
+ { 'name': 'AsciiStringTag', 'value': 'kAsciiStringTag' },
+
+ { 'name': 'StringRepresentationMask',
+ 'value': 'kStringRepresentationMask' },
+ { 'name': 'SeqStringTag', 'value': 'kSeqStringTag' },
+ { 'name': 'ConsStringTag', 'value': 'kConsStringTag' },
+ { 'name': 'ExternalStringTag', 'value': 'kExternalStringTag' },
+
+ { 'name': 'FailureTag', 'value': 'kFailureTag' },
+ { 'name': 'FailureTagMask', 'value': 'kFailureTagMask' },
+ { 'name': 'HeapObjectTag', 'value': 'kHeapObjectTag' },
+ { 'name': 'HeapObjectTagMask', 'value': 'kHeapObjectTagMask' },
+ { 'name': 'SmiTag', 'value': 'kSmiTag' },
+ { 'name': 'SmiTagMask', 'value': 'kSmiTagMask' },
+ { 'name': 'SmiValueShift', 'value': 'kSmiTagSize' },
+ { 'name': 'PointerSizeLog2', 'value': 'kPointerSizeLog2' },
+
+ { 'name': 'prop_idx_content',
+ 'value': 'DescriptorArray::kContentArrayIndex' },
+ { 'name': 'prop_idx_first',
+ 'value': 'DescriptorArray::kFirstIndex' },
+ { 'name': 'prop_type_field',
+ 'value': 'FIELD' },
+ { 'name': 'prop_type_first_phantom',
+ 'value': 'MAP_TRANSITION' },
+ { 'name': 'prop_type_mask',
+ 'value': 'PropertyDetails::TypeField::kMask' },
+
+ { 'name': 'off_fp_context',
+ 'value': 'StandardFrameConstants::kContextOffset' },
+ { 'name': 'off_fp_marker',
+ 'value': 'StandardFrameConstants::kMarkerOffset' },
+ { 'name': 'off_fp_function',
+ 'value': 'JavaScriptFrameConstants::kFunctionOffset' },
+ { 'name': 'off_fp_args',
+ 'value': 'JavaScriptFrameConstants::kLastParameterOffset' },
+];
+
+#
+# The following useful fields are missing accessors, so we define fake ones.
+#
+extras_accessors = [
+ 'HeapObject, map, Map, kMapOffset',
+ 'JSObject, elements, Object, kElementsOffset',
+ 'FixedArray, data, uintptr_t, kHeaderSize',
+ 'Map, instance_attributes, int, kInstanceAttributesOffset',
+ 'Map, instance_descriptors, int, kInstanceDescriptorsOrBitField3Offset',
+ 'Map, inobject_properties, int, kInObjectPropertiesOffset',
+ 'Map, instance_size, int, kInstanceSizeOffset',
+ 'HeapNumber, value, double, kValueOffset',
+ 'ConsString, first, String, kFirstOffset',
+ 'ConsString, second, String, kSecondOffset',
+ 'ExternalString, resource, Object, kResourceOffset',
+ 'SeqAsciiString, chars, char, kHeaderSize',
+ 'SharedFunctionInfo, code, Code, kCodeOffset',
+ 'Code, instruction_start, uintptr_t, kHeaderSize',
+ 'Code, instruction_size, int, kInstructionSizeOffset',
+];
+
+#
+# The following is a whitelist of classes we expect to find when scanning the
+# source code. This list is not exhaustive, but it's still useful to identify
+# when this script gets out of sync with the source. See load_objects().
+#
+expected_classes = [
+ 'ConsString', 'FixedArray', 'HeapNumber', 'JSArray', 'JSFunction',
+ 'JSObject', 'JSRegExp', 'JSValue', 'Map', 'Oddball', 'Script',
+ 'SeqAsciiString', 'SharedFunctionInfo'
+];
+
+
+#
+# The following structures store high-level representations of the structures
+# for which we're going to emit descriptive constants.
+#
+types = {}; # set of all type names
+typeclasses = {}; # maps type names to corresponding class names
+klasses = {}; # known classes, including parents
+fields = []; # field declarations
+
+header = '''
+/*
+ * This file is generated by %s. Do not edit directly.
+ */
+
+#include "v8.h"
+#include "frames.h"
+#include "frames-inl.h" /* for architecture-specific frame constants */
+
+using namespace v8::internal;
+
+extern "C" {
+
+/* stack frame constants */
+#define FRAME_CONST(value, klass) \
+ int v8dbg_frametype_##klass = StackFrame::value;
+
+STACK_FRAME_TYPE_LIST(FRAME_CONST)
+
+#undef FRAME_CONST
+
+''' % sys.argv[0];
+
+footer = '''
+}
+'''
+
+#
+# Loads class hierarchy and type information from "objects.h".
+#
+def load_objects():
+ objfilename = sys.argv[2];
+ objfile = open(objfilename, 'r');
+ in_insttype = False;
+
+ typestr = '';
+
+ #
+ # Construct a dictionary for the classes we're sure should be present.
+ #
+ checktypes = {};
+ for klass in expected_classes:
+ checktypes[klass] = True;
+
+ #
+ # Iterate objects.h line-by-line to collect type and class information.
+ # For types, we accumulate a string representing the entire InstanceType
+ # enum definition and parse it later because it's easier to do so
+ # without the embedded newlines.
+ #
+ for line in objfile:
+ if (line.startswith('enum InstanceType {')):
+ in_insttype = True;
+ continue;
+
+ if (in_insttype and line.startswith('};')):
+ in_insttype = False;
+ continue;
+
+ line = re.sub('//.*', '', line.rstrip().lstrip());
+
+ if (in_insttype):
+ typestr += line;
+ continue;
+
+ match = re.match('class (\w[^\s:]*)(: public (\w[^\s{]*))?\s*{',
+ line);
+
+ if (match):
+ klass = match.group(1);
+ pklass = match.group(3);
+ klasses[klass] = { 'parent': pklass };
+
+ #
+ # Process the instance type declaration.
+ #
+ entries = typestr.split(',');
+ for entry in entries:
+ types[re.sub('\s*=.*', '', entry).lstrip()] = True;
+
+ #
+ # Infer class names for each type based on a systematic transformation.
+ # For example, "JS_FUNCTION_TYPE" becomes "JSFunction". We find the
+ # class for each type rather than the other way around because there are
+ # fewer cases where one type maps to more than one class than the other
+ # way around.
+ #
+ for type in types:
+ #
+ # Symbols and Strings are implemented using the same classes.
+ #
+ usetype = re.sub('SYMBOL_', 'STRING_', type);
+
+ #
+ # REGEXP behaves like REG_EXP, as in JS_REGEXP_TYPE => JSRegExp.
+ #
+ usetype = re.sub('_REGEXP_', '_REG_EXP_', usetype);
+
+ #
+ # Remove the "_TYPE" suffix and then convert to camel case,
+ # except that a "JS" prefix remains uppercase (as in
+ # "JS_FUNCTION_TYPE" => "JSFunction").
+ #
+ if (not usetype.endswith('_TYPE')):
+ continue;
+
+ usetype = usetype[0:len(usetype) - len('_TYPE')];
+ parts = usetype.split('_');
+ cctype = '';
+
+ if (parts[0] == 'JS'):
+ cctype = 'JS';
+ start = 1;
+ else:
+ cctype = '';
+ start = 0;
+
+ for ii in range(start, len(parts)):
+ part = parts[ii];
+ cctype += part[0].upper() + part[1:].lower();
+
+ #
+ # Mapping string types is more complicated. Both types and
+ # class names for Strings specify a representation (e.g., Seq,
+ # Cons, External, or Sliced) and an encoding (TwoByte or Ascii),
+ # In the simplest case, both of these are explicit in both
+ # names, as in:
+ #
+ # EXTERNAL_ASCII_STRING_TYPE => ExternalAsciiString
+ #
+ # However, either the representation or encoding can be omitted
+ # from the type name, in which case "Seq" and "TwoByte" are
+ # assumed, as in:
+ #
+ # STRING_TYPE => SeqTwoByteString
+ #
+ # Additionally, sometimes the type name has more information
+ # than the class, as in:
+ #
+ # CONS_ASCII_STRING_TYPE => ConsString
+ #
+ # To figure this out dynamically, we first check for a
+ # representation and encoding and add them if they're not
+ # present. If that doesn't yield a valid class name, then we
+ # strip out the representation.
+ #
+ if (cctype.endswith('String')):
+ if (cctype.find('Cons') == -1 and
+ cctype.find('External') == -1 and
+ cctype.find('Sliced') == -1):
+ if (cctype.find('Ascii') != -1):
+ cctype = re.sub('AsciiString$',
+ 'SeqAsciiString', cctype);
+ else:
+ cctype = re.sub('String$',
+ 'SeqString', cctype);
+
+ if (cctype.find('Ascii') == -1):
+ cctype = re.sub('String$', 'TwoByteString',
+ cctype);
+
+ if (not (cctype in klasses)):
+ cctype = re.sub('Ascii', '', cctype);
+ cctype = re.sub('TwoByte', '', cctype);
+
+ #
+ # Despite all that, some types have no corresponding class.
+ #
+ if (cctype in klasses):
+ typeclasses[type] = cctype;
+ if (cctype in checktypes):
+ del checktypes[cctype];
+
+ if (len(checktypes) > 0):
+ for klass in checktypes:
+ print('error: expected class \"%s\" not found' % klass);
+
+ sys.exit(1);
+
+
+#
+# For a given macro call, pick apart the arguments and return an object
+# describing the corresponding output constant. See load_fields().
+#
+def parse_field(call):
+ # Replace newlines with spaces.
+ for ii in range(0, len(call)):
+ if (call[ii] == '\n'):
+ call[ii] == ' ';
+
+ idx = call.find('(');
+ kind = call[0:idx];
+ rest = call[idx + 1: len(call) - 1];
+ args = re.split('\s*,\s*', rest);
+
+ consts = [];
+
+ if (kind == 'ACCESSORS' or kind == 'ACCESSORS_GCSAFE'):
+ klass = args[0];
+ field = args[1];
+ dtype = args[2];
+ offset = args[3];
+
+ return ({
+ 'name': 'class_%s__%s__%s' % (klass, field, dtype),
+ 'value': '%s::%s' % (klass, offset)
+ });
+
+ assert(kind == 'SMI_ACCESSORS');
+ klass = args[0];
+ field = args[1];
+ offset = args[2];
+
+ return ({
+ 'name': 'class_%s__%s__%s' % (klass, field, 'SMI'),
+ 'value': '%s::%s' % (klass, offset)
+ });
+
+#
+# Load field offset information from objects-inl.h.
+#
+def load_fields():
+ inlfilename = sys.argv[3];
+ inlfile = open(inlfilename, 'r');
+
+ #
+ # Each class's fields and the corresponding offsets are described in the
+ # source by calls to macros like "ACCESSORS" (and friends). All we do
+ # here is extract these macro invocations, taking into account that they
+ # may span multiple lines and may contain nested parentheses. We also
+ # call parse_field() to pick apart the invocation.
+ #
+ prefixes = [ 'ACCESSORS', 'ACCESSORS_GCSAFE', 'SMI_ACCESSORS' ];
+ current = '';
+ opens = 0;
+
+ for line in inlfile:
+ if (opens > 0):
+ # Continuation line
+ for ii in range(0, len(line)):
+ if (line[ii] == '('):
+ opens += 1;
+ elif (line[ii] == ')'):
+ opens -= 1;
+
+ if (opens == 0):
+ break;
+
+ current += line[0:ii + 1];
+ continue;
+
+ for prefix in prefixes:
+ if (not line.startswith(prefix + '(')):
+ continue;
+
+ if (len(current) > 0):
+ fields.append(parse_field(current));
+ current = '';
+
+ for ii in range(len(prefix), len(line)):
+ if (line[ii] == '('):
+ opens += 1;
+ elif (line[ii] == ')'):
+ opens -= 1;
+
+ if (opens == 0):
+ break;
+
+ current += line[0:ii + 1];
+
+ if (len(current) > 0):
+ fields.append(parse_field(current));
+ current = '';
+
+ for body in extras_accessors:
+ fields.append(parse_field('ACCESSORS(%s)' % body));
+
+#
+# Emit a block of constants.
+#
+def emit_set(out, consts):
+ for ii in range(0, len(consts)):
+ out.write('int v8dbg_%s = %s;\n' %
+ (consts[ii]['name'], consts[ii]['value']));
+ out.write('\n');
+
+#
+# Emit the whole output file.
+#
+def emit_config():
+ out = file(sys.argv[1], 'w');
+
+ out.write(header);
+
+ out.write('/* miscellaneous constants */\n');
+ emit_set(out, consts_misc);
+
+ out.write('/* class type information */\n');
+ consts = [];
+ keys = typeclasses.keys();
+ keys.sort();
+ for typename in keys:
+ klass = typeclasses[typename];
+ consts.append({
+ 'name': 'type_%s__%s' % (klass, typename),
+ 'value': typename
+ });
+
+ emit_set(out, consts);
+
+ out.write('/* class hierarchy information */\n');
+ consts = [];
+ keys = klasses.keys();
+ keys.sort();
+ for klassname in keys:
+ pklass = klasses[klassname]['parent'];
+ if (pklass == None):
+ continue;
+
+ consts.append({
+ 'name': 'parent_%s__%s' % (klassname, pklass),
+ 'value': 0
+ });
+
+ emit_set(out, consts);
+
+ out.write('/* field information */\n');
+ emit_set(out, fields);
+
+ out.write(footer);
+
+if (len(sys.argv) < 4):
+ print('usage: %s output.cc objects.h objects-inl.h' % sys.argv[0]);
+ sys.exit(2);
+
+load_objects();
+load_fields();
+emit_config();
diff --git a/src/3rdparty/v8/tools/grokdump.py b/src/3rdparty/v8/tools/grokdump.py
index 6bc49c6..9977289 100755
--- a/src/3rdparty/v8/tools/grokdump.py
+++ b/src/3rdparty/v8/tools/grokdump.py
@@ -52,6 +52,7 @@ Examples:
$ %prog 12345678-1234-1234-1234-123456789abcd-full.dmp
"""
+
DEBUG=False
@@ -233,6 +234,80 @@ MINIDUMP_CONTEXT_X86 = Descriptor([
MD_CONTEXT_X86_EXTENDED_REGISTERS))
])
+MD_CONTEXT_AMD64 = 0x00100000
+MD_CONTEXT_AMD64_CONTROL = (MD_CONTEXT_AMD64 | 0x00000001)
+MD_CONTEXT_AMD64_INTEGER = (MD_CONTEXT_AMD64 | 0x00000002)
+MD_CONTEXT_AMD64_SEGMENTS = (MD_CONTEXT_AMD64 | 0x00000004)
+MD_CONTEXT_AMD64_FLOATING_POINT = (MD_CONTEXT_AMD64 | 0x00000008)
+MD_CONTEXT_AMD64_DEBUG_REGISTERS = (MD_CONTEXT_AMD64 | 0x00000010)
+
+MINIDUMP_CONTEXT_AMD64 = Descriptor([
+ ("p1_home", ctypes.c_uint64),
+ ("p2_home", ctypes.c_uint64),
+ ("p3_home", ctypes.c_uint64),
+ ("p4_home", ctypes.c_uint64),
+ ("p5_home", ctypes.c_uint64),
+ ("p6_home", ctypes.c_uint64),
+ ("context_flags", ctypes.c_uint32),
+ ("mx_csr", ctypes.c_uint32),
+ # MD_CONTEXT_AMD64_CONTROL.
+ ("cs", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_CONTROL)),
+ # MD_CONTEXT_AMD64_SEGMENTS
+ ("ds", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_SEGMENTS)),
+ ("es", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_SEGMENTS)),
+ ("fs", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_SEGMENTS)),
+ ("gs", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_SEGMENTS)),
+ # MD_CONTEXT_AMD64_CONTROL.
+ ("ss", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_CONTROL)),
+ ("eflags", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_AMD64_CONTROL)),
+ # MD_CONTEXT_AMD64_DEBUG_REGISTERS.
+ ("dr0", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
+ ("dr1", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
+ ("dr2", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
+ ("dr3", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
+ ("dr6", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
+ ("dr7", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
+ # MD_CONTEXT_AMD64_INTEGER.
+ ("rax", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
+ ("rcx", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
+ ("rdx", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
+ ("rbx", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
+ # MD_CONTEXT_AMD64_CONTROL.
+ ("rsp", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_CONTROL)),
+ # MD_CONTEXT_AMD64_INTEGER.
+ ("rbp", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
+ ("rsi", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
+ ("rdi", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
+ ("r8", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
+ ("r9", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
+ ("r10", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
+ ("r11", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
+ ("r12", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
+ ("r13", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
+ ("r14", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
+ ("r15", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
+ # MD_CONTEXT_AMD64_CONTROL.
+ ("rip", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_CONTROL)),
+ # MD_CONTEXT_AMD64_FLOATING_POINT
+ ("sse_registers", EnableOnFlag(ctypes.c_uint8 * (16 * 26),
+ MD_CONTEXT_AMD64_FLOATING_POINT)),
+ ("vector_registers", EnableOnFlag(ctypes.c_uint8 * (16 * 26),
+ MD_CONTEXT_AMD64_FLOATING_POINT)),
+ ("vector_control", EnableOnFlag(ctypes.c_uint64,
+ MD_CONTEXT_AMD64_FLOATING_POINT)),
+ # MD_CONTEXT_AMD64_DEBUG_REGISTERS.
+ ("debug_control", EnableOnFlag(ctypes.c_uint64,
+ MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
+ ("last_branch_to_rip", EnableOnFlag(ctypes.c_uint64,
+ MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
+ ("last_branch_from_rip", EnableOnFlag(ctypes.c_uint64,
+ MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
+ ("last_exception_to_rip", EnableOnFlag(ctypes.c_uint64,
+ MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
+ ("last_exception_from_rip", EnableOnFlag(ctypes.c_uint64,
+ MD_CONTEXT_AMD64_DEBUG_REGISTERS))
+])
+
MINIDUMP_MEMORY_DESCRIPTOR = Descriptor([
("start", ctypes.c_uint64),
("memory", MINIDUMP_LOCATION_DESCRIPTOR.ctype)
@@ -269,6 +344,12 @@ MINIDUMP_THREAD_LIST = Descriptor([
("threads", lambda t: MINIDUMP_THREAD.ctype * t.thread_count)
])
+MINIDUMP_RAW_SYSTEM_INFO = Descriptor([
+ ("processor_architecture", ctypes.c_uint16)
+])
+
+MD_CPU_ARCHITECTURE_X86 = 0
+MD_CPU_ARCHITECTURE_AMD64 = 9
class MinidumpReader(object):
"""Minidump (.dmp) reader."""
@@ -288,20 +369,34 @@ class MinidumpReader(object):
for _ in xrange(self.header.stream_count):
directories.append(MINIDUMP_DIRECTORY.Read(self.minidump, offset))
offset += MINIDUMP_DIRECTORY.size
+ self.arch = None
self.exception = None
self.exception_context = None
self.memory_list = None
self.memory_list64 = None
self.thread_map = {}
+
+ # Find MDRawSystemInfo stream and determine arch.
+ for d in directories:
+ if d.stream_type == MD_SYSTEM_INFO_STREAM:
+ system_info = MINIDUMP_RAW_SYSTEM_INFO.Read(
+ self.minidump, d.location.rva)
+ self.arch = system_info.processor_architecture
+ assert self.arch in [MD_CPU_ARCHITECTURE_AMD64, MD_CPU_ARCHITECTURE_X86]
+ assert not self.arch is None
+
for d in directories:
DebugPrint(d)
- # TODO(vitalyr): extract system info including CPU features.
if d.stream_type == MD_EXCEPTION_STREAM:
self.exception = MINIDUMP_EXCEPTION_STREAM.Read(
self.minidump, d.location.rva)
DebugPrint(self.exception)
- self.exception_context = MINIDUMP_CONTEXT_X86.Read(
- self.minidump, self.exception.thread_context.rva)
+ if self.arch == MD_CPU_ARCHITECTURE_X86:
+ self.exception_context = MINIDUMP_CONTEXT_X86.Read(
+ self.minidump, self.exception.thread_context.rva)
+ elif self.arch == MD_CPU_ARCHITECTURE_AMD64:
+ self.exception_context = MINIDUMP_CONTEXT_AMD64.Read(
+ self.minidump, self.exception.thread_context.rva)
DebugPrint(self.exception_context)
elif d.stream_type == MD_THREAD_LIST_STREAM:
thread_list = MINIDUMP_THREAD_LIST.Read(self.minidump, d.location.rva)
@@ -335,6 +430,16 @@ class MinidumpReader(object):
location = self.FindLocation(address)
return ctypes.c_uint32.from_buffer(self.minidump, location).value
+ def ReadU64(self, address):
+ location = self.FindLocation(address)
+ return ctypes.c_uint64.from_buffer(self.minidump, location).value
+
+ def ReadUIntPtr(self, address):
+ if self.arch == MD_CPU_ARCHITECTURE_AMD64:
+ return self.ReadU64(address)
+ elif self.arch == MD_CPU_ARCHITECTURE_X86:
+ return self.ReadU32(address)
+
def ReadBytes(self, address, size):
location = self.FindLocation(address)
return self.minidump[location:location + size]
@@ -355,10 +460,15 @@ class MinidumpReader(object):
def GetDisasmLines(self, address, size):
location = self.FindLocation(address)
if location is None: return []
+ arch = None
+ if self.arch == MD_CPU_ARCHITECTURE_X86:
+ arch = "ia32"
+ elif self.arch == MD_CPU_ARCHITECTURE_AMD64:
+ arch = "x64"
return disasm.GetDisasmLines(self.minidump_name,
location,
size,
- "ia32",
+ arch,
False)
@@ -366,13 +476,40 @@ class MinidumpReader(object):
self.minidump.close()
self.minidump_file.close()
+ def ExceptionIP(self):
+ if self.arch == MD_CPU_ARCHITECTURE_AMD64:
+ return self.exception_context.rip
+ elif self.arch == MD_CPU_ARCHITECTURE_X86:
+ return self.exception_context.eip
+
+ def ExceptionSP(self):
+ if self.arch == MD_CPU_ARCHITECTURE_AMD64:
+ return self.exception_context.rsp
+ elif self.arch == MD_CPU_ARCHITECTURE_X86:
+ return self.exception_context.esp
+
+ def FormatIntPtr(self, value):
+ if self.arch == MD_CPU_ARCHITECTURE_AMD64:
+ return "%016x" % value
+ elif self.arch == MD_CPU_ARCHITECTURE_X86:
+ return "%08x" % value
+
+ def PointerSize(self):
+ if self.arch == MD_CPU_ARCHITECTURE_AMD64:
+ return 8
+ elif self.arch == MD_CPU_ARCHITECTURE_X86:
+ return 4
+
+ def Register(self, name):
+ return self.exception_context.__getattribute__(name)
+
# List of V8 instance types. Obtained by adding the code below to any .cc file.
#
-# #define DUMP_TYPE(T) printf("%d: \"%s\",\n", T, #T);
+# #define DUMP_TYPE(T) printf(" %d: \"%s\",\n", T, #T);
# struct P {
# P() {
-# printf("{\n");
+# printf("INSTANCE_TYPES = {\n");
# INSTANCE_TYPE_LIST(DUMP_TYPE)
# printf("}\n");
# }
@@ -386,13 +523,20 @@ INSTANCE_TYPES = {
66: "EXTERNAL_SYMBOL_TYPE",
74: "EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE",
70: "EXTERNAL_ASCII_SYMBOL_TYPE",
+ 82: "SHORT_EXTERNAL_SYMBOL_TYPE",
+ 90: "SHORT_EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE",
+ 86: "SHORT_EXTERNAL_ASCII_SYMBOL_TYPE",
0: "STRING_TYPE",
4: "ASCII_STRING_TYPE",
1: "CONS_STRING_TYPE",
5: "CONS_ASCII_STRING_TYPE",
+ 3: "SLICED_STRING_TYPE",
2: "EXTERNAL_STRING_TYPE",
10: "EXTERNAL_STRING_WITH_ASCII_DATA_TYPE",
6: "EXTERNAL_ASCII_STRING_TYPE",
+ 18: "SHORT_EXTERNAL_STRING_TYPE",
+ 26: "SHORT_EXTERNAL_STRING_WITH_ASCII_DATA_TYPE",
+ 22: "SHORT_EXTERNAL_ASCII_STRING_TYPE",
6: "PRIVATE_EXTERNAL_ASCII_STRING_TYPE",
128: "MAP_TYPE",
129: "CODE_TYPE",
@@ -401,43 +545,46 @@ INSTANCE_TYPES = {
132: "HEAP_NUMBER_TYPE",
133: "FOREIGN_TYPE",
134: "BYTE_ARRAY_TYPE",
- 135: "EXTERNAL_BYTE_ARRAY_TYPE",
- 136: "EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE",
- 137: "EXTERNAL_SHORT_ARRAY_TYPE",
- 138: "EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE",
- 139: "EXTERNAL_INT_ARRAY_TYPE",
- 140: "EXTERNAL_UNSIGNED_INT_ARRAY_TYPE",
- 141: "EXTERNAL_FLOAT_ARRAY_TYPE",
- 143: "EXTERNAL_PIXEL_ARRAY_TYPE",
- 145: "FILLER_TYPE",
- 146: "ACCESSOR_INFO_TYPE",
- 147: "ACCESS_CHECK_INFO_TYPE",
- 148: "INTERCEPTOR_INFO_TYPE",
- 149: "CALL_HANDLER_INFO_TYPE",
- 150: "FUNCTION_TEMPLATE_INFO_TYPE",
- 151: "OBJECT_TEMPLATE_INFO_TYPE",
- 152: "SIGNATURE_INFO_TYPE",
- 153: "TYPE_SWITCH_INFO_TYPE",
- 154: "SCRIPT_TYPE",
- 155: "CODE_CACHE_TYPE",
- 156: "POLYMORPHIC_CODE_CACHE_TYPE",
- 159: "FIXED_ARRAY_TYPE",
- 160: "SHARED_FUNCTION_INFO_TYPE",
- 161: "JS_MESSAGE_OBJECT_TYPE",
- 162: "JS_VALUE_TYPE",
- 163: "JS_OBJECT_TYPE",
- 164: "JS_CONTEXT_EXTENSION_OBJECT_TYPE",
- 165: "JS_GLOBAL_OBJECT_TYPE",
- 166: "JS_BUILTINS_OBJECT_TYPE",
- 167: "JS_GLOBAL_PROXY_TYPE",
- 168: "JS_ARRAY_TYPE",
- 169: "JS_PROXY_TYPE",
- 170: "JS_WEAK_MAP_TYPE",
- 171: "JS_REGEXP_TYPE",
- 172: "JS_FUNCTION_TYPE",
- 173: "JS_FUNCTION_PROXY_TYPE",
- 157: "DEBUG_INFO_TYPE",
- 158: "BREAK_POINT_INFO_TYPE",
+ 135: "FREE_SPACE_TYPE",
+ 136: "EXTERNAL_BYTE_ARRAY_TYPE",
+ 137: "EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE",
+ 138: "EXTERNAL_SHORT_ARRAY_TYPE",
+ 139: "EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE",
+ 140: "EXTERNAL_INT_ARRAY_TYPE",
+ 141: "EXTERNAL_UNSIGNED_INT_ARRAY_TYPE",
+ 142: "EXTERNAL_FLOAT_ARRAY_TYPE",
+ 144: "EXTERNAL_PIXEL_ARRAY_TYPE",
+ 146: "FILLER_TYPE",
+ 147: "ACCESSOR_INFO_TYPE",
+ 148: "ACCESSOR_PAIR_TYPE",
+ 149: "ACCESS_CHECK_INFO_TYPE",
+ 150: "INTERCEPTOR_INFO_TYPE",
+ 151: "CALL_HANDLER_INFO_TYPE",
+ 152: "FUNCTION_TEMPLATE_INFO_TYPE",
+ 153: "OBJECT_TEMPLATE_INFO_TYPE",
+ 154: "SIGNATURE_INFO_TYPE",
+ 155: "TYPE_SWITCH_INFO_TYPE",
+ 156: "SCRIPT_TYPE",
+ 157: "CODE_CACHE_TYPE",
+ 158: "POLYMORPHIC_CODE_CACHE_TYPE",
+ 161: "FIXED_ARRAY_TYPE",
+ 145: "FIXED_DOUBLE_ARRAY_TYPE",
+ 162: "SHARED_FUNCTION_INFO_TYPE",
+ 163: "JS_MESSAGE_OBJECT_TYPE",
+ 166: "JS_VALUE_TYPE",
+ 167: "JS_OBJECT_TYPE",
+ 168: "JS_CONTEXT_EXTENSION_OBJECT_TYPE",
+ 169: "JS_GLOBAL_OBJECT_TYPE",
+ 170: "JS_BUILTINS_OBJECT_TYPE",
+ 171: "JS_GLOBAL_PROXY_TYPE",
+ 172: "JS_ARRAY_TYPE",
+ 165: "JS_PROXY_TYPE",
+ 175: "JS_WEAK_MAP_TYPE",
+ 176: "JS_REGEXP_TYPE",
+ 177: "JS_FUNCTION_TYPE",
+ 164: "JS_FUNCTION_PROXY_TYPE",
+ 159: "DEBUG_INFO_TYPE",
+ 160: "BREAK_POINT_INFO_TYPE",
}
@@ -501,34 +648,36 @@ class HeapObject(object):
p.Print(str(self))
def __str__(self):
- return "HeapObject(%08x, %s)" % (self.address,
- INSTANCE_TYPES[self.map.instance_type])
+ return "HeapObject(%s, %s)" % (self.heap.reader.FormatIntPtr(self.address),
+ INSTANCE_TYPES[self.map.instance_type])
def ObjectField(self, offset):
- field_value = self.heap.reader.ReadU32(self.address + offset)
+ field_value = self.heap.reader.ReadUIntPtr(self.address + offset)
return self.heap.FindObjectOrSmi(field_value)
def SmiField(self, offset):
- field_value = self.heap.reader.ReadU32(self.address + offset)
+ field_value = self.heap.reader.ReadUIntPtr(self.address + offset)
assert (field_value & 1) == 0
return field_value / 2
class Map(HeapObject):
- INSTANCE_TYPE_OFFSET = 8
+ def InstanceTypeOffset(self):
+ return self.heap.PointerSize() + self.heap.IntSize()
def __init__(self, heap, map, address):
HeapObject.__init__(self, heap, map, address)
self.instance_type = \
- heap.reader.ReadU8(self.address + Map.INSTANCE_TYPE_OFFSET)
+ heap.reader.ReadU8(self.address + self.InstanceTypeOffset())
class String(HeapObject):
- LENGTH_OFFSET = 4
+ def LengthOffset(self):
+ return self.heap.PointerSize()
def __init__(self, heap, map, address):
HeapObject.__init__(self, heap, map, address)
- self.length = self.SmiField(String.LENGTH_OFFSET)
+ self.length = self.SmiField(self.LengthOffset())
def GetChars(self):
return "?string?"
@@ -541,11 +690,12 @@ class String(HeapObject):
class SeqString(String):
- CHARS_OFFSET = 12
+ def CharsOffset(self):
+ return self.heap.PointerSize() * 3
def __init__(self, heap, map, address):
String.__init__(self, heap, map, address)
- self.chars = heap.reader.ReadBytes(self.address + SeqString.CHARS_OFFSET,
+ self.chars = heap.reader.ReadBytes(self.address + self.CharsOffset(),
self.length)
def GetChars(self):
@@ -553,6 +703,7 @@ class SeqString(String):
class ExternalString(String):
+ # TODO(vegorov) fix ExternalString for X64 architecture
RESOURCE_OFFSET = 12
WEBKIT_RESOUCE_STRING_IMPL_OFFSET = 4
@@ -582,24 +733,28 @@ class ExternalString(String):
class ConsString(String):
- LEFT_OFFSET = 12
- RIGHT_OFFSET = 16
+ def LeftOffset(self):
+ return self.heap.PointerSize() * 3
+
+ def RightOffset(self):
+ return self.heap.PointerSize() * 4
def __init__(self, heap, map, address):
String.__init__(self, heap, map, address)
- self.left = self.ObjectField(ConsString.LEFT_OFFSET)
- self.right = self.ObjectField(ConsString.RIGHT_OFFSET)
+ self.left = self.ObjectField(self.LeftOffset())
+ self.right = self.ObjectField(self.RightOffset())
def GetChars(self):
return self.left.GetChars() + self.right.GetChars()
class Oddball(HeapObject):
- TO_STRING_OFFSET = 4
+ def ToStringOffset(self):
+ return self.heap.PointerSize()
def __init__(self, heap, map, address):
HeapObject.__init__(self, heap, map, address)
- self.to_string = self.ObjectField(Oddball.TO_STRING_OFFSET)
+ self.to_string = self.ObjectField(self.ToStringOffset())
def Print(self, p):
p.Print(str(self))
@@ -609,19 +764,23 @@ class Oddball(HeapObject):
class FixedArray(HeapObject):
- LENGTH_OFFSET = 4
- ELEMENTS_OFFSET = 8
+ def LengthOffset(self):
+ return self.heap.PointerSize()
+
+ def ElementsOffset(self):
+ return self.heap.PointerSize() * 2
def __init__(self, heap, map, address):
HeapObject.__init__(self, heap, map, address)
- self.length = self.SmiField(FixedArray.LENGTH_OFFSET)
+ self.length = self.SmiField(self.LengthOffset())
def Print(self, p):
- p.Print("FixedArray(%08x) {" % self.address)
+ p.Print("FixedArray(%s) {" % self.heap.reader.FormatIntPtr(self.address))
p.Indent()
p.Print("length: %d" % self.length)
+ base_offset = self.ElementsOffset()
for i in xrange(self.length):
- offset = FixedArray.ELEMENTS_OFFSET + 4 * i
+ offset = base_offset + 4 * i
p.Print("[%08d] = %s" % (i, self.ObjectField(offset)))
p.Dedent()
p.Print("}")
@@ -631,19 +790,22 @@ class FixedArray(HeapObject):
class JSFunction(HeapObject):
- CODE_ENTRY_OFFSET = 12
- SHARED_OFFSET = 20
+ def CodeEntryOffset(self):
+ return 3 * self.heap.PointerSize()
+
+ def SharedOffset(self):
+ return 5 * self.heap.PointerSize()
def __init__(self, heap, map, address):
HeapObject.__init__(self, heap, map, address)
code_entry = \
- heap.reader.ReadU32(self.address + JSFunction.CODE_ENTRY_OFFSET)
- self.code = heap.FindObject(code_entry - Code.ENTRY_OFFSET + 1)
- self.shared = self.ObjectField(JSFunction.SHARED_OFFSET)
+ heap.reader.ReadU32(self.address + self.CodeEntryOffset())
+ self.code = heap.FindObject(code_entry - Code.HeaderSize(heap) + 1)
+ self.shared = self.ObjectField(self.SharedOffset())
def Print(self, p):
source = "\n".join(" %s" % line for line in self._GetSource().split("\n"))
- p.Print("JSFunction(%08x) {" % self.address)
+ p.Print("JSFunction(%s) {" % self.heap.reader.FormatIntPtr(self.address))
p.Indent()
p.Print("inferred name: %s" % self.shared.inferred_name)
if self.shared.script.Is(Script) and self.shared.script.name.Is(String):
@@ -662,7 +824,8 @@ class JSFunction(HeapObject):
inferred_name = ""
if self.shared.Is(SharedFunctionInfo):
inferred_name = self.shared.inferred_name
- return "JSFunction(%08x, %s)" % (self.address, inferred_name)
+ return "JSFunction(%s, %s)" % \
+ (self.heap.reader.FormatIntPtr(self.address), inferred_name)
def _GetSource(self):
source = "?source?"
@@ -675,47 +838,75 @@ class JSFunction(HeapObject):
class SharedFunctionInfo(HeapObject):
- CODE_OFFSET = 2 * 4
- SCRIPT_OFFSET = 7 * 4
- INFERRED_NAME_OFFSET = 9 * 4
- START_POSITION_AND_TYPE_OFFSET = 17 * 4
- END_POSITION_OFFSET = 18 * 4
+ def CodeOffset(self):
+ return 2 * self.heap.PointerSize()
+
+ def ScriptOffset(self):
+ return 7 * self.heap.PointerSize()
+
+ def InferredNameOffset(self):
+ return 9 * self.heap.PointerSize()
+
+ def EndPositionOffset(self):
+ return 12 * self.heap.PointerSize() + 4 * self.heap.IntSize()
+
+ def StartPositionAndTypeOffset(self):
+ return 12 * self.heap.PointerSize() + 5 * self.heap.IntSize()
def __init__(self, heap, map, address):
HeapObject.__init__(self, heap, map, address)
- self.code = self.ObjectField(SharedFunctionInfo.CODE_OFFSET)
- self.script = self.ObjectField(SharedFunctionInfo.SCRIPT_OFFSET)
- self.inferred_name = \
- self.ObjectField(SharedFunctionInfo.INFERRED_NAME_OFFSET)
- start_position_and_type = \
- self.SmiField(SharedFunctionInfo.START_POSITION_AND_TYPE_OFFSET)
- self.start_position = start_position_and_type >> 2
- self.end_position = self.SmiField(SharedFunctionInfo.END_POSITION_OFFSET)
+ self.code = self.ObjectField(self.CodeOffset())
+ self.script = self.ObjectField(self.ScriptOffset())
+ self.inferred_name = self.ObjectField(self.InferredNameOffset())
+ if heap.PointerSize() == 8:
+ start_position_and_type = \
+ heap.reader.ReadU32(self.StartPositionAndTypeOffset())
+ self.start_position = start_position_and_type >> 2
+ pseudo_smi_end_position = \
+ heap.reader.ReadU32(self.EndPositionOffset())
+ self.end_position = pseudo_smi_end_position >> 2
+ else:
+ start_position_and_type = \
+ self.SmiField(self.StartPositionAndTypeOffset())
+ self.start_position = start_position_and_type >> 2
+ self.end_position = \
+ self.SmiField(self.EndPositionOffset())
class Script(HeapObject):
- SOURCE_OFFSET = 4
- NAME_OFFSET = 8
+ def SourceOffset(self):
+ return self.heap.PointerSize()
+
+ def NameOffset(self):
+ return self.SourceOffset() + self.heap.PointerSize()
def __init__(self, heap, map, address):
HeapObject.__init__(self, heap, map, address)
- self.source = self.ObjectField(Script.SOURCE_OFFSET)
- self.name = self.ObjectField(Script.NAME_OFFSET)
+ self.source = self.ObjectField(self.SourceOffset())
+ self.name = self.ObjectField(self.NameOffset())
class Code(HeapObject):
- INSTRUCTION_SIZE_OFFSET = 4
- ENTRY_OFFSET = 32
+ CODE_ALIGNMENT_MASK = (1 << 5) - 1
+
+ def InstructionSizeOffset(self):
+ return self.heap.PointerSize()
+
+ @staticmethod
+ def HeaderSize(heap):
+ return (heap.PointerSize() + heap.IntSize() + \
+ 4 * heap.PointerSize() + 3 * heap.IntSize() + \
+ Code.CODE_ALIGNMENT_MASK) & ~Code.CODE_ALIGNMENT_MASK
def __init__(self, heap, map, address):
HeapObject.__init__(self, heap, map, address)
- self.entry = self.address + Code.ENTRY_OFFSET
+ self.entry = self.address + Code.HeaderSize(heap)
self.instruction_size = \
- heap.reader.ReadU32(self.address + Code.INSTRUCTION_SIZE_OFFSET)
+ heap.reader.ReadU32(self.address + self.InstructionSizeOffset())
def Print(self, p):
lines = self.heap.reader.GetDisasmLines(self.entry, self.instruction_size)
- p.Print("Code(%08x) {" % self.address)
+ p.Print("Code(%s) {" % self.heap.reader.FormatIntPtr(self.address))
p.Indent()
p.Print("instruction_size: %d" % self.instruction_size)
p.PrintLines(self._FormatLine(line) for line in lines)
@@ -735,6 +926,9 @@ class V8Heap(object):
"EXTERNAL_SYMBOL_TYPE": ExternalString,
"EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE": ExternalString,
"EXTERNAL_ASCII_SYMBOL_TYPE": ExternalString,
+ "SHORT_EXTERNAL_SYMBOL_TYPE": ExternalString,
+ "SHORT_EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE": ExternalString,
+ "SHORT_EXTERNAL_ASCII_SYMBOL_TYPE": ExternalString,
"STRING_TYPE": SeqString,
"ASCII_STRING_TYPE": SeqString,
"CONS_STRING_TYPE": ConsString,
@@ -764,10 +958,10 @@ class V8Heap(object):
def FindObject(self, tagged_address):
if tagged_address in self.objects:
return self.objects[tagged_address]
- if (tagged_address & 1) != 1: return None
+ if (tagged_address & self.ObjectAlignmentMask()) != 1: return None
address = tagged_address - 1
if not self.reader.IsValidAddress(address): return None
- map_tagged_address = self.reader.ReadU32(address)
+ map_tagged_address = self.reader.ReadUIntPtr(address)
if tagged_address == map_tagged_address:
# Meta map?
meta_map = Map(self, None, address)
@@ -776,7 +970,7 @@ class V8Heap(object):
meta_map.map = meta_map
object = meta_map
else:
- map = self.FindObject(map_tagged_address)
+ map = self.FindMap(map_tagged_address)
if map is None: return None
instance_type_name = INSTANCE_TYPES.get(map.instance_type)
if instance_type_name is None: return None
@@ -785,9 +979,37 @@ class V8Heap(object):
self.objects[tagged_address] = object
return object
+ def FindMap(self, tagged_address):
+ if (tagged_address & self.MapAlignmentMask()) != 1: return None
+ address = tagged_address - 1
+ if not self.reader.IsValidAddress(address): return None
+ object = Map(self, None, address)
+ return object
+
+ def IntSize(self):
+ return 4
+
+ def PointerSize(self):
+ return self.reader.PointerSize()
+
+ def ObjectAlignmentMask(self):
+ return self.PointerSize() - 1
+
+ def MapAlignmentMask(self):
+ if self.reader.arch == MD_CPU_ARCHITECTURE_AMD64:
+ return (1 << 4) - 1
+ elif self.reader.arch == MD_CPU_ARCHITECTURE_X86:
+ return (1 << 5) - 1
+
EIP_PROXIMITY = 64
+CONTEXT_FOR_ARCH = {
+ MD_CPU_ARCHITECTURE_AMD64:
+ ['rax', 'rbx', 'rcx', 'rdx', 'rdi', 'rsi', 'rbp', 'rsp', 'rip'],
+ MD_CPU_ARCHITECTURE_X86:
+ ['eax', 'ebx', 'ecx', 'edx', 'edi', 'esi', 'ebp', 'esp', 'eip']
+}
def AnalyzeMinidump(options, minidump_name):
reader = MinidumpReader(options, minidump_name)
@@ -800,40 +1022,35 @@ def AnalyzeMinidump(options, minidump_name):
print " thread id: %d" % exception_thread.id
print " code: %08X" % reader.exception.exception.code
print " context:"
- print " eax: %08x" % reader.exception_context.eax
- print " ebx: %08x" % reader.exception_context.ebx
- print " ecx: %08x" % reader.exception_context.ecx
- print " edx: %08x" % reader.exception_context.edx
- print " edi: %08x" % reader.exception_context.edi
- print " esi: %08x" % reader.exception_context.esi
- print " ebp: %08x" % reader.exception_context.ebp
- print " esp: %08x" % reader.exception_context.esp
- print " eip: %08x" % reader.exception_context.eip
+ for r in CONTEXT_FOR_ARCH[reader.arch]:
+ print " %s: %s" % (r, reader.FormatIntPtr(reader.Register(r)))
# TODO(vitalyr): decode eflags.
print " eflags: %s" % bin(reader.exception_context.eflags)[2:]
print
+ stack_top = reader.ExceptionSP()
stack_bottom = exception_thread.stack.start + \
exception_thread.stack.memory.data_size
- stack_map = {reader.exception_context.eip: -1}
- for slot in xrange(reader.exception_context.esp, stack_bottom, 4):
- maybe_address = reader.ReadU32(slot)
+ stack_map = {reader.ExceptionIP(): -1}
+ for slot in xrange(stack_top, stack_bottom, reader.PointerSize()):
+ maybe_address = reader.ReadUIntPtr(slot)
if not maybe_address in stack_map:
stack_map[maybe_address] = slot
heap = V8Heap(reader, stack_map)
print "Disassembly around exception.eip:"
- start = reader.exception_context.eip - EIP_PROXIMITY
+ start = reader.ExceptionIP() - EIP_PROXIMITY
lines = reader.GetDisasmLines(start, 2 * EIP_PROXIMITY)
for line in lines:
print FormatDisasmLine(start, heap, line)
print
print "Annotated stack (from exception.esp to bottom):"
- for slot in xrange(reader.exception_context.esp, stack_bottom, 4):
- maybe_address = reader.ReadU32(slot)
+ for slot in xrange(stack_top, stack_bottom, reader.PointerSize()):
+ maybe_address = reader.ReadUIntPtr(slot)
heap_object = heap.FindObject(maybe_address)
- print "%08x: %08x" % (slot, maybe_address)
+ print "%s: %s" % (reader.FormatIntPtr(slot),
+ reader.FormatIntPtr(maybe_address))
if heap_object:
heap_object.Print(Printer())
print
diff --git a/src/3rdparty/v8/tools/gyp/v8.gyp b/src/3rdparty/v8/tools/gyp/v8.gyp
index b4d074b..46f85fe 100644
--- a/src/3rdparty/v8/tools/gyp/v8.gyp
+++ b/src/3rdparty/v8/tools/gyp/v8.gyp
@@ -1,4 +1,4 @@
-# Copyright 2011 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -32,6 +32,7 @@
'targets': [
{
'target_name': 'v8',
+ 'dependencies_traverse': 1,
'conditions': [
['want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
@@ -39,10 +40,16 @@
'toolsets': ['target'],
}],
['v8_use_snapshot=="true"', {
- 'dependencies': ['v8_snapshot'],
+ # The dependency on v8_base should come from a transitive
+ # dependency however the Android toolchain requires libv8_base.a
+ # to appear before libv8_snapshot.a so it's listed explicitly.
+ 'dependencies': ['v8_base', 'v8_snapshot'],
},
{
- 'dependencies': ['v8_nosnapshot'],
+ # The dependency on v8_base should come from a transitive
+ # dependency however the Android toolchain requires libv8_base.a
+ # to appear before libv8_snapshot.a so it's listed explicitly.
+ 'dependencies': ['v8_base', 'v8_nosnapshot'],
}],
['component=="shared_library"', {
'type': '<(component)',
@@ -52,6 +59,11 @@
'../../src/v8dll-main.cc',
],
'conditions': [
+ ['OS=="mac"', {
+ 'xcode_settings': {
+ 'OTHER_LDFLAGS': ['-dynamiclib', '-all_load']
+ },
+ }],
['OS=="win"', {
'defines': [
'BUILDING_V8_SHARED',
@@ -72,11 +84,7 @@
},
}],
['soname_version!=""', {
- # Ideally, we'd like to specify the full filename for the
- # library and set it to "libv8.so.<(soname_version)",
- # but currently the best we can do is use 'product_name' and
- # get "libv8-<(soname_version).so".
- 'product_name': 'v8-<(soname_version)',
+ 'product_extension': 'so.<(soname_version)',
}],
],
},
@@ -225,6 +233,9 @@
{
'target_name': 'v8_base',
'type': '<(library)',
+ 'variables': {
+ 'optimize': 'max',
+ },
'include_dirs+': [
'../../src',
],
@@ -240,8 +251,8 @@
'../../src/assembler.cc',
'../../src/assembler.h',
'../../src/ast.cc',
- '../../src/ast-inl.h',
'../../src/ast.h',
+ '../../src/atomicops.h',
'../../src/atomicops_internals_x86_gcc.cc',
'../../src/bignum.cc',
'../../src/bignum.h',
@@ -283,6 +294,8 @@
'../../src/cpu-profiler.h',
'../../src/data-flow.cc',
'../../src/data-flow.h',
+ '../../src/date.cc',
+ '../../src/date.h',
'../../src/dateparser.cc',
'../../src/dateparser.h',
'../../src/dateparser-inl.h',
@@ -326,7 +339,6 @@
'../../src/handles-inl.h',
'../../src/handles.cc',
'../../src/handles.h',
- '../../src/hashmap.cc',
'../../src/hashmap.h',
'../../src/heap-inl.h',
'../../src/heap.cc',
@@ -344,6 +356,8 @@
'../../src/incremental-marking.h',
'../../src/inspector.cc',
'../../src/inspector.h',
+ '../../src/interface.cc',
+ '../../src/interface.h',
'../../src/interpreter-irregexp.cc',
'../../src/interpreter-irregexp.h',
'../../src/json-parser.h',
@@ -351,6 +365,7 @@
'../../src/jsregexp.h',
'../../src/isolate.cc',
'../../src/isolate.h',
+ '../../src/lazy-instance.h',
'../../src/list-inl.h',
'../../src/list.h',
'../../src/lithium.cc',
@@ -381,8 +396,11 @@
'../../src/objects-visiting.h',
'../../src/objects.cc',
'../../src/objects.h',
+ '../../src/once.cc',
+ '../../src/once.h',
'../../src/parser.cc',
'../../src/parser.h',
+ '../../src/platform-posix.h',
'../../src/platform-tls-mac.h',
'../../src/platform-tls-win32.h',
'../../src/platform-tls.h',
@@ -396,6 +414,7 @@
'../../src/prettyprinter.h',
'../../src/property.cc',
'../../src/property.h',
+ '../../src/property-details.h',
'../../src/profile-generator-inl.h',
'../../src/profile-generator.cc',
'../../src/profile-generator.h',
@@ -554,6 +573,40 @@
'../../src/ia32/stub-cache-ia32.cc',
],
}],
+ ['v8_target_arch=="mips"', {
+ 'sources': [
+ '../../src/mips/assembler-mips.cc',
+ '../../src/mips/assembler-mips.h',
+ '../../src/mips/assembler-mips-inl.h',
+ '../../src/mips/builtins-mips.cc',
+ '../../src/mips/codegen-mips.cc',
+ '../../src/mips/codegen-mips.h',
+ '../../src/mips/code-stubs-mips.cc',
+ '../../src/mips/code-stubs-mips.h',
+ '../../src/mips/constants-mips.cc',
+ '../../src/mips/constants-mips.h',
+ '../../src/mips/cpu-mips.cc',
+ '../../src/mips/debug-mips.cc',
+ '../../src/mips/deoptimizer-mips.cc',
+ '../../src/mips/disasm-mips.cc',
+ '../../src/mips/frames-mips.cc',
+ '../../src/mips/frames-mips.h',
+ '../../src/mips/full-codegen-mips.cc',
+ '../../src/mips/ic-mips.cc',
+ '../../src/mips/lithium-codegen-mips.cc',
+ '../../src/mips/lithium-codegen-mips.h',
+ '../../src/mips/lithium-gap-resolver-mips.cc',
+ '../../src/mips/lithium-gap-resolver-mips.h',
+ '../../src/mips/lithium-mips.cc',
+ '../../src/mips/lithium-mips.h',
+ '../../src/mips/macro-assembler-mips.cc',
+ '../../src/mips/macro-assembler-mips.h',
+ '../../src/mips/regexp-macro-assembler-mips.cc',
+ '../../src/mips/regexp-macro-assembler-mips.h',
+ '../../src/mips/simulator-mips.cc',
+ '../../src/mips/stub-cache-mips.cc',
+ ],
+ }],
['v8_target_arch=="x64" or v8_target_arch=="mac" or OS=="mac"', {
'sources': [
'../../src/x64/assembler-x64-inl.h',
@@ -609,22 +662,23 @@
'../../src/platform-posix.cc',
],
'conditions': [
- ['host_os=="mac" and _toolset!="target"', {
- 'sources': [
- '../../src/platform-macos.cc'
- ]
+ ['host_os=="mac"', {
+ 'target_conditions': [
+ ['_toolset=="host"', {
+ 'sources': [
+ '../../src/platform-macos.cc'
+ ]
+ }, {
+ 'sources': [
+ '../../src/platform-linux.cc'
+ ]
+ }],
+ ],
}, {
'sources': [
'../../src/platform-linux.cc'
]
}],
- ['_toolset=="target"', {
- 'link_settings': {
- 'libraries': [
- '-llog',
- ],
- }
- }],
],
},
],
@@ -650,7 +704,22 @@
],
}
],
+ ['OS=="netbsd"', {
+ 'link_settings': {
+ 'libraries': [
+ '-L/usr/pkg/lib -Wl,-R/usr/pkg/lib -lexecinfo',
+ ]},
+ 'sources': [
+ '../../src/platform-openbsd.cc',
+ '../../src/platform-posix.cc'
+ ],
+ }
+ ],
['OS=="solaris"', {
+ 'link_settings': {
+ 'libraries': [
+ '-lsocket -lnsl',
+ ]},
'sources': [
'../../src/platform-solaris.cc',
'../../src/platform-posix.cc',
@@ -680,6 +749,11 @@
'V8_SHARED',
],
}],
+ ['v8_postmortem_support=="true"', {
+ 'sources': [
+ '<(SHARED_INTERMEDIATE_DIR)/debug-support.cc',
+ ]
+ }],
],
},
{
@@ -756,9 +830,38 @@
],
},
{
+ 'target_name': 'postmortem-metadata',
+ 'type': 'none',
+ 'variables': {
+ 'heapobject_files': [
+ '../../src/objects.h',
+ '../../src/objects-inl.h',
+ ],
+ },
+ 'actions': [
+ {
+ 'action_name': 'gen-postmortem-metadata',
+ 'inputs': [
+ '../../tools/gen-postmortem-metadata.py',
+ '<@(heapobject_files)',
+ ],
+ 'outputs': [
+ '<(SHARED_INTERMEDIATE_DIR)/debug-support.cc',
+ ],
+ 'action': [
+ 'python',
+ '../../tools/gen-postmortem-metadata.py',
+ '<@(_outputs)',
+ '<@(heapobject_files)'
+ ]
+ }
+ ]
+ },
+ {
'target_name': 'mksnapshot',
'type': 'executable',
'dependencies': [
+ 'v8_base',
'v8_nosnapshot',
],
'include_dirs+': [
@@ -817,6 +920,8 @@
'../../include/v8stdint.h',
'../../src/allocation.cc',
'../../src/allocation.h',
+ '../../src/atomicops.h',
+ '../../src/atomicops_internals_x86_gcc.cc',
'../../src/bignum.cc',
'../../src/bignum.h',
'../../src/bignum-dtoa.cc',
@@ -839,10 +944,11 @@
'../../src/fixed-dtoa.cc',
'../../src/fixed-dtoa.h',
'../../src/globals.h',
- '../../src/hashmap.cc',
'../../src/hashmap.h',
'../../src/list-inl.h',
'../../src/list.h',
+ '../../src/once.cc',
+ '../../src/once.h',
'../../src/preparse-data-format.h',
'../../src/preparse-data.cc',
'../../src/preparse-data.h',
diff --git a/src/3rdparty/v8/tools/js2c.py b/src/3rdparty/v8/tools/js2c.py
index fe6a72e..fa559f3 100644
--- a/src/3rdparty/v8/tools/js2c.py
+++ b/src/3rdparty/v8/tools/js2c.py
@@ -128,12 +128,13 @@ def ExpandMacros(lines, macros):
end = pattern_match.end()
assert lines[end - 1] == '('
last_match = end
- arg_index = 0
+ arg_index = [0] # Wrap state into array, to work around Python "scoping"
mapping = { }
def add_arg(str):
# Remember to expand recursively in the arguments
replacement = ExpandMacros(str.strip(), macros)
- mapping[macro.args[arg_index]] = replacement
+ mapping[macro.args[arg_index[0]]] = replacement
+ arg_index[0] += 1
while end < len(lines) and height > 0:
# We don't count commas at higher nesting levels.
if lines[end] == ',' and height == 1:
@@ -194,14 +195,14 @@ def ReadMacros(lines):
macro_match = MACRO_PATTERN.match(line)
if macro_match:
name = macro_match.group(1)
- args = args = [match.strip() for match in macro_match.group(2).split(',')]
+ args = map(string.strip, macro_match.group(2).split(','))
body = macro_match.group(3).strip()
macros.append((re.compile("\\b%s\\(" % name), TextMacro(args, body)))
else:
python_match = PYTHON_MACRO_PATTERN.match(line)
if python_match:
name = python_match.group(1)
- args = [match.strip() for match in python_match.group(2).split(',')]
+ args = map(string.strip, python_match.group(2).split(','))
body = python_match.group(3).strip()
fun = eval("lambda " + ",".join(args) + ': ' + body)
macros.append((re.compile("\\b%s\\(" % name), PythonMacro(args, fun)))
diff --git a/src/3rdparty/v8/tools/jsmin.py b/src/3rdparty/v8/tools/jsmin.py
index 395441b..e82f3d0 100644
--- a/src/3rdparty/v8/tools/jsmin.py
+++ b/src/3rdparty/v8/tools/jsmin.py
@@ -154,7 +154,7 @@ class JavaScriptMinifier(object):
return var_name
while True:
identifier_first_char = self.identifier_counter % 52
- identifier_second_char = self.identifier_counter // 52
+ identifier_second_char = self.identifier_counter / 52
new_identifier = self.CharFromNumber(identifier_first_char)
if identifier_second_char != 0:
new_identifier = (
@@ -232,7 +232,9 @@ class JavaScriptMinifier(object):
# A regexp that matches a regexp literal surrounded by /slashes/.
# Don't allow a regexp to have a ) before the first ( since that's a
# syntax error and it's probably just two unrelated slashes.
- slash_quoted_regexp = r"/(?:(?=\()|(?:[^()/\\]|\\.)+)(?:\([^/\\]|\\.)*/"
+ # Also don't allow it to come after anything that can only be the
+ # end of a primary expression.
+ slash_quoted_regexp = r"(?<![\w$'\")\]])/(?:(?=\()|(?:[^()/\\]|\\.)+)(?:\([^/\\]|\\.)*/"
# Replace multiple spaces with a single space.
line = re.sub("|".join([double_quoted_string,
single_quoted_string,
diff --git a/src/3rdparty/v8/tools/ll_prof.py b/src/3rdparty/v8/tools/ll_prof.py
index 30d10c3..51ba672 100755
--- a/src/3rdparty/v8/tools/ll_prof.py
+++ b/src/3rdparty/v8/tools/ll_prof.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
#
-# Copyright 2010 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -334,6 +334,7 @@ class LogReader(object):
_ARCH_TO_POINTER_TYPE_MAP = {
"ia32": ctypes.c_uint32,
"arm": ctypes.c_uint32,
+ "mips": ctypes.c_uint32,
"x64": ctypes.c_uint64
}
@@ -672,7 +673,9 @@ OBJDUMP_SECTION_HEADER_RE = re.compile(
OBJDUMP_SYMBOL_LINE_RE = re.compile(
r"^([a-f0-9]+)\s(.{7})\s(\S+)\s+([a-f0-9]+)\s+(?:\.hidden\s+)?(.*)$")
OBJDUMP_DYNAMIC_SYMBOLS_START_RE = re.compile(
- r"^DYNAMIC SYMBOL TABLE")
+ r"^DYNAMIC SYMBOL TABLE")
+OBJDUMP_SKIP_RE = re.compile(
+ r"^.*ld\.so\.cache$")
KERNEL_ALLSYMS_FILE = "/proc/kallsyms"
PERF_KERNEL_ALLSYMS_RE = re.compile(
r".*kallsyms.*")
@@ -691,6 +694,8 @@ class LibraryRepo(object):
# is 0.
if mmap_info.tid == 0 and not options.kernel:
return True
+ if OBJDUMP_SKIP_RE.match(mmap_info.filename):
+ return True
if PERF_KERNEL_ALLSYMS_RE.match(mmap_info.filename):
return self._LoadKernelSymbols(code_map)
self.infos.append(mmap_info)
diff --git a/src/3rdparty/v8/tools/merge-to-branch.sh b/src/3rdparty/v8/tools/merge-to-branch.sh
new file mode 100644
index 0000000..aa590a3
--- /dev/null
+++ b/src/3rdparty/v8/tools/merge-to-branch.sh
@@ -0,0 +1,280 @@
+#!/bin/bash
+# Copyright 2012 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+########## Global variable definitions
+
+BRANCHNAME=prepare-merge
+PERSISTFILE_BASENAME=/tmp/v8-merge-to-branch-tempfile
+ALREADY_MERGING_SENTINEL_FILE="$PERSISTFILE_BASENAME-already-merging"
+COMMIT_HASHES_FILE="$PERSISTFILE_BASENAME-PATCH_COMMIT_HASHES"
+TEMPORARY_PATCH_FILE="$PERSISTFILE_BASENAME-temporary-patch"
+
+########## Function definitions
+
+source $(dirname $BASH_SOURCE)/common-includes.sh
+
+usage() {
+cat << EOF
+usage: $0 [OPTIONS]... [BRANCH] [REVISION]...
+
+Performs the necessary steps to merge revisions from bleeding_edge
+to other branches, including trunk.
+
+OPTIONS:
+ -h Show this message
+ -s Specify the step where to start work. Default: 0.
+ -p Specify a patch file to apply as part of the merge
+ -m Specify a commit message for the patch
+ -r Reverse specified patches
+EOF
+}
+
+persist_patch_commit_hashes() {
+ echo "PATCH_COMMIT_HASHES=( ${PATCH_COMMIT_HASHES[@]} )" > $COMMIT_HASHES_FILE
+}
+
+restore_patch_commit_hashes() {
+ source $COMMIT_HASHES_FILE
+}
+
+restore_patch_commit_hashes_if_unset() {
+ [[ "${#PATCH_COMMIT_HASHES[@]}" == 0 ]] && restore_patch_commit_hashes
+ [[ "${#PATCH_COMMIT_HASHES[@]}" == 0 ]] && [[ -z "$EXTRA_PATCH" ]] && \
+ die "Variable PATCH_COMMIT_HASHES could not be restored."
+}
+
+########## Option parsing
+
+while getopts ":hs:fp:rm:" OPTION ; do
+ case $OPTION in
+ h) usage
+ exit 0
+ ;;
+ p) EXTRA_PATCH=$OPTARG
+ ;;
+ f) rm -f "$ALREADY_MERGING_SENTINEL_FILE"
+ ;;
+ r) REVERSE_PATCH="--reverse"
+ ;;
+ m) NEW_COMMIT_MSG=$OPTARG
+ ;;
+ s) START_STEP=$OPTARG
+ ;;
+ ?) echo "Illegal option: -$OPTARG"
+ usage
+ exit 1
+ ;;
+ esac
+done
+let OPTION_COUNT=$OPTIND-1
+shift $OPTION_COUNT
+
+########## Regular workflow
+
+# If there is a merge in progress, abort.
+[[ -e "$ALREADY_MERGING_SENTINEL_FILE" ]] && [[ $START_STEP -eq 0 ]] \
+ && die "A merge is already in progress"
+touch "$ALREADY_MERGING_SENTINEL_FILE"
+
+initial_environment_checks
+
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ if [ ${#@} -lt 2 ] ; then
+ if [ -z "$EXTRA_PATCH" ] ; then
+ die "Either a patch file or revision numbers must be specified"
+ fi
+ if [ -z "$NEW_COMMIT_MSG" ] ; then
+ die "You must specify a merge comment if no patches are specified"
+ fi
+ fi
+ echo ">>> Step $CURRENT_STEP: Preparation"
+ MERGE_TO_BRANCH=$1
+ [[ -n "$MERGE_TO_BRANCH" ]] || die "Please specify a branch to merge to"
+ shift
+ persist "MERGE_TO_BRANCH"
+ common_prepare
+fi
+
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Create a fresh branch for the patch."
+ restore_if_unset "MERGE_TO_BRANCH"
+ git checkout -b $BRANCHNAME svn/$MERGE_TO_BRANCH \
+ || die "Creating branch $BRANCHNAME failed."
+fi
+
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Find the git \
+revisions associated with the patches."
+ current=0
+ for REVISION in "$@" ; do
+ NEXT_HASH=$(git svn find-rev "r$REVISION" svn/bleeding_edge)
+ [[ -n "$NEXT_HASH" ]] \
+ || die "Cannot determine git hash for r$REVISION"
+ PATCH_COMMIT_HASHES[$current]="$NEXT_HASH"
+ [[ -n "$REVISION_LIST" ]] && REVISION_LIST="$REVISION_LIST,"
+ REVISION_LIST="$REVISION_LIST r$REVISION"
+ let current+=1
+ done
+ if [ -n "$REVISION_LIST" ] ; then
+ if [ -n "$REVERSE_PATCH" ] ; then
+ NEW_COMMIT_MSG="Rollback of$REVISION_LIST in $MERGE_TO_BRANCH branch."
+ else
+ NEW_COMMIT_MSG="Merged$REVISION_LIST into $MERGE_TO_BRANCH branch."
+ fi;
+ fi;
+
+ echo "$NEW_COMMIT_MSG" > $COMMITMSG_FILE
+ echo "" >> $COMMITMSG_FILE
+ for HASH in ${PATCH_COMMIT_HASHES[@]} ; do
+ PATCH_MERGE_DESCRIPTION=$(git log -1 --format=%s $HASH)
+ echo "$PATCH_MERGE_DESCRIPTION" >> $COMMITMSG_FILE
+ echo "" >> $COMMITMSG_FILE
+ done
+ for HASH in ${PATCH_COMMIT_HASHES[@]} ; do
+ BUG=$(git log -1 $HASH | grep "BUG=" | awk -F '=' '{print $NF}')
+ if [ -n "$BUG" ] ; then
+ [[ -n "$BUG_AGGREGATE" ]] && BUG_AGGREGATE="$BUG_AGGREGATE,"
+ BUG_AGGREGATE="$BUG_AGGREGATE$BUG"
+ fi
+ done
+ if [ -n "$BUG_AGGREGATE" ] ; then
+ echo "BUG=$BUG_AGGREGATE" >> $COMMITMSG_FILE
+ fi
+ persist "NEW_COMMIT_MSG"
+ persist "REVISION_LIST"
+ persist_patch_commit_hashes
+fi
+
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Apply patches for selected revisions."
+ restore_if_unset "MERGE_TO_BRANCH"
+ restore_patch_commit_hashes_if_unset "PATCH_COMMIT_HASHES"
+ rm -f "$TOUCHED_FILES_FILE"
+ for HASH in ${PATCH_COMMIT_HASHES[@]} ; do
+ echo "Applying patch for $HASH to $MERGE_TO_BRANCH..."
+ git log -1 -p $HASH > "$TEMPORARY_PATCH_FILE"
+ apply_patch "$TEMPORARY_PATCH_FILE"
+ done
+ if [ -n "$EXTRA_PATCH" ] ; then
+ apply_patch "$EXTRA_PATCH"
+ fi
+ stage_files
+fi
+
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Prepare $VERSION_FILE."
+ # These version numbers are used again for creating the tag
+ read_and_persist_version
+fi
+
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Increment version number."
+ restore_if_unset "PATCH"
+ NEWPATCH=$(($PATCH + 1))
+ confirm "Automatically increment PATCH_LEVEL? (Saying 'n' will fire up \
+your EDITOR on $VERSION_FILE so you can make arbitrary changes. When \
+you're done, save the file and exit your EDITOR.)"
+ if [ $? -eq 0 ] ; then
+ sed -e "/#define PATCH_LEVEL/s/[0-9]*$/$NEWPATCH/" \
+ -i "$VERSION_FILE"
+ else
+ $EDITOR "$VERSION_FILE"
+ fi
+ read_and_persist_version "NEW"
+fi
+
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Commit to local branch."
+ git commit -a -F "$COMMITMSG_FILE" \
+ || die "'git commit -a' failed."
+fi
+
+upload_step
+
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Commit to the repository."
+ restore_if_unset "MERGE_TO_BRANCH"
+ git checkout $BRANCHNAME \
+ || die "cannot ensure that the current branch is $BRANCHNAME"
+ wait_for_lgtm
+ git cl dcommit || die "failed to commit to $MERGE_TO_BRANCH"
+fi
+
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Determine svn commit revision"
+ restore_if_unset "NEW_COMMIT_MSG"
+ restore_if_unset "MERGE_TO_BRANCH"
+ git svn fetch || die "'git svn fetch' failed."
+ COMMIT_HASH=$(git log -1 --format=%H --grep="$NEW_COMMIT_MSG" \
+ svn/$MERGE_TO_BRANCH)
+ [[ -z "$COMMIT_HASH" ]] && die "Unable to map git commit to svn revision"
+ SVN_REVISION=$(git svn find-rev $COMMIT_HASH)
+ echo "subversion revision number is r$SVN_REVISION"
+ persist "SVN_REVISION"
+fi
+
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Create the tag."
+ restore_if_unset "SVN_REVISION"
+ restore_version_if_unset "NEW"
+ echo "Creating tag svn/tags/$NEWMAJOR.$NEWMINOR.$NEWBUILD.$NEWPATCH"
+ if [ "$MERGE_TO_BRANCH" == "trunk" ] ; then
+ TO_URL="$MERGE_TO_BRANCH"
+ else
+ TO_URL="branches/$MERGE_TO_BRANCH"
+ fi
+ svn copy -r $SVN_REVISION \
+ https://v8.googlecode.com/svn/$TO_URL \
+ https://v8.googlecode.com/svn/tags/$NEWMAJOR.$NEWMINOR.$NEWBUILD.$NEWPATCH \
+ -m "Tagging version $NEWMAJOR.$NEWMINOR.$NEWBUILD.$NEWPATCH"
+ persist "TO_URL"
+fi
+
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Cleanup."
+ restore_if_unset "SVN_REVISION"
+ restore_if_unset "TO_URL"
+ restore_if_unset "REVISION_LIST"
+ restore_version_if_unset "NEW"
+ common_cleanup
+ echo "*** SUMMARY ***"
+ echo "version: $NEWMAJOR.$NEWMINOR.$NEWBUILD.$NEWPATCH"
+ echo "branch: $TO_URL"
+ echo "svn revision: $SVN_REVISION"
+ [[ -n "$REVISION_LIST" ]] && echo "patches:$REVISION_LIST"
+fi
diff --git a/src/3rdparty/v8/tools/presubmit.py b/src/3rdparty/v8/tools/presubmit.py
index 7af6e3d..a5f4c61 100755
--- a/src/3rdparty/v8/tools/presubmit.py
+++ b/src/3rdparty/v8/tools/presubmit.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
#
-# Copyright 2011 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -42,6 +42,7 @@ import pickle
import re
import sys
import subprocess
+import multiprocessing
from subprocess import PIPE
# Disabled LINT rules and reason.
@@ -101,6 +102,33 @@ whitespace/todo
""".split()
+LINT_OUTPUT_PATTERN = re.compile(r'^.+[:(]\d+[:)]|^Done processing')
+
+
+def CppLintWorker(command):
+ try:
+ process = subprocess.Popen(command, stderr=subprocess.PIPE)
+ process.wait()
+ out_lines = ""
+ error_count = -1
+ while True:
+ out_line = process.stderr.readline()
+ if out_line == '' and process.poll() != None:
+ break
+ m = LINT_OUTPUT_PATTERN.match(out_line)
+ if m:
+ out_lines += out_line
+ error_count += 1
+ sys.stderr.write(out_lines)
+ return error_count
+ except KeyboardInterrupt:
+ process.kill()
+ except:
+ print('Error running cpplint.py. Please make sure you have depot_tools' +
+ ' in your $PATH. Lint check skipped.')
+ process.kill()
+
+
class FileContentsCache(object):
def __init__(self, sums_file_name):
@@ -206,29 +234,28 @@ class CppLintProcessor(SourceFileProcessor):
return True
filt = '-,' + ",".join(['+' + n for n in ENABLED_LINT_RULES])
- command = ['cpplint.py', '--filter', filt] + join(files)
+ command = ['cpplint.py', '--filter', filt]
local_cpplint = join(path, "tools", "cpplint.py")
if exists(local_cpplint):
- command = ['python', local_cpplint, '--filter', filt] + join(files)
+ command = ['python', local_cpplint, '--filter', filt]
+ commands = join([command + [file] for file in files])
+ count = multiprocessing.cpu_count()
+ pool = multiprocessing.Pool(count)
try:
- process = subprocess.Popen(command, stderr=subprocess.PIPE)
- except:
- print('Error running cpplint.py. Please make sure you have depot_tools' +
- ' in your $PATH. Lint check skipped.')
- return True
- LINT_ERROR_PATTERN = re.compile(r'^(.+)[:(]\d+[:)]')
- while True:
- out_line = process.stderr.readline()
- if out_line == '' and process.poll() != None:
- break
- sys.stderr.write(out_line)
- m = LINT_ERROR_PATTERN.match(out_line)
- if m:
- good_files_cache.RemoveFile(m.group(1))
+ results = pool.map_async(CppLintWorker, commands).get(999999)
+ except KeyboardInterrupt:
+ print "\nCaught KeyboardInterrupt, terminating workers."
+ sys.exit(1)
+
+ for i in range(len(files)):
+ if results[i] > 0:
+ good_files_cache.RemoveFile(files[i])
+ total_errors = sum(results)
+ print "Total errors found: %d" % total_errors
good_files_cache.Save()
- return process.returncode == 0
+ return total_errors == 0
COPYRIGHT_HEADER_PATTERN = re.compile(
diff --git a/src/3rdparty/v8/tools/push-to-trunk.sh b/src/3rdparty/v8/tools/push-to-trunk.sh
index bd5d003..3fb5b34 100755
--- a/src/3rdparty/v8/tools/push-to-trunk.sh
+++ b/src/3rdparty/v8/tools/push-to-trunk.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2011 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -31,18 +31,13 @@
BRANCHNAME=prepare-push
TRUNKBRANCH=trunk-push
-TEMP_BRANCH=v8-push-to-trunk-script-temporary-branch
-VERSION_FILE="src/version.cc"
PERSISTFILE_BASENAME=/tmp/v8-push-to-trunk-tempfile
-CHANGELOG_ENTRY_FILE="$PERSISTFILE_BASENAME-changelog-entry"
-PATCH_FILE="$PERSISTFILE_BASENAME-patch"
-COMMITMSG_FILE="$PERSISTFILE_BASENAME-commitmsg"
-TOUCHED_FILES_FILE="$PERSISTFILE_BASENAME-touched-files"
-STEP=0
-
+CHROME_PATH=
########## Function definitions
+source $(dirname $BASH_SOURCE)/common-includes.sh
+
usage() {
cat << EOF
usage: $0 OPTIONS
@@ -54,71 +49,24 @@ OPTIONS:
-h Show this message
-s Specify the step where to start work. Default: 0.
-l Manually specify the git commit ID of the last push to trunk.
+ -c Specify the path to your Chromium src/ directory to automate the
+ V8 roll.
EOF
}
-die() {
- [[ -n "$1" ]] && echo "Error: $1"
- echo "Exiting."
- exit 1
-}
-
-confirm() {
- echo -n "$1 [Y/n] "
- read ANSWER
- if [[ -z "$ANSWER" || "$ANSWER" == "Y" || "$ANSWER" == "y" ]] ; then
- return 0
- else
- return 1
- fi
-}
-
-delete_branch() {
- local MATCH=$(git branch | grep $1 | awk '{print $NF}' )
- if [ "$MATCH" == "$1" ] ; then
- confirm "Branch $1 exists, do you want to delete it?"
- if [ $? -eq 0 ] ; then
- git branch -D $1 || die "Deleting branch '$1' failed."
- echo "Branch $1 deleted."
- else
- die "Can't continue. Please delete branch $1 and try again."
- fi
- fi
-}
-
-# Persist and restore variables to support canceling/resuming execution
-# of this script.
-persist() {
- local VARNAME=$1
- local FILE="$PERSISTFILE_BASENAME-$VARNAME"
- echo "${!VARNAME}" > $FILE
-}
-
-restore() {
- local VARNAME=$1
- local FILE="$PERSISTFILE_BASENAME-$VARNAME"
- local VALUE="$(cat $FILE)"
- eval "$VARNAME=\"$VALUE\""
-}
-
-restore_if_unset() {
- local VARNAME=$1
- [[ -z "${!VARNAME}" ]] && restore "$VARNAME"
- [[ -z "${!VARNAME}" ]] && die "Variable '$VARNAME' could not be restored."
-}
-
-
########## Option parsing
-while getopts ":hs:l:" OPTION ; do
+while getopts ":hs:l:c:" OPTION ; do
case $OPTION in
h) usage
exit 0
;;
- s) STEP=$OPTARG
+ s) START_STEP=$OPTARG
;;
l) LASTPUSH=$OPTARG
;;
+ c) CHROME_PATH=$OPTARG
+ ;;
?) echo "Illegal option: -$OPTARG"
usage
exit 1
@@ -129,46 +77,24 @@ done
########## Regular workflow
-# Cancel if this is not a git checkout.
-[[ -d .git ]] \
- || die "This is not a git checkout, this script won't work for you."
-
-# Cancel if EDITOR is unset or not executable.
-[[ -n "$EDITOR" && -x "$(which $EDITOR)" ]] \
- || die "Please set your EDITOR environment variable, you'll need it."
-
-if [ $STEP -le 0 ] ; then
- echo ">>> Step 0: Preparation"
- # Check for a clean workdir.
- [[ -z "$(git status -s -uno)" ]] \
- || die "Workspace is not clean. Please commit or undo your changes."
-
- # Persist current branch.
- CURRENT_BRANCH=$(git status -s -b -uno | grep "^##" | awk '{print $2}')
- persist "CURRENT_BRANCH"
- # Get ahold of a safe temporary branch and check it out.
- if [ "$CURRENT_BRANCH" != "$TEMP_BRANCH" ] ; then
- delete_branch $TEMP_BRANCH
- git checkout -b $TEMP_BRANCH
- fi
- # Delete branches if they exist.
- delete_branch $BRANCHNAME
- delete_branch $TRUNKBRANCH
-fi
+initial_environment_checks
-if [ $STEP -le 1 ] ; then
- echo ">>> Step 1: Fetch unfetched revisions."
- git svn fetch || die "'git svn fetch' failed."
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Preparation"
+ common_prepare
+ delete_branch $TRUNKBRANCH
fi
-if [ $STEP -le 2 ] ; then
- echo ">>> Step 2: Create a fresh branch."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Create a fresh branch."
git checkout -b $BRANCHNAME svn/bleeding_edge \
|| die "Creating branch $BRANCHNAME failed."
fi
-if [ $STEP -le 3 ] ; then
- echo ">>> Step 3: Detect commit ID of last push to trunk."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Detect commit ID of last push to trunk."
[[ -n "$LASTPUSH" ]] || LASTPUSH=$(git log -1 --format=%H ChangeLog)
LOOP=1
while [ $LOOP -eq 1 ] ; do
@@ -184,15 +110,11 @@ if [ $STEP -le 3 ] ; then
persist "LASTPUSH"
fi
-if [ $STEP -le 4 ] ; then
- echo ">>> Step 4: Prepare raw ChangeLog entry."
-# These version numbers are used again later for the trunk commit.
- MAJOR=$(grep "#define MAJOR_VERSION" "$VERSION_FILE" | awk '{print $NF}')
- persist "MAJOR"
- MINOR=$(grep "#define MINOR_VERSION" "$VERSION_FILE" | awk '{print $NF}')
- persist "MINOR"
- BUILD=$(grep "#define BUILD_NUMBER" "$VERSION_FILE" | awk '{print $NF}')
- persist "BUILD"
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Prepare raw ChangeLog entry."
+ # These version numbers are used again later for the trunk commit.
+ read_and_persist_version
DATE=$(date +%Y-%m-%d)
persist "DATE"
@@ -205,7 +127,7 @@ if [ $STEP -le 4 ] ; then
# Grep for "BUG=xxxx" lines in the commit message and convert them to
# "(issue xxxx)".
git log -1 $commit --format="%B" \
- | grep "^BUG=" | grep -v "BUG=$" \
+ | grep "^BUG=" | grep -v "BUG=$" | grep -v "BUG=none$" \
| sed -e 's/^/ /' \
| sed -e 's/BUG=v8:\(.*\)$/(issue \1)/' \
| sed -e 's/BUG=\(.*\)$/(Chromium issue \1)/' \
@@ -214,10 +136,13 @@ if [ $STEP -le 4 ] ; then
git log -1 $commit --format="%w(80,8,8)(%an)" >> "$CHANGELOG_ENTRY_FILE"
echo "" >> "$CHANGELOG_ENTRY_FILE"
done
+ echo " Performance and stability improvements on all platforms." \
+ >> "$CHANGELOG_ENTRY_FILE"
fi
-if [ $STEP -le 5 ] ; then
- echo ">>> Step 5: Edit ChangeLog entry."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Edit ChangeLog entry."
echo -n "Please press <Return> to have your EDITOR open the ChangeLog entry, \
then edit its contents to your liking. When you're done, save the file and \
exit your EDITOR. "
@@ -225,7 +150,13 @@ exit your EDITOR. "
$EDITOR "$CHANGELOG_ENTRY_FILE"
NEWCHANGELOG=$(mktemp)
# Eliminate any trailing newlines by going through a shell variable.
- CHANGELOGENTRY=$(cat "$CHANGELOG_ENTRY_FILE")
+ # Also (1) eliminate tabs, (2) fix too little and (3) too much indentation,
+ # and (4) eliminate trailing whitespace.
+ CHANGELOGENTRY=$(cat "$CHANGELOG_ENTRY_FILE" \
+ | sed -e 's/\t/ /g' \
+ | sed -e 's/^ \{1,7\}\([^ ]\)/ \1/g' \
+ | sed -e 's/^ \{9,80\}\([^ ]\)/ \1/g' \
+ | sed -e 's/ \+$//')
[[ -n "$CHANGELOGENTRY" ]] || die "Empty ChangeLog entry."
echo "$CHANGELOGENTRY" > "$NEWCHANGELOG"
echo "" >> "$NEWCHANGELOG" # Explicitly insert two empty lines.
@@ -234,8 +165,9 @@ exit your EDITOR. "
mv "$NEWCHANGELOG" ChangeLog
fi
-if [ $STEP -le 6 ] ; then
- echo ">>> Step 6: Increment version number."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Increment version number."
restore_if_unset "BUILD"
NEWBUILD=$(($BUILD + 1))
confirm "Automatically increment BUILD_NUMBER? (Saying 'n' will fire up \
@@ -247,42 +179,26 @@ you're done, save the file and exit your EDITOR.)"
else
$EDITOR "$VERSION_FILE"
fi
- NEWMAJOR=$(grep "#define MAJOR_VERSION" "$VERSION_FILE" | awk '{print $NF}')
- persist "NEWMAJOR"
- NEWMINOR=$(grep "#define MINOR_VERSION" "$VERSION_FILE" | awk '{print $NF}')
- persist "NEWMINOR"
- NEWBUILD=$(grep "#define BUILD_NUMBER" "$VERSION_FILE" | awk '{print $NF}')
- persist "NEWBUILD"
+ read_and_persist_version "NEW"
fi
-if [ $STEP -le 7 ] ; then
- echo ">>> Step 7: Commit to local branch."
- restore_if_unset "NEWMAJOR"
- restore_if_unset "NEWMINOR"
- restore_if_unset "NEWBUILD"
- git commit -a -m "Prepare push to trunk. \
-Now working on version $NEWMAJOR.$NEWMINOR.$NEWBUILD." \
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Commit to local branch."
+ restore_version_if_unset "NEW"
+ PREPARE_COMMIT_MSG="Prepare push to trunk. \
+Now working on version $NEWMAJOR.$NEWMINOR.$NEWBUILD."
+ persist "PREPARE_COMMIT_MSG"
+ git commit -a -m "$PREPARE_COMMIT_MSG" \
|| die "'git commit -a' failed."
fi
-if [ $STEP -le 8 ] ; then
- echo ">>> Step 8: Upload for code review."
- echo -n "Please enter the email address of a V8 reviewer for your patch: "
- read REVIEWER
- git cl upload -r $REVIEWER --send-mail \
- || die "'git cl upload' failed, please try again."
-fi
+upload_step
-if [ $STEP -le 9 ] ; then
- echo ">>> Step 9: Commit to the repository."
- echo "Please wait for an LGTM, then type \"LGTM<Return>\" to commit your \
-change. (If you need to iterate on the patch, do so in another shell.)"
- unset ANSWER
- while [ "$ANSWER" != "LGTM" ] ; do
- [[ -n "$ANSWER" ]] && echo "That was not 'LGTM'."
- echo -n "> "
- read ANSWER
- done
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Commit to the repository."
+ wait_for_lgtm
# Re-read the ChangeLog entry (to pick up possible changes).
cat ChangeLog | awk --posix '{
if ($0 ~ /^[0-9]{4}-[0-9]{2}-[0-9]{2}:/) {
@@ -297,16 +213,24 @@ change. (If you need to iterate on the patch, do so in another shell.)"
git cl dcommit || die "'git cl dcommit' failed, please try again."
fi
-if [ $STEP -le 10 ] ; then
- echo ">>> Step 10: NOP"
- # Present in the manual guide, not necessary (even harmful!) for this script.
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Fetch straggler commits that sneaked in \
+since this script was started."
+ git svn fetch || die "'git svn fetch' failed."
+ git checkout svn/bleeding_edge
+ restore_if_unset "PREPARE_COMMIT_MSG"
+ PREPARE_COMMIT_HASH=$(git log -1 --format=%H --grep="$PREPARE_COMMIT_MSG")
+ persist "PREPARE_COMMIT_HASH"
fi
-if [ $STEP -le 11 ] ; then
- echo ">>> Step 11: Squash commits into one."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Squash commits into one."
# Instead of relying on "git rebase -i", we'll just create a diff, because
# that's easier to automate.
- git diff svn/trunk > "$PATCH_FILE"
+ restore_if_unset "PREPARE_COMMIT_HASH"
+ git diff svn/trunk $PREPARE_COMMIT_HASH > "$PATCH_FILE"
# Convert the ChangeLog entry to commit message format:
# - remove date
# - remove indentation
@@ -328,54 +252,29 @@ if [ $STEP -le 11 ] ; then
need_space = 1;
}
}' > "$COMMITMSG_FILE" || die "Commit message editing failed."
- LOOP=1
- while [ $LOOP -eq 1 ] ; do
- echo "This is the trunk commit message:"
- echo "--------------------"
- cat "$COMMITMSG_FILE"
- echo -e "\n--------------------"
- confirm "Does this look good to you? (Saying 'n' will fire up your \
-EDITOR so you can change the commit message. When you're done, save the \
-file and exit your EDITOR.)"
- if [ $? -eq 0 ] ; then
- LOOP=0
- else
- $EDITOR "$COMMITMSG_FILE"
- fi
- done
rm -f "$CHANGELOG_ENTRY_FILE"
fi
-if [ $STEP -le 12 ] ; then
- echo ">>> Step 12: Create a new branch from trunk."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Create a new branch from trunk."
git checkout -b $TRUNKBRANCH svn/trunk \
|| die "Checking out a new branch '$TRUNKBRANCH' failed."
fi
-if [ $STEP -le 13 ] ; then
- echo ">>> Step 13: Apply squashed changes."
- patch -p1 < "$PATCH_FILE" | tee >(awk '{print $NF}' >> "$TOUCHED_FILES_FILE")
- [[ $? -eq 0 ]] || die "Applying the patch to trunk failed."
- # Stage added and modified files.
- TOUCHED_FILES=$(cat "$TOUCHED_FILES_FILE")
- for FILE in $TOUCHED_FILES ; do
- git add "$FILE"
- done
- # Stage deleted files.
- DELETED_FILES=$(git status -s -uno --porcelain | grep "^ D" \
- | awk '{print $NF}')
- for FILE in $DELETED_FILES ; do
- git rm "$FILE"
- done
- rm -f "$PATCH_FILE"
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Apply squashed changes."
rm -f "$TOUCHED_FILES_FILE"
+ apply_patch "$PATCH_FILE"
+ stage_files
+ rm -f "$PATCH_FILE"
fi
-if [ $STEP -le 14 ] ; then
- echo ">>> Step 14: Set correct version for trunk."
- restore_if_unset "MAJOR"
- restore_if_unset "MINOR"
- restore_if_unset "BUILD"
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Set correct version for trunk."
+ restore_version_if_unset
sed -e "/#define MAJOR_VERSION/s/[0-9]*$/$MAJOR/" \
-e "/#define MINOR_VERSION/s/[0-9]*$/$MINOR/" \
-e "/#define BUILD_NUMBER/s/[0-9]*$/$BUILD/" \
@@ -384,52 +283,110 @@ if [ $STEP -le 14 ] ; then
-i "$VERSION_FILE" || die "Patching $VERSION_FILE failed."
fi
-if [ $STEP -le 15 ] ; then
- echo ">>> Step 15: Commit to local trunk branch."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Commit to local trunk branch."
git add "$VERSION_FILE"
git commit -F "$COMMITMSG_FILE" || die "'git commit' failed."
rm -f "$COMMITMSG_FILE"
fi
-if [ $STEP -le 16 ] ; then
- echo ">>> Step 16: Sanity check."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Sanity check."
confirm "Please check if your local checkout is sane: Inspect $VERSION_FILE, \
compile, run tests. Do you want to commit this new trunk revision to the \
repository?"
[[ $? -eq 0 ]] || die "Execution canceled."
fi
-if [ $STEP -le 17 ] ; then
- echo ">>> Step 17. Commit to SVN."
- git svn dcommit || die "'git svn dcommit' failed."
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Commit to SVN."
+ git svn dcommit | tee >(grep -E "^Committed r[0-9]+" \
+ | sed -e 's/^Committed r\([0-9]\+\)/\1/' \
+ > "$TRUNK_REVISION_FILE") \
+ || die "'git svn dcommit' failed."
+ TRUNK_REVISION=$(cat "$TRUNK_REVISION_FILE")
+ persist "TRUNK_REVISION"
+ rm -f "$TRUNK_REVISION_FILE"
fi
-if [ $STEP -le 18 ] ; then
- echo ">>> Step 18: Tag the new revision."
- restore_if_unset "MAJOR"
- restore_if_unset "MINOR"
- restore_if_unset "BUILD"
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Tag the new revision."
+ restore_version_if_unset
git svn tag $MAJOR.$MINOR.$BUILD -m "Tagging version $MAJOR.$MINOR.$BUILD" \
|| die "'git svn tag' failed."
fi
-if [ $STEP -le 19 ] ; then
- echo ">>> Step 19: Cleanup."
- restore_if_unset "CURRENT_BRANCH"
- git checkout -f $CURRENT_BRANCH
- [[ "$TEMP_BRANCH" != "$CURRENT_BRANCH" ]] && git branch -D $TEMP_BRANCH
- [[ "$BRANCHNAME" != "$CURRENT_BRANCH" ]] && git branch -D $BRANCHNAME
- [[ "$TRUNKBRANCH" != "$CURRENT_BRANCH" ]] && git branch -D $TRUNKBRANCH
-fi
+if [ -n "$CHROME_PATH" ] ; then
+
+ let CURRENT_STEP+=1
+ if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Switch to Chromium checkout."
+ V8_PATH=$(pwd)
+ persist "V8_PATH"
+ cd "$CHROME_PATH"
+ initial_environment_checks
+ # Check for a clean workdir.
+ [[ -z "$(git status -s -uno)" ]] \
+ || die "Workspace is not clean. Please commit or undo your changes."
+ # Assert that the DEPS file is there.
+ [[ -w "DEPS" ]] || die "DEPS file not present or not writable; \
+current directory is: $(pwd)."
+ fi
-if [ $STEP -le 20 ] ; then
- echo ">>> Step 20: Done!"
- restore_if_unset "MAJOR"
- restore_if_unset "MINOR"
- restore_if_unset "BUILD"
- echo "Congratulations, you have successfully created the trunk revision \
-$MAJOR.$MINOR.$BUILD. Please don't forget to update the v8rel spreadsheet, \
-and to roll this new version into Chromium."
- # Clean up all temporary files.
- rm -f "$PERSISTFILE_BASENAME"*
+ let CURRENT_STEP+=1
+ if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Update the checkout and create a new branch."
+ git checkout master || die "'git checkout master' failed."
+ git pull || die "'git pull' failed, please try again."
+ restore_if_unset "TRUNK_REVISION"
+ git checkout -b "v8-roll-$TRUNK_REVISION" \
+ || die "Failed to checkout a new branch."
+ fi
+
+ let CURRENT_STEP+=1
+ if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Create and upload CL."
+ # Patch DEPS file.
+ sed -r -e "/\"v8_revision\": /s/\"[0-9]+\"/\"$TRUNK_REVISION\"/" \
+ -i DEPS
+ restore_version_if_unset
+ echo -n "Please enter the email address of a reviewer for the roll CL: "
+ read REVIEWER
+ git commit -am "Update V8 to version $MAJOR.$MINOR.$BUILD.
+
+TBR=$REVIEWER" || die "'git commit' failed."
+ git cl upload --send-mail \
+ || die "'git cl upload' failed, please try again."
+ echo "CL uploaded."
+ fi
+
+ let CURRENT_STEP+=1
+ if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Returning to V8 checkout."
+ restore_if_unset "V8_PATH"
+ cd "$V8_PATH"
+ fi
+fi # if [ -n "$CHROME_PATH" ]
+
+let CURRENT_STEP+=1
+if [ $START_STEP -le $CURRENT_STEP ] ; then
+ echo ">>> Step $CURRENT_STEP: Done!"
+ restore_version_if_unset
+ restore_if_unset "TRUNK_REVISION"
+ if [ -n "$CHROME_PATH" ] ; then
+ echo "Congratulations, you have successfully created the trunk revision \
+$MAJOR.$MINOR.$BUILD and rolled it into Chromium. Please don't forget to \
+update the v8rel spreadsheet:"
+ else
+ echo "Congratulations, you have successfully created the trunk revision \
+$MAJOR.$MINOR.$BUILD. Please don't forget to roll this new version into \
+Chromium, and to update the v8rel spreadsheet:"
+ fi
+ echo -e "$MAJOR.$MINOR.$BUILD\ttrunk\t$TRUNK_REVISION"
+ common_cleanup
+ [[ "$TRUNKBRANCH" != "$CURRENT_BRANCH" ]] && git branch -D $TRUNKBRANCH
fi
diff --git a/src/3rdparty/v8/tools/test-wrapper-gypbuild.py b/src/3rdparty/v8/tools/test-wrapper-gypbuild.py
index a990b7e..fda4105 100755
--- a/src/3rdparty/v8/tools/test-wrapper-gypbuild.py
+++ b/src/3rdparty/v8/tools/test-wrapper-gypbuild.py
@@ -73,6 +73,8 @@ def BuildOptions():
choices=PROGRESS_INDICATORS, default="mono")
result.add_option("--report", help="Print a summary of the tests to be run",
default=False, action="store_true")
+ result.add_option("--download-data", help="Download missing test suite data",
+ default=False, action="store_true")
result.add_option("-s", "--suite", help="A test suite",
default=[], action="append")
result.add_option("-t", "--timeout", help="Timeout in seconds",
@@ -146,7 +148,7 @@ def ProcessOptions(options):
print "Unknown mode %s" % mode
return False
for arch in options.arch:
- if not arch in ['ia32', 'x64', 'arm']:
+ if not arch in ['ia32', 'x64', 'arm', 'mips']:
print "Unknown architecture %s" % arch
return False
@@ -161,6 +163,8 @@ def PassOnOptions(options):
result += ['--progress=' + options.progress]
if options.report:
result += ['--report']
+ if options.download_data:
+ result += ['--download-data']
if options.suite != []:
for suite in options.suite:
result += ['--suite=../../test/' + suite]
@@ -193,9 +197,9 @@ def PassOnOptions(options):
if options.crankshaft:
result += ['--crankshaft']
if options.shard_count != 1:
- result += ['--shard_count=%s' % options.shard_count]
+ result += ['--shard-count=%s' % options.shard_count]
if options.shard_run != 1:
- result += ['--shard_run=%s' % options.shard_run]
+ result += ['--shard-run=%s' % options.shard_run]
if options.noprof:
result += ['--noprof']
return result
diff --git a/src/3rdparty/v8/tools/test.py b/src/3rdparty/v8/tools/test.py
index ecc0062..0aacd99 100755
--- a/src/3rdparty/v8/tools/test.py
+++ b/src/3rdparty/v8/tools/test.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
#
-# Copyright 2008 the V8 project authors. All rights reserved.
+# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -472,7 +472,7 @@ def RunProcess(context, timeout, args, **rest):
popen_args = args
prev_error_mode = SEM_INVALID_VALUE
if utils.IsWindows():
- popen_args = '"' + subprocess.list2cmdline(args) + '"'
+ popen_args = subprocess.list2cmdline(args)
if context.suppress_dialogs:
# Try to change the error mode to avoid dialogs on fatal errors. Don't
# touch any existing error mode flags by merging the existing error mode.
@@ -631,9 +631,15 @@ class TestRepository(TestSuite):
def GetBuildRequirements(self, path, context):
return self.GetConfiguration(context).GetBuildRequirements()
+ def DownloadData(self, context):
+ config = self.GetConfiguration(context)
+ if 'DownloadData' in dir(config):
+ config.DownloadData()
+
def AddTestsToList(self, result, current_path, path, context, mode):
- for v in self.GetConfiguration(context).VariantFlags():
- tests = self.GetConfiguration(context).ListTests(current_path, path, mode, v)
+ config = self.GetConfiguration(context)
+ for v in config.VariantFlags():
+ tests = config.ListTests(current_path, path, mode, v)
for t in tests: t.variant_flags = v
result += tests
@@ -655,6 +661,12 @@ class LiteralTestSuite(TestSuite):
result += test.GetBuildRequirements(rest, context)
return result
+ def DownloadData(self, path, context):
+ (name, rest) = CarCdr(path)
+ for test in self.tests:
+ if not name or name.match(test.GetName()):
+ test.DownloadData(context)
+
def ListTests(self, current_path, path, context, mode, variant_flags):
(name, rest) = CarCdr(path)
result = [ ]
@@ -674,8 +686,8 @@ SUFFIX = {
'debug' : '_g',
'release' : '' }
FLAGS = {
- 'debug' : ['--enable-slow-asserts', '--debug-code', '--verify-heap'],
- 'release' : []}
+ 'debug' : ['--nobreak-on-abort', '--enable-slow-asserts', '--debug-code', '--verify-heap'],
+ 'release' : ['--nobreak-on-abort']}
TIMEOUT_SCALEFACTOR = {
'debug' : 4,
'release' : 1 }
@@ -711,7 +723,7 @@ class Context(object):
def GetTimeout(self, testcase, mode):
result = self.timeout * TIMEOUT_SCALEFACTOR[mode]
if '--stress-opt' in self.GetVmFlags(testcase, mode):
- return result * 2
+ return result * 4
else:
return result
@@ -850,6 +862,9 @@ class Operation(Expression):
elif self.op == '==':
inter = self.left.GetOutcomes(env, defs).Intersect(self.right.GetOutcomes(env, defs))
return not inter.IsEmpty()
+ elif self.op == '!=':
+ inter = self.left.GetOutcomes(env, defs).Intersect(self.right.GetOutcomes(env, defs))
+ return inter.IsEmpty()
else:
assert self.op == '&&'
return self.left.Evaluate(env, defs) and self.right.Evaluate(env, defs)
@@ -932,6 +947,9 @@ class Tokenizer(object):
elif self.Current(2) == '==':
self.AddToken('==')
self.Advance(2)
+ elif self.Current(2) == '!=':
+ self.AddToken('!=')
+ self.Advance(2)
else:
return None
return self.tokens
@@ -984,7 +1002,7 @@ def ParseAtomicExpression(scan):
return None
-BINARIES = ['==']
+BINARIES = ['==', '!=']
def ParseOperatorExpression(scan):
left = ParseAtomicExpression(scan)
if not left: return None
@@ -1006,7 +1024,7 @@ def ParseConditionalExpression(scan):
right = ParseOperatorExpression(scan)
if not right:
return None
- left= Operation(left, 'if', right)
+ left = Operation(left, 'if', right)
return left
@@ -1186,6 +1204,8 @@ def BuildOptions():
default='scons')
result.add_option("--report", help="Print a summary of the tests to be run",
default=False, action="store_true")
+ result.add_option("--download-data", help="Download missing test suite data",
+ default=False, action="store_true")
result.add_option("-s", "--suite", help="A test suite",
default=[], action="append")
result.add_option("-t", "--timeout", help="Timeout in seconds",
@@ -1211,6 +1231,7 @@ def BuildOptions():
dest="suppress_dialogs", default=True, action="store_true")
result.add_option("--no-suppress-dialogs", help="Display Windows dialogs for crashing tests",
dest="suppress_dialogs", action="store_false")
+ result.add_option("--mips-arch-variant", help="mips architecture variant: mips32r1/mips32r2", default="mips32r2");
result.add_option("--shell", help="Path to V8 shell", default="d8")
result.add_option("--isolates", help="Whether to test isolates", default=False, action="store_true")
result.add_option("--store-unexpected-output",
@@ -1272,6 +1293,9 @@ def ProcessOptions(options):
if options.snapshot:
options.scons_flags.append("snapshot=on")
global VARIANT_FLAGS
+ if options.mips_arch_variant:
+ options.scons_flags.append("mips_arch_variant=" + options.mips_arch_variant)
+
if options.stress_only:
VARIANT_FLAGS = [['--stress-opt', '--always-opt']]
if options.nostress:
@@ -1452,6 +1476,11 @@ def Main():
root.GetTestStatus(context, sections, defs)
config = Configuration(sections, defs)
+ # Download missing test suite data if requested.
+ if options.download_data:
+ for path in paths:
+ root.DownloadData(path, context)
+
# List the tests
all_cases = [ ]
all_unused = [ ]
diff --git a/src/3rdparty/v8/tools/tickprocessor-driver.js b/src/3rdparty/v8/tools/tickprocessor-driver.js
index 4201e43..9af5ab6 100644
--- a/src/3rdparty/v8/tools/tickprocessor-driver.js
+++ b/src/3rdparty/v8/tools/tickprocessor-driver.js
@@ -52,6 +52,7 @@ if (params.snapshotLogFileName) {
var tickProcessor = new TickProcessor(
new (entriesProviders[params.platform])(params.nm),
params.separateIc,
+ params.callGraphSize,
params.ignoreUnknown,
params.stateFilter,
snapshotLogProcessor);
diff --git a/src/3rdparty/v8/tools/tickprocessor.js b/src/3rdparty/v8/tools/tickprocessor.js
index 5f57835..05a3369 100644
--- a/src/3rdparty/v8/tools/tickprocessor.js
+++ b/src/3rdparty/v8/tools/tickprocessor.js
@@ -146,7 +146,12 @@ SnapshotLogProcessor.prototype.getSerializedEntryName = function(pos) {
function TickProcessor(
- cppEntriesProvider, separateIc, ignoreUnknown, stateFilter, snapshotLogProcessor) {
+ cppEntriesProvider,
+ separateIc,
+ callGraphSize,
+ ignoreUnknown,
+ stateFilter,
+ snapshotLogProcessor) {
LogReader.call(this, {
'shared-library': { parsers: [null, parseInt, parseInt],
processor: this.processSharedLibrary },
@@ -181,6 +186,7 @@ function TickProcessor(
'end-code-region': null });
this.cppEntriesProvider_ = cppEntriesProvider;
+ this.callGraphSize_ = callGraphSize;
this.ignoreUnknown_ = ignoreUnknown;
this.stateFilter_ = stateFilter;
this.snapshotLogProcessor_ = snapshotLogProcessor;
@@ -240,6 +246,7 @@ TickProcessor.CodeTypes = {
TickProcessor.CALL_PROFILE_CUTOFF_PCT = 2.0;
+TickProcessor.CALL_GRAPH_SIZE = 5;
/**
* @override
@@ -535,7 +542,7 @@ TickProcessor.prototype.printHeavyProfile = function(profile, opt_indent) {
padLeft(rec.parentTotalPercent.toFixed(1), 5) + '% ' +
indentStr + rec.internalFuncName);
// Limit backtrace depth.
- if (indent < 10) {
+ if (indent < 2 * self.callGraphSize_) {
self.printHeavyProfile(rec.children, indent + 2);
}
// Delimit top-level functions.
@@ -764,6 +771,8 @@ function ArgumentsProcessor(args) {
'Show only ticks from OTHER VM state'],
'-e': ['stateFilter', TickProcessor.VmStates.EXTERNAL,
'Show only ticks from EXTERNAL VM state'],
+ '--call-graph-size': ['callGraphSize', TickProcessor.CALL_GRAPH_SIZE,
+ 'Set the call graph size'],
'--ignore-unknown': ['ignoreUnknown', true,
'Exclude ticks of unknown code entries from processing'],
'--separate-ic': ['separateIc', true,
@@ -792,6 +801,7 @@ ArgumentsProcessor.DEFAULTS = {
snapshotLogFileName: null,
platform: 'unix',
stateFilter: null,
+ callGraphSize: 5,
ignoreUnknown: false,
separateIc: false,
nm: 'nm'
diff --git a/src/3rdparty/v8/tools/utils.py b/src/3rdparty/v8/tools/utils.py
index fb94d14..232314c 100644
--- a/src/3rdparty/v8/tools/utils.py
+++ b/src/3rdparty/v8/tools/utils.py
@@ -61,6 +61,8 @@ def GuessOS():
return 'openbsd'
elif id == 'SunOS':
return 'solaris'
+ elif id == 'NetBSD':
+ return 'netbsd'
else:
return None